CombinedText stringlengths 4 3.42M |
|---|
mod abort;
mod breakpoint;
mod bti;
mod common;
mod fp;
mod hvc;
mod ld64b;
mod ldc;
mod mcr;
mod msr;
mod pauth;
mod serror;
mod sve;
#[cfg(test)]
mod tests;
mod wf;
use super::{DecodeError, FieldInfo};
use abort::{decode_iss_data_abort, decode_iss_instruction_abort};
use breakpoint::{
decode_iss_breakpoint, decode_iss_breakpoint_vector_catch, decode_iss_software_step,
decode_iss_watchpoint,
};
use bti::decode_iss_bti;
use fp::decode_iss_fp;
use hvc::decode_iss_hvc;
use ld64b::decode_iss_ld64b;
use ldc::decode_iss_ldc;
use mcr::{decode_iss_mcr, decode_iss_mcrr};
use msr::decode_iss_msr;
use pauth::decode_iss_pauth;
use serror::decode_iss_serror;
use sve::decode_iss_sve;
use wf::decode_iss_wf;
fn decode_iss_res0(iss: u64) -> Result<Vec<FieldInfo>, DecodeError> {
    // For this exception class the whole 25-bit ISS is reserved and must be zero.
    let field = FieldInfo::get(iss, "RES0", Some("Reserved"), 0, 25);
    Ok(vec![field
        .check_res0()?
        .with_description("ISS is RES0".to_string())])
}
/// Decodes the given Exception Syndrome Register value, or returns an error if it is not valid.
pub fn decode(esr: u64) -> Result<Vec<FieldInfo>, DecodeError> {
    // Bits 37-63 of the ESR are reserved and must read as zero for the
    // register value to be valid at all.
    let res0 = FieldInfo::get(esr, "RES0", Some("Reserved"), 37, 64).check_res0()?;
    let iss2 = FieldInfo::get(esr, "ISS2", None, 32, 37);
    let ec = FieldInfo::get(esr, "EC", Some("Exception Class"), 26, 32);
    let il =
        FieldInfo::get_bit(esr, "IL", Some("Instruction Length"), 25).describe_bit(describe_il);
    let iss = FieldInfo::get(esr, "ISS", Some("Instruction Specific Syndrome"), 0, 25);
    // The EC field selects both the human-readable class name and the decoder
    // used for the ISS bits.
    let (class, iss_subfields, iss_description) = match ec.value {
        0b000000 => ("Unknown reason", decode_iss_res0(iss.value)?, None),
        0b000001 => (
            "Wrapped WF* instruction execution",
            decode_iss_wf(iss.value)?,
            None,
        ),
        0b000011 => (
            "Trapped MCR or MRC access with coproc=0b1111",
            decode_iss_mcr(iss.value)?,
            None,
        ),
        0b000100 => (
            "Trapped MCRR or MRRC access with coproc=0b1111",
            decode_iss_mcrr(iss.value)?,
            None,
        ),
        0b000101 => (
            "Trapped MCR or MRC access with coproc=0b1110",
            decode_iss_mcr(iss.value)?,
            None,
        ),
        0b000110 => (
            "Trapped LDC or STC access",
            decode_iss_ldc(iss.value)?,
            None,
        ),
        0b000111 => (
            "Trapped access to SVE, Advanced SIMD or floating point",
            decode_iss_sve(iss.value)?,
            None,
        ),
        0b001010 => (
            "Trapped execution of an LD64B, ST64B, ST64BV, or ST64BV0 instruction",
            decode_iss_ld64b(iss.value)?,
            None,
        ),
        0b001100 => (
            "Trapped MRRC access with (coproc==0b1110)",
            decode_iss_mcrr(iss.value)?,
            None,
        ),
        0b001101 => ("Branch Target Exception", decode_iss_bti(iss.value)?, None),
        0b001110 => ("Illegal Execution state", decode_iss_res0(iss.value)?, None),
        0b010001 => (
            "SVC instruction execution in AArch32 state",
            decode_iss_hvc(iss.value)?,
            None,
        ),
        0b010101 => (
            "SVC instruction execution in AArch64 state",
            decode_iss_hvc(iss.value)?,
            None,
        ),
        // HVC and SMC executed in AArch64 state (reported via ESR_EL2) carry
        // the same imm16-style ISS as SVC, so they share the HVC decoder.
        0b010110 => (
            "HVC instruction execution in AArch64 state",
            decode_iss_hvc(iss.value)?,
            None,
        ),
        0b010111 => (
            "SMC instruction execution in AArch64 state",
            decode_iss_hvc(iss.value)?,
            None,
        ),
        0b011000 => {
            let (subfields, description) = decode_iss_msr(iss.value)?;
            (
                "Trapped MSR, MRS or System instruction execution in AArch64 state",
                subfields,
                description,
            )
        }
        0b011001 => (
            "Access to SVE functionality trapped as a result of CPACR_EL1.ZEN, CPTR_EL2.ZEN, \
             CPTR_EL2.TZ, or CPTR_EL3.EZ",
            decode_iss_res0(iss.value)?,
            None,
        ),
        0b011100 => (
            "Exception from a Pointer Authentication instruction authentication failure",
            decode_iss_pauth(iss.value)?,
            None,
        ),
        0b100000 => (
            "Instruction Abort from a lower Exception level",
            decode_iss_instruction_abort(iss.value)?,
            None,
        ),
        0b100001 => (
            "Instruction Abort taken without a change in Exception level",
            decode_iss_instruction_abort(iss.value)?,
            None,
        ),
        0b100010 => (
            "PC alignment fault exception",
            decode_iss_res0(iss.value)?,
            None,
        ),
        0b100100 => (
            "Data Abort from a lower Exception level",
            decode_iss_data_abort(iss.value)?,
            None,
        ),
        0b100101 => (
            "Data Abort taken without a change in Exception level",
            decode_iss_data_abort(iss.value)?,
            None,
        ),
        0b100110 => (
            "SP alignment fault exception",
            decode_iss_res0(iss.value)?,
            None,
        ),
        0b101000 => (
            "Trapped floating-point exception taken from AArch32 state",
            decode_iss_fp(iss.value)?,
            None,
        ),
        0b101100 => (
            "Trapped floating-point exception taken from AArch64 state",
            decode_iss_fp(iss.value)?,
            None,
        ),
        0b101111 => ("SError interrupt", decode_iss_serror(iss.value)?, None),
        0b110000 => (
            "Breakpoint exception from a lower Exception level",
            decode_iss_breakpoint_vector_catch(iss.value)?,
            None,
        ),
        0b110001 => (
            "Breakpoint exception taken without a change in Exception level",
            decode_iss_breakpoint_vector_catch(iss.value)?,
            None,
        ),
        0b110010 => (
            "Software Step exception from a lower Exception level",
            decode_iss_software_step(iss.value)?,
            None,
        ),
        0b110011 => (
            "Software Step exception taken without a change in Exception level",
            decode_iss_software_step(iss.value)?,
            None,
        ),
        0b110100 => (
            "Watchpoint exception from a lower Exception level",
            decode_iss_watchpoint(iss.value)?,
            None,
        ),
        0b110101 => (
            "Watchpoint exception taken without a change in Exception level",
            decode_iss_watchpoint(iss.value)?,
            None,
        ),
        0b111000 => (
            "BKPT instruction execution in AArch32 state",
            decode_iss_breakpoint(iss.value)?,
            None,
        ),
        0b111100 => (
            "BRK instruction execution in AArch64 state",
            decode_iss_breakpoint(iss.value)?,
            None,
        ),
        _ => return Err(DecodeError::InvalidEc { ec: ec.value }),
    };
    // Rebuild the ISS field carrying the subfields/description chosen above.
    let iss = FieldInfo {
        description: iss_description,
        subfields: iss_subfields,
        ..iss
    };
    let ec = ec.with_description(class.to_string());
    Ok(vec![res0, iss2, ec, il, iss])
}
/// Returns a human-readable description of the IL (Instruction Length) bit.
fn describe_il(il: bool) -> &'static str {
    match il {
        true => "32-bit instruction trapped",
        false => "16-bit instruction trapped",
    }
}
esr: Add HVC and SMC ECs for ESR_EL2
mod abort;
mod breakpoint;
mod bti;
mod common;
mod fp;
mod hvc;
mod ld64b;
mod ldc;
mod mcr;
mod msr;
mod pauth;
mod serror;
mod sve;
#[cfg(test)]
mod tests;
mod wf;
use super::{DecodeError, FieldInfo};
use abort::{decode_iss_data_abort, decode_iss_instruction_abort};
use breakpoint::{
decode_iss_breakpoint, decode_iss_breakpoint_vector_catch, decode_iss_software_step,
decode_iss_watchpoint,
};
use bti::decode_iss_bti;
use fp::decode_iss_fp;
use hvc::decode_iss_hvc;
use ld64b::decode_iss_ld64b;
use ldc::decode_iss_ldc;
use mcr::{decode_iss_mcr, decode_iss_mcrr};
use msr::decode_iss_msr;
use pauth::decode_iss_pauth;
use serror::decode_iss_serror;
use sve::decode_iss_sve;
use wf::decode_iss_wf;
fn decode_iss_res0(iss: u64) -> Result<Vec<FieldInfo>, DecodeError> {
    // Exception classes with no syndrome information require all 25 ISS bits
    // to be zero; anything else is a decode error.
    let reserved = FieldInfo::get(iss, "RES0", Some("Reserved"), 0, 25).check_res0()?;
    Ok(vec![reserved.with_description("ISS is RES0".to_string())])
}
/// Decodes the given Exception Syndrome Register value, or returns an error if it is not valid.
pub fn decode(esr: u64) -> Result<Vec<FieldInfo>, DecodeError> {
    // Bits 37-63 of the ESR are reserved and must read as zero.
    let res0 = FieldInfo::get(esr, "RES0", Some("Reserved"), 37, 64).check_res0()?;
    let iss2 = FieldInfo::get(esr, "ISS2", None, 32, 37);
    let ec = FieldInfo::get(esr, "EC", Some("Exception Class"), 26, 32);
    let il =
        FieldInfo::get_bit(esr, "IL", Some("Instruction Length"), 25).describe_bit(describe_il);
    let iss = FieldInfo::get(esr, "ISS", Some("Instruction Specific Syndrome"), 0, 25);
    // The EC field selects both the class description and the decoder applied
    // to the ISS bits; unrecognised EC values are rejected below.
    let (class, iss_subfields, iss_description) = match ec.value {
        0b000000 => ("Unknown reason", decode_iss_res0(iss.value)?, None),
        0b000001 => (
            "Wrapped WF* instruction execution",
            decode_iss_wf(iss.value)?,
            None,
        ),
        0b000011 => (
            "Trapped MCR or MRC access with coproc=0b1111",
            decode_iss_mcr(iss.value)?,
            None,
        ),
        0b000100 => (
            "Trapped MCRR or MRRC access with coproc=0b1111",
            decode_iss_mcrr(iss.value)?,
            None,
        ),
        0b000101 => (
            "Trapped MCR or MRC access with coproc=0b1110",
            decode_iss_mcr(iss.value)?,
            None,
        ),
        0b000110 => (
            "Trapped LDC or STC access",
            decode_iss_ldc(iss.value)?,
            None,
        ),
        0b000111 => (
            "Trapped access to SVE, Advanced SIMD or floating point",
            decode_iss_sve(iss.value)?,
            None,
        ),
        0b001010 => (
            "Trapped execution of an LD64B, ST64B, ST64BV, or ST64BV0 instruction",
            decode_iss_ld64b(iss.value)?,
            None,
        ),
        0b001100 => (
            "Trapped MRRC access with (coproc==0b1110)",
            decode_iss_mcrr(iss.value)?,
            None,
        ),
        0b001101 => ("Branch Target Exception", decode_iss_bti(iss.value)?, None),
        0b001110 => ("Illegal Execution state", decode_iss_res0(iss.value)?, None),
        0b010001 => (
            "SVC instruction execution in AArch32 state",
            decode_iss_hvc(iss.value)?,
            None,
        ),
        0b010101 => (
            "SVC instruction execution in AArch64 state",
            decode_iss_hvc(iss.value)?,
            None,
        ),
        // HVC/SMC from AArch64 (reported via ESR_EL2) share the SVC/HVC ISS
        // decoder.
        0b010110 => (
            "HVC instruction execution in AArch64 state",
            decode_iss_hvc(iss.value)?,
            None,
        ),
        0b010111 => (
            "SMC instruction execution in AArch64 state",
            decode_iss_hvc(iss.value)?,
            None,
        ),
        0b011000 => {
            // MSR/MRS decoding can also produce a description of the trapped
            // register access, unlike the other decoders.
            let (subfields, description) = decode_iss_msr(iss.value)?;
            (
                "Trapped MSR, MRS or System instruction execution in AArch64 state",
                subfields,
                description,
            )
        }
        0b011001 => (
            "Access to SVE functionality trapped as a result of CPACR_EL1.ZEN, CPTR_EL2.ZEN, \
             CPTR_EL2.TZ, or CPTR_EL3.EZ",
            decode_iss_res0(iss.value)?,
            None,
        ),
        0b011100 => (
            "Exception from a Pointer Authentication instruction authentication failure",
            decode_iss_pauth(iss.value)?,
            None,
        ),
        0b100000 => (
            "Instruction Abort from a lower Exception level",
            decode_iss_instruction_abort(iss.value)?,
            None,
        ),
        0b100001 => (
            "Instruction Abort taken without a change in Exception level",
            decode_iss_instruction_abort(iss.value)?,
            None,
        ),
        0b100010 => (
            "PC alignment fault exception",
            decode_iss_res0(iss.value)?,
            None,
        ),
        0b100100 => (
            "Data Abort from a lower Exception level",
            decode_iss_data_abort(iss.value)?,
            None,
        ),
        0b100101 => (
            "Data Abort taken without a change in Exception level",
            decode_iss_data_abort(iss.value)?,
            None,
        ),
        0b100110 => (
            "SP alignment fault exception",
            decode_iss_res0(iss.value)?,
            None,
        ),
        0b101000 => (
            "Trapped floating-point exception taken from AArch32 state",
            decode_iss_fp(iss.value)?,
            None,
        ),
        0b101100 => (
            "Trapped floating-point exception taken from AArch64 state",
            decode_iss_fp(iss.value)?,
            None,
        ),
        0b101111 => ("SError interrupt", decode_iss_serror(iss.value)?, None),
        0b110000 => (
            "Breakpoint exception from a lower Exception level",
            decode_iss_breakpoint_vector_catch(iss.value)?,
            None,
        ),
        0b110001 => (
            "Breakpoint exception taken without a change in Exception level",
            decode_iss_breakpoint_vector_catch(iss.value)?,
            None,
        ),
        0b110010 => (
            "Software Step exception from a lower Exception level",
            decode_iss_software_step(iss.value)?,
            None,
        ),
        0b110011 => (
            "Software Step exception taken without a change in Exception level",
            decode_iss_software_step(iss.value)?,
            None,
        ),
        0b110100 => (
            "Watchpoint exception from a lower Exception level",
            decode_iss_watchpoint(iss.value)?,
            None,
        ),
        0b110101 => (
            "Watchpoint exception taken without a change in Exception level",
            decode_iss_watchpoint(iss.value)?,
            None,
        ),
        0b111000 => (
            "BKPT instruction execution in AArch32 state",
            decode_iss_breakpoint(iss.value)?,
            None,
        ),
        0b111100 => (
            "BRK instruction execution in AArch64 state",
            decode_iss_breakpoint(iss.value)?,
            None,
        ),
        _ => return Err(DecodeError::InvalidEc { ec: ec.value }),
    };
    // Rebuild the ISS field carrying the subfields/description chosen above.
    let iss = FieldInfo {
        description: iss_description,
        subfields: iss_subfields,
        ..iss
    };
    let ec = ec.with_description(class.to_string());
    Ok(vec![res0, iss2, ec, il, iss])
}
/// Describes the ESR IL bit: set means a 32-bit instruction was trapped,
/// clear means a 16-bit instruction.
fn describe_il(il: bool) -> &'static str {
    match il {
        false => "16-bit instruction trapped",
        true => "32-bit instruction trapped",
    }
}
|
#![cfg(test)]
use crate::per_block_processing;
use super::block_processing_builder::BlockProcessingBuilder;
use types::*;
pub const VALIDATOR_COUNT: usize = 10;

/// Smoke test: processing a freshly built block against its matching state
/// should complete without returning an error.
#[test]
fn runs_without_error() {
    let chain_spec = ChainSpec::foundation();
    let mut block_builder = BlockProcessingBuilder::new(VALIDATOR_COUNT, &chain_spec);
    // Position the state and block in the last slot of the 4th epoch.
    let target_slot = (chain_spec.genesis_epoch + 4).end_slot(chain_spec.slots_per_epoch);
    block_builder.set_slot(target_slot, &chain_spec);
    block_builder.build_caches(&chain_spec);
    let (block, mut state) = block_builder.build(&chain_spec);
    per_block_processing(&mut state, &block, &chain_spec).unwrap();
}
mapped out invalid states... 59
#![cfg(test)]
use crate::per_block_processing;
use super::block_processing_builder::BlockProcessingBuilder;
use types::*;
// Number of validators in the test genesis state.
pub const VALIDATOR_COUNT: usize = 10;
// Smoke test: per_block_processing on a freshly built block/state pair must
// succeed. Only the absence of errors is checked, not any post-state values.
#[test]
fn runs_without_error() {
    let spec = ChainSpec::foundation();
    let mut builder = BlockProcessingBuilder::new(VALIDATOR_COUNT, &spec);
    // Set the state and block to be in the last slot of the 4th epoch.
    let last_slot_of_epoch = (spec.genesis_epoch + 4).end_slot(spec.slots_per_epoch);
    builder.set_slot(last_slot_of_epoch, &spec);
    builder.build_caches(&spec);
    let (block, mut state) = builder.build(&spec);
    per_block_processing(&mut state, &block, &spec).unwrap();
}
// process_block_header
// Invalid::StateSlotMismatch
// Invalid::ParentBlockRootMismatch
// verify_block_signature
// Invalid::BadSignature
// process_randao
// Invalid::BadRandaoSignature
// process_proposer_slashings
// Invalid::MaxProposerSlashingsExceeded
// verify_proposer_slashing
// Invalid::ProposerUnknown
// Invalid::ProposalSlotMismatch
// Invalid::ProposalsIdentical
// Invalid::ProposerAlreadySlashed
// Invalid::ProposerAlreadyWithdrawn
// Invalid::BadProposal1Signature
// Invalid::BadProposal2Signature
// process_attester_slashings
// Invalid::MaxAttesterSlashingsExceed
// verify_attester_slashing
// Invalid::AttestationDataIdentical
// Invalid::NotSlashable
// Invalid::SlashableAttestation1Invalid
// Invalid::SlashableAttestation2Invalid
// process_attestations
// Invalid::MaxAttestationsExceeded
// validate_attestation
// Invalid::PreGenesis
// Invalid::IncludedTooLate
// Invalid::IncludedTooEarly
// Invalid::BadPreviousCrosslink
// Invalid::AggregationBitfieldIsEmpty
// Invalid::CustodyBitfieldHasSetBits
// Invalid::NoCommitteeForShard
// Invalid::BadCustodyBitfieldLength
// Invalid::BadAggregationBitfieldLength
// Invalid::ShardBlockRootNotZero
// verify_justified_epoch_and_root
// Invalid::WrongJustifiedEpoch (current)
// Invalid::WrongJustifiedRoot (current)
// Invalid::WrongJustifiedEpoch (previous)
// Invalid::WrongJustifiedRoot (previous)
// verify_attestation_signature
// Invalid::BadAggregationBitfieldLength
// Invalid::BadCustodyBitfieldLength
// BeaconStateError::UnknownValidator
// Invalid::BadSignature
// process_deposits
// Invalid::MaxDepositsExceeded
// verify_deposit
// Invalid::BadProofOfPossession
// Invalid::BadMerkleProof
// verify_deposit_index
// Invalid::BadIndex
// process_exits
// Invalid::MaxExitsExceeded
// verify_exit
// Invalid::ValidatorUnknown
// Invalid::AlreadyExited
// Invalid::AlreadyInitiatedExited
// Invalid::FutureEpoch
// Invalid::TooYoungToLeave
// Invalid::BadSignature
|
use crate::raw::{AllocRef, Bucket, Global, RawDrain, RawIntoIter, RawIter, RawTable};
use crate::TryReserveError;
use core::borrow::Borrow;
use core::fmt::{self, Debug};
use core::hash::{BuildHasher, Hash, Hasher};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ops::Index;
/// Default hasher for `HashMap`.
#[cfg(feature = "ahash")]
pub type DefaultHashBuilder = ahash::RandomState;
/// Dummy default hasher for `HashMap`.
///
/// This enum has no variants and therefore can never be constructed; it only
/// exists so the `S = DefaultHashBuilder` default type parameter still
/// compiles when the "ahash" feature is disabled.
#[cfg(not(feature = "ahash"))]
pub enum DefaultHashBuilder {}
/// A hash map implemented with quadratic probing and SIMD lookup.
///
/// The default hashing algorithm is currently [`AHash`], though this is
/// subject to change at any point in the future. This hash function is very
/// fast for all types of keys, but this algorithm will typically *not* protect
/// against attacks such as HashDoS.
///
/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
///
/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
/// If you implement these yourself, it is important that the following
/// property holds:
///
/// ```text
/// k1 == k2 -> hash(k1) == hash(k2)
/// ```
///
/// In other words, if two keys are equal, their hashes must be equal.
///
/// It is a logic error for a key to be modified in such a way that the key's
/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
/// the [`Eq`] trait, changes while it is in the map. This is normally only
/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
/// It is also a logic error for the [`Hash`] implementation of a key to panic.
/// This is generally only possible if the trait is implemented manually. If a
/// panic does occur then the contents of the `HashMap` may become corrupted and
/// some items may be dropped from the table.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `HashMap<String, String>` in this example).
/// let mut book_reviews = HashMap::new();
///
/// // Review some books.
/// book_reviews.insert(
/// "Adventures of Huckleberry Finn".to_string(),
/// "My favorite book.".to_string(),
/// );
/// book_reviews.insert(
/// "Grimms' Fairy Tales".to_string(),
/// "Masterpiece.".to_string(),
/// );
/// book_reviews.insert(
/// "Pride and Prejudice".to_string(),
/// "Very enjoyable.".to_string(),
/// );
/// book_reviews.insert(
/// "The Adventures of Sherlock Holmes".to_string(),
/// "Eye lyked it alot.".to_string(),
/// );
///
/// // Check for a specific one.
/// // When collections store owned values (String), they can still be
/// // queried using references (&str).
/// if !book_reviews.contains_key("Les Misérables") {
/// println!("We've got {} reviews, but Les Misérables ain't one.",
/// book_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// book_reviews.remove("The Adventures of Sherlock Holmes");
///
/// // Look up the values associated with some keys.
/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
/// for &book in &to_find {
/// match book_reviews.get(book) {
/// Some(review) => println!("{}: {}", book, review),
/// None => println!("{} is unreviewed.", book)
/// }
/// }
///
/// // Look up the value for a key (will panic if the key is not found).
/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
///
/// // Iterate over everything.
/// for (book, review) in &book_reviews {
/// println!("{}: \"{}\"", book, review);
/// }
/// ```
///
/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
/// for more complex methods of getting, setting, updating and removing keys and
/// their values:
///
/// ```
/// use hashbrown::HashMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `HashMap<&str, u8>` in this example).
/// let mut player_stats = HashMap::new();
///
/// fn random_stat_buff() -> u8 {
/// // could actually return some random value here - let's just return
/// // some fixed value for now
/// 42
/// }
///
/// // insert a key only if it doesn't already exist
/// player_stats.entry("health").or_insert(100);
///
/// // insert a key using a function that provides a new value only if it
/// // doesn't already exist
/// player_stats.entry("defence").or_insert_with(random_stat_buff);
///
/// // update a key, guarding against the key possibly not being set
/// let stat = player_stats.entry("attack").or_insert(100);
/// *stat += random_stat_buff();
/// ```
///
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
/// [`default`]: #method.default
/// [`with_hasher`]: #method.with_hasher
/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
/// [`AHash`]: https://crates.io/crates/ahash
///
/// ```
/// use hashbrown::HashMap;
///
/// #[derive(Hash, Eq, PartialEq, Debug)]
/// struct Viking {
/// name: String,
/// country: String,
/// }
///
/// impl Viking {
/// /// Creates a new Viking.
/// fn new(name: &str, country: &str) -> Viking {
/// Viking { name: name.to_string(), country: country.to_string() }
/// }
/// }
///
/// // Use a HashMap to store the vikings' health points.
/// let mut vikings = HashMap::new();
///
/// vikings.insert(Viking::new("Einar", "Norway"), 25);
/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
///
/// // Use derived implementation to print the status of the vikings.
/// for (viking, health) in &vikings {
/// println!("{:?} has {} hp", viking, health);
/// }
/// ```
///
/// A `HashMap` with fixed list of elements can be initialized from an array:
///
/// ```
/// use hashbrown::HashMap;
///
/// let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)]
/// .iter().cloned().collect();
/// // use the values stored in map
/// ```
pub struct HashMap<K, V, S = DefaultHashBuilder, A: AllocRef + Clone = Global> {
    // Produces the `Hasher` instances used to hash keys (see `make_hash`).
    pub(crate) hash_builder: S,
    // Backing storage: the raw probing table holding the `(K, V)` pairs.
    pub(crate) table: RawTable<(K, V), A>,
}
impl<K: Clone, V: Clone, S: Clone> Clone for HashMap<K, V, S> {
fn clone(&self) -> Self {
HashMap {
hash_builder: self.hash_builder.clone(),
table: self.table.clone(),
}
}
fn clone_from(&mut self, source: &Self) {
self.table.clone_from(&source.table);
// Update hash_builder only if we successfully cloned all elements.
self.hash_builder.clone_from(&source.hash_builder);
}
}
/// Returns a closure hashing the key of a `(key, value)` pair with the given
/// builder. Funnelling all callers through this one closure type prevents
/// functions such as `RawTable::reserve` from being monomorphised repeatedly.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hasher<K: Hash, V>(
    hash_builder: &impl BuildHasher,
) -> impl Fn(&(K, V)) -> u64 + '_ {
    move |pair| make_hash(hash_builder, &pair.0)
}
/// Returns a closure comparing an entry's key against `k`. Funnelling all
/// callers through this one closure type prevents functions such as
/// `RawTable::reserve` from being monomorphised repeatedly.
#[cfg_attr(feature = "inline-more", inline)]
fn equivalent_key<Q, K, V>(k: &Q) -> impl Fn(&(K, V)) -> bool + '_
where
    K: Borrow<Q>,
    Q: ?Sized + Eq,
{
    move |pair| k.eq(pair.0.borrow())
}
/// Returns a closure comparing a bare key against `k`. Funnelling all callers
/// through this one closure type prevents functions such as
/// `RawTable::reserve` from being monomorphised repeatedly.
#[cfg_attr(feature = "inline-more", inline)]
fn equivalent<Q, K>(k: &Q) -> impl Fn(&K) -> bool + '_
where
    K: Borrow<Q>,
    Q: ?Sized + Eq,
{
    move |key| k.eq(key.borrow())
}
/// Hashes `val` using a fresh hasher obtained from `hash_builder`.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hash<K: Hash + ?Sized>(hash_builder: &impl BuildHasher, val: &K) -> u64 {
    let mut hasher = hash_builder.build_hasher();
    val.hash(&mut hasher);
    hasher.finish()
}
#[cfg(feature = "ahash")]
impl<K, V> HashMap<K, V, DefaultHashBuilder> {
    /// Creates an empty `HashMap` using the default hash builder.
    ///
    /// The map starts with a capacity of 0 and allocates nothing until the
    /// first insertion.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new() -> Self {
        Self::default()
    }

    /// Creates an empty `HashMap` able to hold at least `capacity` elements
    /// without reallocating. A `capacity` of 0 allocates nothing.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_and_hasher(capacity, DefaultHashBuilder::default())
    }
}
#[cfg(feature = "ahash")]
impl<K, V, A: AllocRef + Clone> HashMap<K, V, DefaultHashBuilder, A> {
    /// Creates an empty `HashMap` backed by the given allocator.
    ///
    /// The map starts with a capacity of 0 and allocates nothing until the
    /// first insertion.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new_in(alloc: A) -> Self {
        Self::with_hasher_in(DefaultHashBuilder::default(), alloc)
    }

    /// Creates an empty `HashMap` able to hold at least `capacity` elements
    /// without reallocating, backed by the given allocator. A `capacity` of 0
    /// allocates nothing.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        Self::with_capacity_and_hasher_in(capacity, DefaultHashBuilder::default(), alloc)
    }
}
impl<K, V, S> HashMap<K, V, S> {
    /// Creates an empty `HashMap` that uses `hash_builder` to hash keys.
    ///
    /// The created map has the default initial capacity.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and is
    /// designed to allow HashMaps to be resistant to attacks that cause many
    /// collisions and very poor performance. Setting it manually using this
    /// function can expose a DoS attack vector.
    ///
    /// The `hash_builder` passed should implement the [`BuildHasher`] trait
    /// for the HashMap to be useful, see its documentation for details.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_hasher(s);
    /// map.insert(1, 2);
    /// ```
    ///
    /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
    #[cfg_attr(feature = "inline-more", inline)]
    pub const fn with_hasher(hash_builder: S) -> Self {
        Self {
            hash_builder,
            table: RawTable::new(),
        }
    }

    /// Creates an empty `HashMap` able to hold at least `capacity` elements
    /// without reallocating, using `hash_builder` to hash the keys. A
    /// `capacity` of 0 allocates nothing.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and is
    /// designed to allow HashMaps to be resistant to attacks that cause many
    /// collisions and very poor performance. Setting it manually using this
    /// function can expose a DoS attack vector.
    ///
    /// The `hash_builder` passed should implement the [`BuildHasher`] trait
    /// for the HashMap to be useful, see its documentation for details.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
    /// map.insert(1, 2);
    /// ```
    ///
    /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
        Self {
            hash_builder,
            table: RawTable::with_capacity(Global, capacity),
        }
    }
}
impl<K, V, S, A: AllocRef + Clone> HashMap<K, V, S, A> {
/// Creates an empty `HashMap` that hashes keys with `hash_builder` and
/// allocates from `alloc`.
///
/// The created map has the default initial capacity.
///
/// Warning: `hash_builder` is normally randomly generated, and is designed to
/// allow HashMaps to be resistant to attacks that cause many collisions and
/// very poor performance. Setting it manually using this function can expose
/// a DoS attack vector.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let s = DefaultHashBuilder::default();
/// let mut map = HashMap::with_hasher(s);
/// map.insert(1, 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self {
    Self {
        hash_builder,
        table: RawTable::new_in(alloc),
    }
}
/// Creates an empty `HashMap` able to hold at least `capacity` elements
/// without reallocating, hashing keys with `hash_builder` and allocating from
/// `alloc`. A `capacity` of 0 allocates nothing.
///
/// Warning: `hash_builder` is normally randomly generated, and is designed to
/// allow HashMaps to be resistant to attacks that cause many collisions and
/// very poor performance. Setting it manually using this function can expose
/// a DoS attack vector.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let s = DefaultHashBuilder::default();
/// let mut map = HashMap::with_capacity_and_hasher(10, s);
/// map.insert(1, 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn with_capacity_and_hasher_in(capacity: usize, hash_builder: S, alloc: A) -> Self {
    Self {
        hash_builder,
        table: RawTable::with_capacity(alloc, capacity),
    }
}
/// Returns a reference to the map's [`BuildHasher`].
///
/// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let hasher = DefaultHashBuilder::default();
/// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
/// let hasher: &DefaultHashBuilder = map.hasher();
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn hasher(&self) -> &S {
    // Shared borrow of the stored builder; no hashing happens here.
    &self.hash_builder
}
/// Returns the number of elements the map can hold without reallocating.
///
/// This number is a lower bound; the `HashMap<K, V>` might be able to hold
/// more, but is guaranteed to be able to hold at least this many.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// assert!(map.capacity() >= 100);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn capacity(&self) -> usize {
    // Delegates to the raw table, which owns the actual bucket allocation.
    self.table.capacity()
}
/// An iterator visiting all keys in arbitrary order.
/// The iterator element type is `&'a K`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for key in map.keys() {
///     println!("{}", key);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn keys(&self) -> Keys<'_, K, V> {
    // Built on `iter`; `Keys` projects out the key of each pair.
    Keys { inner: self.iter() }
}
/// An iterator visiting all values in arbitrary order.
/// The iterator element type is `&'a V`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for val in map.values() {
///     println!("{}", val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn values(&self) -> Values<'_, K, V> {
    // Built on `iter`; `Values` projects out the value of each pair.
    Values { inner: self.iter() }
}
/// An iterator visiting all values mutably in arbitrary order.
/// The iterator element type is `&'a mut V`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for val in map.values_mut() {
///     *val = *val + 10;
/// }
///
/// for val in map.values() {
///     println!("{}", val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
    // Built on `iter_mut`; `ValuesMut` projects out the value of each pair.
    ValuesMut {
        inner: self.iter_mut(),
    }
}
/// An iterator visiting all key-value pairs in arbitrary order.
/// The iterator element type is `(&'a K, &'a V)`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for (key, val) in map.iter() {
///     println!("key: {} val: {}", key, val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn iter(&self) -> Iter<'_, K, V> {
    // The raw iterator does not itself borrow the table, so the
    // `PhantomData` marker ties it back to `self`'s lifetime.
    let inner = unsafe { self.table.iter() };
    Iter {
        inner,
        marker: PhantomData,
    }
}
/// An iterator visiting all key-value pairs in arbitrary order, with mutable
/// references to the values.
/// The iterator element type is `(&'a K, &'a mut V)`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// // Update all values
/// for (_, val) in map.iter_mut() {
///     *val *= 2;
/// }
///
/// for (key, val) in &map {
///     println!("key: {} val: {}", key, val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
    // The raw iterator does not itself borrow the table, so the
    // `PhantomData` marker ties it back to `self`'s (mutable) lifetime.
    let inner = unsafe { self.table.iter() };
    IterMut {
        inner,
        marker: PhantomData,
    }
}
    /// Returns the number of buckets allocated in the underlying raw table
    /// (always >= `len`). Test-only helper used to observe growth behavior.
    #[cfg(test)]
    #[cfg_attr(feature = "inline-more", inline)]
    fn raw_capacity(&self) -> usize {
        self.table.buckets()
    }
    /// Returns the number of elements in the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// assert_eq!(a.len(), 0);
    /// a.insert(1, "a");
    /// assert_eq!(a.len(), 1);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn len(&self) -> usize {
        // The raw table tracks the element count; this is O(1).
        self.table.len()
    }
    /// Returns `true` if the map contains no elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// assert!(a.is_empty());
    /// a.insert(1, "a");
    /// assert!(!a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
/// Clears the map, returning all key-value pairs as an iterator. Keeps the
/// allocated memory for reuse.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// a.insert(1, "a");
/// a.insert(2, "b");
///
/// for (k, v) in a.drain().take(1) {
/// assert!(k == 1 || k == 2);
/// assert!(v == "a" || v == "b");
/// }
///
/// assert!(a.is_empty());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn drain(&mut self) -> Drain<'_, K, V, A> {
Drain {
inner: self.table.drain(),
}
}
    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
    /// map.retain(|&k, _| k % 2 == 0);
    /// assert_eq!(map.len(), 4);
    /// ```
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        // Here we only use `iter` as a temporary, preventing use-after-free
        // SAFETY: the raw iterator never escapes this scope, and `erase` on the
        // current item is the only mutation performed while iterating — it does
        // not reallocate or move other entries, so the iterator stays valid.
        unsafe {
            for item in self.table.iter() {
                // `as_mut` yields exclusive access to the stored `(K, V)` pair;
                // the key is only read, the value may be mutated by `f`.
                let &mut (ref key, ref mut value) = item.as_mut();
                if !f(key, value) {
                    self.table.erase(item);
                }
            }
        }
    }
    /// Drains elements which are true under the given predicate,
    /// and returns an iterator over the removed items.
    ///
    /// In other words, move all pairs `(k, v)` such that `f(&k,&mut v)` returns `true` out
    /// into another iterator.
    ///
    /// When the returned DrainedFilter is dropped, any remaining elements that satisfy
    /// the predicate are dropped from the table.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
    /// let drained: HashMap<i32, i32> = map.drain_filter(|k, _v| k % 2 == 0).collect();
    ///
    /// let mut evens = drained.keys().cloned().collect::<Vec<_>>();
    /// let mut odds = map.keys().cloned().collect::<Vec<_>>();
    /// evens.sort();
    /// odds.sort();
    ///
    /// assert_eq!(evens, vec![0, 2, 4, 6]);
    /// assert_eq!(odds, vec![1, 3, 5, 7]);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn drain_filter<F>(&mut self, f: F) -> DrainFilter<'_, K, V, F, A>
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        DrainFilter {
            f,
            inner: DrainFilterInner {
                // SAFETY: the raw iterator is captured *before* the exclusive
                // `&mut self.table` borrow below; both live inside the returned
                // `DrainFilter`, whose `'_` lifetime pins `self` for the
                // iterator's whole life. Removal via `table` while stepping
                // `iter` is the access pattern `DrainFilterInner::next` relies on.
                iter: unsafe { self.table.iter() },
                table: &mut self.table,
            },
        }
    }
    /// Clears the map, removing all key-value pairs. Keeps the allocated memory
    /// for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// a.insert(1, "a");
    /// a.clear();
    /// assert!(a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn clear(&mut self) {
        // Drops every entry in place; the bucket allocation is retained.
        self.table.clear();
    }
}
impl<K, V, S, A> HashMap<K, V, S, A>
where
    K: Eq + Hash,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    /// Reserves capacity for at least `additional` more elements to be inserted
    /// in the `HashMap`. The collection may reserve more space to avoid
    /// frequent reallocations.
    ///
    /// # Panics
    ///
    /// Panics if the new allocation size overflows [`usize`].
    ///
    /// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// map.reserve(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn reserve(&mut self, additional: usize) {
        // A resize must rehash every stored key, hence the hasher closure.
        let hasher = make_hasher(&self.hash_builder);
        self.table.reserve(additional, hasher);
    }
/// Tries to reserve capacity for at least `additional` more elements to be inserted
/// in the given `HashMap<K,V>`. The collection may reserve more space to avoid
/// frequent reallocations.
///
/// # Errors
///
/// If the capacity overflows, or the allocator reports a failure, then an error
/// is returned.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let mut map: HashMap<&str, isize> = HashMap::new();
/// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
self.table
.try_reserve(additional, make_hasher(&self.hash_builder))
}
/// Shrinks the capacity of the map as much as possible. It will drop
/// down as much as possible while maintaining the internal rules
/// and possibly leaving some space in accordance with the resize policy.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// map.shrink_to_fit();
/// assert!(map.capacity() >= 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn shrink_to_fit(&mut self) {
self.table.shrink_to(0, make_hasher(&self.hash_builder));
}
/// Shrinks the capacity of the map with a lower limit. It will drop
/// down no lower than the supplied limit while maintaining the internal rules
/// and possibly leaving some space in accordance with the resize policy.
///
/// This function does nothing if the current capacity is smaller than the
/// supplied minimum capacity.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// map.shrink_to(10);
/// assert!(map.capacity() >= 10);
/// map.shrink_to(0);
/// assert!(map.capacity() >= 2);
/// map.shrink_to(10);
/// assert!(map.capacity() >= 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn shrink_to(&mut self, min_capacity: usize) {
self.table
.shrink_to(min_capacity, make_hasher(&self.hash_builder));
}
/// Gets the given key's corresponding entry in the map for in-place manipulation.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut letters = HashMap::new();
///
/// for ch in "a short treatise on fungi".chars() {
/// let counter = letters.entry(ch).or_insert(0);
/// *counter += 1;
/// }
///
/// assert_eq!(letters[&'s'], 2);
/// assert_eq!(letters[&'t'], 3);
/// assert_eq!(letters[&'u'], 1);
/// assert_eq!(letters.get(&'y'), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> {
let hash = make_hash(&self.hash_builder, &key);
if let Some(elem) = self.table.find(hash, equivalent_key(&key)) {
Entry::Occupied(OccupiedEntry {
hash,
key: Some(key),
elem,
table: self,
})
} else {
Entry::Vacant(VacantEntry {
hash,
key,
table: self,
})
}
}
/// Returns a reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
/// ```
#[inline]
pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Avoid `Option::map` because it bloats LLVM IR.
match self.get_inner(k) {
Some(&(_, ref v)) => Some(v),
None => None,
}
}
    /// Returns the key-value pair corresponding to the supplied key.
    ///
    /// The supplied key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
    /// assert_eq!(map.get_key_value(&2), None);
    /// ```
    #[inline]
    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Avoid `Option::map` because it bloats LLVM IR.
        match self.get_inner(k) {
            Some(&(ref key, ref value)) => Some((key, value)),
            None => None,
        }
    }
    /// Shared lookup helper: hashes `k` and returns a reference to the full
    /// stored `(K, V)` pair, if present. Backs `get`, `get_key_value` and
    /// `contains_key`.
    #[inline]
    fn get_inner<Q: ?Sized>(&self, k: &Q) -> Option<&(K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        let hash = make_hash(&self.hash_builder, k);
        self.table.get(hash, equivalent_key(k))
    }
/// Returns the key-value pair corresponding to the supplied key, with a mutable reference to value.
///
/// The supplied key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// let (k, v) = map.get_key_value_mut(&1).unwrap();
/// assert_eq!(k, &1);
/// assert_eq!(v, &mut "a");
/// *v = "b";
/// assert_eq!(map.get_key_value_mut(&1), Some((&1, &mut "b")));
/// assert_eq!(map.get_key_value_mut(&2), None);
/// ```
#[inline]
pub fn get_key_value_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<(&K, &mut V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Avoid `Option::map` because it bloats LLVM IR.
match self.get_inner_mut(k) {
Some(&mut (ref key, ref mut value)) => Some((key, value)),
None => None,
}
}
    /// Returns `true` if the map contains a value for the specified key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// assert_eq!(map.contains_key(&1), true);
    /// assert_eq!(map.contains_key(&2), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // A successful lookup is all that matters; the pair itself is discarded.
        self.get_inner(k).is_some()
    }
    /// Returns a mutable reference to the value corresponding to the key.
    ///
    /// The key may be any borrowed form of the map's key type, but
    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
    /// the key type.
    ///
    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert(1, "a");
    /// if let Some(x) = map.get_mut(&1) {
    ///     *x = "b";
    /// }
    /// assert_eq!(map[&1], "b");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Avoid `Option::map` because it bloats LLVM IR.
        match self.get_inner_mut(k) {
            // Only the value half is exposed mutably; the key must stay fixed.
            Some(&mut (_, ref mut v)) => Some(v),
            None => None,
        }
    }
    /// Shared mutable-lookup helper: hashes `k` and returns exclusive access to
    /// the full stored `(K, V)` pair, if present. Backs `get_mut`,
    /// `get_key_value_mut` and `insert`.
    #[inline]
    fn get_inner_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut (K, V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        let hash = make_hash(&self.hash_builder, k);
        self.table.get_mut(hash, equivalent_key(k))
    }
    /// Inserts a key-value pair into the map.
    ///
    /// If the map did not have this key present, [`None`] is returned.
    ///
    /// If the map did have this key present, the value is updated, and the old
    /// value is returned. The key is not updated, though; this matters for
    /// types that can be `==` without being identical. See the [module-level
    /// documentation] for more.
    ///
    /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
    /// [module-level documentation]: index.html#insert-and-complex-keys
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// assert_eq!(map.insert(37, "a"), None);
    /// assert_eq!(map.is_empty(), false);
    ///
    /// map.insert(37, "b");
    /// assert_eq!(map.insert(37, "c"), Some("b"));
    /// assert_eq!(map[&37], "c");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, k: K, v: V) -> Option<V> {
        // `k` is owned here, so `&k` is the `&K` that `make_hash` expects.
        let hash = make_hash(&self.hash_builder, &k);
        if let Some((_, item)) = self.table.get_mut(hash, equivalent_key(&k)) {
            // Key already present: swap the value in place (the stored key is
            // kept, per the documented contract) and return the old value.
            Some(mem::replace(item, v))
        } else {
            // Reuses `hash`; the hasher closure is only needed if the insert
            // triggers a resize (which must rehash existing keys).
            self.table
                .insert(hash, (k, v), make_hasher(&self.hash_builder));
            None
        }
    }
/// Removes a key from the map, returning the value at the key if the key
/// was previously in the map.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Avoid `Option::map` because it bloats LLVM IR.
match self.remove_entry(k) {
Some((_, v)) => Some(v),
None => None,
}
}
/// Removes a key from the map, returning the stored key and value if the
/// key was previously in the map.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.remove_entry(&1), Some((1, "a")));
/// assert_eq!(map.remove(&1), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, &k);
self.table.remove_entry(hash, equivalent_key(k))
}
}
impl<K, V, S, A: AllocRef + Clone> HashMap<K, V, S, A> {
    /// Creates a raw entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched. After this, insertions into a vacant entry
    /// still require an owned key to be provided.
    ///
    /// Raw entries are useful for such exotic situations as:
    ///
    /// * Hash memoization
    /// * Deferring the creation of an owned key until it is known to be required
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Because raw entries provide much more low-level control, it's much easier
    /// to put the HashMap into an inconsistent state which, while memory-safe,
    /// will cause the map to produce seemingly random results. Higher-level and
    /// more foolproof APIs like `entry` should be preferred when possible.
    ///
    /// In particular, the hash used to initialized the raw entry must still be
    /// consistent with the hash of the key that is ultimately stored in the entry.
    /// This is because implementations of HashMap may need to recompute hashes
    /// when resizing, at which point only the keys are available.
    ///
    /// Raw entries give mutable access to the keys. This must not be used
    /// to modify how the key would compare or hash, as the map will not re-evaluate
    /// where the key should go, meaning the keys may become "lost" if their
    /// location does not reflect their state. For instance, if you change a key
    /// so that the map now contains keys which compare equal, search may start
    /// acting erratically, with two keys randomly masking each other. Implementations
    /// are free to assume this doesn't happen (within the limits of memory-safety).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S, A> {
        // The builder just captures `&mut self`; the search happens later.
        RawEntryBuilderMut { map: self }
    }
    /// Creates a raw immutable entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched.
    ///
    /// This is useful for
    /// * Hash memoization
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Unless you are in such a situation, higher-level and more foolproof APIs like
    /// `get` should be preferred.
    ///
    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S, A> {
        // The builder just captures `&self`; the search happens later.
        RawEntryBuilder { map: self }
    }
}
impl<K, V, S, A> PartialEq for HashMap<K, V, S, A>
where
    K: Eq + Hash,
    V: PartialEq,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    /// Two maps are equal when they hold the same key set and each key maps
    /// to an equal value; hasher and allocator state are ignored.
    fn eq(&self, other: &Self) -> bool {
        if self.len() != other.len() {
            return false;
        }
        // With equal lengths, `self ⊆ other` implies equality.
        for (key, value) in self.iter() {
            match other.get(key) {
                Some(v) if *v == *value => {}
                _ => return false,
            }
        }
        true
    }
}
// `Eq` is a marker on top of `PartialEq`; it holds whenever the values
// themselves are `Eq`, since map equality is built from value equality.
impl<K, V, S, A> Eq for HashMap<K, V, S, A>
where
    K: Eq + Hash,
    V: Eq,
    S: BuildHasher,
    A: AllocRef + Clone,
{
}
impl<K, V, S, A> Debug for HashMap<K, V, S, A>
where
    K: Debug,
    V: Debug,
    A: AllocRef + Clone,
{
    /// Formats the map as `{key: value, ...}` using the entries' `Debug` impls.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_map();
        builder.entries(self.iter());
        builder.finish()
    }
}
impl<K, V, S, A> Default for HashMap<K, V, S, A>
where
    S: Default,
    A: Default + AllocRef + Clone,
{
    /// Creates an empty `HashMap<K, V, S, A>`, with the `Default` value for the hasher and allocator.
    #[cfg_attr(feature = "inline-more", inline)]
    fn default() -> Self {
        Self::with_hasher_in(S::default(), A::default())
    }
}
impl<K, Q: ?Sized, V, S, A> Index<&Q> for HashMap<K, V, S, A>
where
    K: Eq + Hash + Borrow<Q>,
    Q: Eq + Hash,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    type Output = V;
    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `HashMap`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn index(&self, key: &Q) -> &V {
        // Indexing is the panicking counterpart of `get`.
        self.get(key).expect("no entry found for key")
    }
}
/// An iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter`]: struct.HashMap.html#method.iter
/// [`HashMap`]: struct.HashMap.html
pub struct Iter<'a, K, V> {
    // Raw table iterator; carries no lifetime of its own.
    inner: RawIter<(K, V)>,
    // Ties the iterator to the map's borrow so the table cannot be dropped
    // or mutated while this iterator is alive.
    marker: PhantomData<(&'a K, &'a V)>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
// (a derive would wrongly require `K: Clone, V: Clone`).
impl<K, V> Clone for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Iter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug-printing walks a clone, leaving this iterator's position intact.
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A mutable iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
/// [`HashMap`]: struct.HashMap.html
pub struct IterMut<'a, K, V> {
    inner: RawIter<(K, V)>,
    // To ensure invariance with respect to V
    marker: PhantomData<(&'a K, &'a mut V)>,
}
// We override the default Send impl which has K: Sync instead of K: Send. Both
// are correct, but this one is more general since it allows keys which
// implement Send but not Sync.
unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
impl<K, V> IterMut<'_, K, V> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // Downgrades to a shared view; the clone starts at the current position.
        Iter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
/// An owning iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.HashMap.html#method.into_iter
/// [`HashMap`]: struct.HashMap.html
pub struct IntoIter<K, V, A: AllocRef + Clone = Global> {
    // Owns the table's allocation; entries are moved out as iteration proceeds.
    inner: RawIntoIter<(K, V), A>,
}
impl<K, V, A: AllocRef + Clone> IntoIter<K, V, A> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// An iterator over the keys of a `HashMap`.
///
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`keys`]: struct.HashMap.html#method.keys
/// [`HashMap`]: struct.HashMap.html
pub struct Keys<'a, K, V = Global> {
inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Keys<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
fn clone(&self) -> Self {
Keys {
inner: self.inner.clone(),
}
}
}
impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.clone()).finish()
}
}
/// An iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values`]: struct.HashMap.html#method.values
/// [`HashMap`]: struct.HashMap.html
pub struct Values<'a, K, V> {
    // Wraps the full-entry iterator and projects out the value half.
    inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
// (a derive would wrongly require `K: Clone, V: Clone`).
impl<K, V> Clone for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Values {
            inner: self.inner.clone(),
        }
    }
}
impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug-printing walks a clone, leaving this iterator's position intact.
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A draining iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain`]: struct.HashMap.html#method.drain
/// [`HashMap`]: struct.HashMap.html
pub struct Drain<'a, K, V, A: AllocRef + Clone = Global> {
    // Borrows the table mutably; on drop, any unyielded entries are removed.
    inner: RawDrain<'a, (K, V), A>,
}
impl<K, V, A: AllocRef + Clone> Drain<'_, K, V, A> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// A draining iterator over entries of a `HashMap` which don't satisfy the predicate `f`.
///
/// This `struct` is created by the [`drain_filter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain_filter`]: struct.HashMap.html#method.drain_filter
/// [`HashMap`]: struct.HashMap.html
pub struct DrainFilter<'a, K, V, F, A: AllocRef + Clone = Global>
where
    F: FnMut(&K, &mut V) -> bool,
{
    // The caller-supplied predicate; entries it accepts are moved out.
    f: F,
    // Iteration/removal state shared with `set::DrainFilter`.
    inner: DrainFilterInner<'a, K, V, A>,
}
impl<'a, K, V, F, A> Drop for DrainFilter<'a, K, V, F, A>
where
    F: FnMut(&K, &mut V) -> bool,
    A: AllocRef + Clone,
{
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        // Consume every remaining matching entry so the documented "drop
        // removes the rest" contract holds.
        while let Some(item) = self.next() {
            // Panic safety: if dropping `item` panics, the guard's own `Drop`
            // still consumes (and thus removes) the remaining matching
            // entries; on the normal path the guard is defused via `forget`.
            let guard = ConsumeAllOnDrop(self);
            drop(item);
            mem::forget(guard);
        }
    }
}
/// Guard that exhausts the wrapped iterator when dropped; used by the
/// `DrainFilter` destructors for panic safety.
pub(super) struct ConsumeAllOnDrop<'a, T: Iterator>(pub &'a mut T);
impl<T: Iterator> Drop for ConsumeAllOnDrop<'_, T> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        // Pull every remaining item and drop it immediately.
        self.0.for_each(drop)
    }
}
impl<K, V, F, A> Iterator for DrainFilter<'_, K, V, F, A>
where
    F: FnMut(&K, &mut V) -> bool,
    A: AllocRef + Clone,
{
    type Item = (K, V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<Self::Item> {
        // Delegates to the shared inner state, feeding it the predicate.
        self.inner.next(&mut self.f)
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound is 0 because the predicate may reject every remaining
        // entry; the upper bound is whatever the raw iterator still covers.
        (0, self.inner.iter.size_hint().1)
    }
}
impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
/// Portions of `DrainFilter` shared with `set::DrainFilter`
pub(super) struct DrainFilterInner<'a, K, V, A: AllocRef + Clone> {
    // Raw cursor over the table's buckets.
    pub iter: RawIter<(K, V)>,
    // Exclusive access to the table, used to remove matching entries.
    pub table: &'a mut RawTable<(K, V), A>,
}
impl<K, V, A: AllocRef + Clone> DrainFilterInner<'_, K, V, A> {
    /// Advances to the next entry accepted by `f`, removes it from the table
    /// and returns it by value; `None` once the iterator is exhausted.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn next<F>(&mut self, f: &mut F) -> Option<(K, V)>
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        // SAFETY: `iter` was created from `table` (see `drain_filter`), and
        // removing the *current* item does not invalidate the iterator's
        // position over the remaining buckets.
        unsafe {
            while let Some(item) = self.iter.next() {
                let &mut (ref key, ref mut value) = item.as_mut();
                if f(key, value) {
                    return Some(self.table.remove(item));
                }
            }
        }
        None
    }
}
/// A mutable iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: struct.HashMap.html#method.values_mut
/// [`HashMap`]: struct.HashMap.html
pub struct ValuesMut<'a, K, V> {
    // Wraps the mutable full-entry iterator and projects out the value half.
    inner: IterMut<'a, K, V>,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
pub struct RawEntryBuilderMut<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Exclusive borrow of the map; consumed by the `from_*` search methods.
    map: &'a mut HashMap<K, V, S, A>,
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This is a lower-level version of [`Entry`].
///
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
pub enum RawEntryMut<'a, K, V, S, A: AllocRef + Clone> {
/// An occupied entry.
Occupied(RawOccupiedEntryMut<'a, K, V, S, A>),
/// A vacant entry.
Vacant(RawVacantEntryMut<'a, K, V, S, A>),
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawOccupiedEntryMut<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Location of the found `(K, V)` pair inside the table.
    elem: Bucket<(K, V)>,
    table: &'a mut RawTable<(K, V), A>,
    hash_builder: &'a S,
}
// Manual impl: the raw `Bucket` pointer suppresses the auto trait, but the
// entry is semantically an exclusive borrow of the stored `(K, V)`.
unsafe impl<K, V, S, A> Send for RawOccupiedEntryMut<'_, K, V, S, A>
where
    K: Send,
    V: Send,
    A: Send + AllocRef + Clone,
{
}
// Manual impl: the raw `Bucket` pointer suppresses the auto trait.
// Fixed: the allocator bound was `A: Send`, copy-pasted from the `Send` impl
// above. Sharing `&RawOccupiedEntryMut` across threads shares `A` (through
// `&RawTable`) by reference, which is what `A: Sync` guarantees.
// NOTE(review): `S` is only held as `&'a S` here and is not bounded — worth
// confirming against upstream whether `S: Sync` should also be required.
unsafe impl<K, V, S, A> Sync for RawOccupiedEntryMut<'_, K, V, S, A>
where
    K: Sync,
    V: Sync,
    A: Sync + AllocRef + Clone,
{
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawVacantEntryMut<'a, K, V, S, A: AllocRef + Clone = Global> {
    // No bucket yet: inserting will pick a slot using a caller-supplied hash.
    table: &'a mut RawTable<(K, V), A>,
    hash_builder: &'a S,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
pub struct RawEntryBuilder<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Shared borrow of the map; read-only searches only.
    map: &'a HashMap<K, V, S, A>,
}
impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilderMut<'a, K, V, S, A> {
    /// Creates a `RawEntryMut` from the given key, hashing it with the map's
    /// own hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S, A>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        let hash = {
            let mut state = self.map.hash_builder.build_hasher();
            k.hash(&mut state);
            state.finish()
        };
        self.from_key_hashed_nocheck(hash, k)
    }
    /// Creates a `RawEntryMut` from the given key and its hash. The caller is
    /// responsible for the hash matching the map's hasher.
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A>
    where
        K: Borrow<Q>,
        Q: Eq,
    {
        self.from_hash(hash, equivalent(k))
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilderMut<'a, K, V, S, A> {
    /// Creates a `RawEntryMut` from the given hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        self.search(hash, is_match)
    }
    /// Probes the table for `hash`, testing candidate keys with `is_match`.
    /// Consumes the builder so the returned entry inherits the full `'a`
    /// borrow of the map.
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        // Only the key half of each stored pair is shown to the matcher.
        match self.map.table.find(hash, |(k, _)| is_match(k)) {
            Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut {
                elem,
                table: &mut self.map.table,
                hash_builder: &self.map.hash_builder,
            }),
            None => RawEntryMut::Vacant(RawVacantEntryMut {
                table: &mut self.map.table,
                hash_builder: &self.map.hash_builder,
            }),
        }
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilder<'a, K, V, S, A> {
    /// Access an entry by key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Hash with the map's own hasher, then defer to the nocheck variant.
        let mut hasher = self.map.hash_builder.build_hasher();
        k.hash(&mut hasher);
        self.from_key_hashed_nocheck(hasher.finish(), k)
    }
    /// Access an entry by a key and its hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
    where
        K: Borrow<Q>,
        Q: Eq,
    {
        self.from_hash(hash, equivalent(k))
    }
    /// Probes the table for `hash`; consumes the builder so the returned
    /// references inherit the full `'a` borrow of the map.
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        match self.map.table.get(hash, |(k, _)| is_match(k)) {
            Some(&(ref key, ref value)) => Some((key, value)),
            None => None,
        }
    }
    /// Access an entry by hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        self.search(hash, is_match)
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawEntryMut<'a, K, V, S, A> {
    /// Sets the value of the entry, and returns a RawOccupiedEntryMut.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
    ///
    /// assert_eq!(entry.remove_entry(), ("horseyland", 37));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S, A>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
            RawEntryMut::Occupied(mut entry) => {
                // Overwrite the value in place; `key` is dropped, the stored
                // key is kept, and the occupied entry itself is returned.
                entry.insert(value);
                entry
            }
        }
    }
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// mutable references to the key and value in the entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
/// assert_eq!(map["poneyland"], 3);
///
/// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
/// assert_eq!(map["poneyland"], 6);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
where
K: Hash,
S: BuildHasher,
{
match self {
RawEntryMut::Occupied(entry) => entry.into_key_value(),
RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
}
}
/// Ensures a value is in the entry by inserting the result of the default function if empty,
/// and returns mutable references to the key and value in the entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, String> = HashMap::new();
///
/// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
/// ("poneyland", "hoho".to_string())
/// });
///
/// assert_eq!(map["poneyland"], "hoho".to_string());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
where
F: FnOnce() -> (K, V),
K: Hash,
S: BuildHasher,
{
match self {
RawEntryMut::Occupied(entry) => entry.into_key_value(),
RawEntryMut::Vacant(entry) => {
let (k, v) = default();
entry.insert(k, v)
}
}
}
/// Provides in-place mutable access to an occupied entry before any
/// potential inserts into the map.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// map.raw_entry_mut()
/// .from_key("poneyland")
/// .and_modify(|_k, v| { *v += 1 })
/// .or_insert("poneyland", 42);
/// assert_eq!(map["poneyland"], 42);
///
/// map.raw_entry_mut()
/// .from_key("poneyland")
/// .and_modify(|_k, v| { *v += 1 })
/// .or_insert("poneyland", 0);
/// assert_eq!(map["poneyland"], 43);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut K, &mut V),
{
match self {
RawEntryMut::Occupied(mut entry) => {
{
let (k, v) = entry.get_key_value_mut();
f(k, v);
}
RawEntryMut::Occupied(entry)
}
RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
}
}
/// Provides shared access to the key and owned access to the value of
/// an occupied entry and allows to replace or remove it based on the
/// value of the returned option.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::RawEntryMut;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// let entry = map
/// .raw_entry_mut()
/// .from_key("poneyland")
/// .and_replace_entry_with(|_k, _v| panic!());
///
/// match entry {
/// RawEntryMut::Vacant(_) => {},
/// RawEntryMut::Occupied(_) => panic!(),
/// }
///
/// map.insert("poneyland", 42);
///
/// let entry = map
/// .raw_entry_mut()
/// .from_key("poneyland")
/// .and_replace_entry_with(|k, v| {
/// assert_eq!(k, &"poneyland");
/// assert_eq!(v, 42);
/// Some(v + 1)
/// });
///
/// match entry {
/// RawEntryMut::Occupied(e) => {
/// assert_eq!(e.key(), &"poneyland");
/// assert_eq!(e.get(), &43);
/// },
/// RawEntryMut::Vacant(_) => panic!(),
/// }
///
/// assert_eq!(map["poneyland"], 43);
///
/// let entry = map
/// .raw_entry_mut()
/// .from_key("poneyland")
/// .and_replace_entry_with(|_k, _v| None);
///
/// match entry {
/// RawEntryMut::Vacant(_) => {},
/// RawEntryMut::Occupied(_) => panic!(),
/// }
///
/// assert!(!map.contains_key("poneyland"));
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn and_replace_entry_with<F>(self, f: F) -> Self
where
F: FnOnce(&K, V) -> Option<V>,
{
match self {
RawEntryMut::Occupied(entry) => entry.replace_entry_with(f),
RawEntryMut::Vacant(_) => self,
}
}
}
// Accessors and mutators for an occupied raw entry. NOTE(review): every
// `unsafe` block below dereferences `self.elem`, a bucket that presumably
// stays valid for 'a because this struct holds the table's exclusive borrow —
// confirm against the `RawOccupiedEntryMut` definition and its constructors.
impl<'a, K, V, S, A: AllocRef + Clone> RawOccupiedEntryMut<'a, K, V, S, A> {
    /// Gets a reference to the key in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        unsafe { &self.elem.as_ref().0 }
    }
    /// Gets a mutable reference to the key in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key_mut(&mut self) -> &mut K {
        unsafe { &mut self.elem.as_mut().0 }
    }
    /// Converts the entry into a mutable reference to the key in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> &'a mut K {
        unsafe { &mut self.elem.as_mut().0 }
    }
    /// Gets a reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        unsafe { &self.elem.as_ref().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Gets a reference to the key and value in the entry.
    // NOTE(review): takes `&mut self` although it only reads; relaxing to
    // `&self` would be a backward-compatible improvement.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value(&mut self) -> (&K, &V) {
        unsafe {
            let &(ref key, ref value) = self.elem.as_ref();
            (key, value)
        }
    }
    /// Gets a mutable reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
        unsafe {
            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
            (key, value)
        }
    }
    /// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
        unsafe {
            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
            (key, value)
        }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, value: V) -> V {
        mem::replace(self.get_mut(), value)
    }
    /// Sets the key of the entry, and returns the entry's old key.
    // (Doc fixed: this replaces the *key*, not the value.)
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_key(&mut self, key: K) -> K {
        mem::replace(self.key_mut(), key)
    }
    /// Takes the value out of the entry, and returns it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }
    /// Take the ownership of the key and value from the map.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        // Here `self.table` is the raw table itself (unlike `OccupiedEntry`,
        // which goes through the `HashMap` wrapper).
        unsafe { self.table.remove(self.elem) }
    }
    /// Provides shared access to the key and owned access to the value of
    /// the entry and allows to replace or remove it based on the
    /// value of the returned option.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry_with<F>(self, f: F) -> RawEntryMut<'a, K, V, S, A>
    where
        F: FnOnce(&K, V) -> Option<V>,
    {
        unsafe {
            // `replace_bucket_with` reports whether the bucket is still
            // occupied after `f` ran; `f` returning `None` deletes the entry.
            let still_occupied = self
                .table
                .replace_bucket_with(self.elem.clone(), |(key, value)| {
                    f(&key, value).map(|new_value| (key, new_value))
                });
            if still_occupied {
                RawEntryMut::Occupied(self)
            } else {
                RawEntryMut::Vacant(RawVacantEntryMut {
                    table: self.table,
                    hash_builder: self.hash_builder,
                })
            }
        }
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawVacantEntryMut<'a, K, V, S, A> {
    /// Inserts the given key and value into the map,
    /// and returns mutable references to them.
    // (Doc fixed: the raw vacant entry does not own a key; the caller
    // supplies it here.)
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        // Hash the key with the map's hasher, then defer to the variant that
        // takes a precomputed hash.
        let mut hasher = self.hash_builder.build_hasher();
        key.hash(&mut hasher);
        self.insert_hashed_nocheck(hasher.finish(), key, value)
    }
    /// Inserts the given key and value into the map using the caller-supplied
    /// hash, and returns mutable references to them.
    ///
    /// The hash is not verified to correspond to `key`.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::shadow_unrelated)]
    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        let &mut (ref mut k, ref mut v) =
            self.table
                .insert_entry(hash, (key, value), make_hasher(self.hash_builder));
        (k, v)
    }
    /// Set the value of an entry with a custom hasher function.
    ///
    /// `hasher` is handed to the raw table alongside the insert — presumably
    /// for re-hashing existing entries on resize (confirm with
    /// `RawTable::insert_entry`).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_with_hasher<H>(
        self,
        hash: u64,
        key: K,
        value: V,
        hasher: H,
    ) -> (&'a mut K, &'a mut V)
    where
        H: Fn(&K) -> u64,
    {
        let &mut (ref mut k, ref mut v) = self
            .table
            .insert_entry(hash, (key, value), |x| hasher(&x.0));
        (k, v)
    }
    // Like `insert`, but returns an occupied-entry view instead of plain
    // references; used by `RawEntryMut::insert`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S, A>
    where
        K: Hash,
        S: BuildHasher,
    {
        let mut hasher = self.hash_builder.build_hasher();
        key.hash(&mut hasher);
        let elem = self.table.insert(
            hasher.finish(),
            (key, value),
            make_hasher(self.hash_builder),
        );
        RawOccupiedEntryMut {
            elem,
            table: self.table,
            hash_builder: self.hash_builder,
        }
    }
}
impl<K, V, S, A: AllocRef + Clone> Debug for RawEntryBuilderMut<'_, K, V, S, A> {
    // Opaque formatting: the builder carries no user-visible state.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("RawEntryBuilder");
        builder.finish()
    }
}
impl<K: Debug, V: Debug, S, A: AllocRef + Clone> Debug for RawEntryMut<'_, K, V, S, A> {
    // Both variants print as a `RawEntry(..)` tuple wrapping the inner entry.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut tuple = f.debug_tuple("RawEntry");
        match self {
            RawEntryMut::Vacant(v) => tuple.field(v),
            RawEntryMut::Occupied(o) => tuple.field(o),
        };
        tuple.finish()
    }
}
impl<K: Debug, V: Debug, S, A: AllocRef + Clone> Debug for RawOccupiedEntryMut<'_, K, V, S, A> {
    // Shows the entry as a struct with its current key and value.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("RawOccupiedEntryMut");
        s.field("key", self.key());
        s.field("value", self.get());
        s.finish()
    }
}
impl<K, V, S, A: AllocRef + Clone> Debug for RawVacantEntryMut<'_, K, V, S, A> {
    // Vacant entries have nothing printable; shown as an opaque struct.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("RawVacantEntryMut");
        s.finish()
    }
}
impl<K, V, S, A: AllocRef + Clone> Debug for RawEntryBuilder<'_, K, V, S, A> {
    // Same opaque rendering as the mutable builder.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("RawEntryBuilder");
        builder.finish()
    }
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`entry`]: struct.HashMap.html#method.entry
pub enum Entry<'a, K, V, S, A>
where
    A: AllocRef + Clone,
{
    /// An occupied entry: the key is already present in the map.
    Occupied(OccupiedEntry<'a, K, V, S, A>),
    /// A vacant entry: the key is not in the map yet.
    Vacant(VacantEntry<'a, K, V, S, A>),
}
impl<K: Debug, V: Debug, S, A: AllocRef + Clone> Debug for Entry<'_, K, V, S, A> {
    // Either variant renders as an `Entry(..)` tuple around the inner entry.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut tuple = f.debug_tuple("Entry");
        match self {
            Entry::Vacant(v) => tuple.field(v),
            Entry::Occupied(o) => tuple.field(o),
        };
        tuple.finish()
    }
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct OccupiedEntry<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Hash of the entry's key; reused when the entry is turned back into a
    // `VacantEntry` by `replace_entry_with`.
    hash: u64,
    // Presumably the owned key passed to `HashMap::entry` (TODO confirm the
    // construction site); `replace_entry`/`replace_key` unwrap it, and the
    // `VacantEntry::insert_entry` path stores `None`.
    key: Option<K>,
    // Bucket holding the (key, value) pair inside `table`.
    elem: Bucket<(K, V)>,
    table: &'a mut HashMap<K, V, S, A>,
}
// SAFETY(review): `OccupiedEntry` holds a `Bucket` (which presumably contains
// a raw pointer that suppresses the auto impl — confirm against the `Bucket`
// definition) plus a mutable map borrow; requiring K, V, S and A to all be
// `Send` restores the obvious structural condition.
unsafe impl<K, V, S, A> Send for OccupiedEntry<'_, K, V, S, A>
where
    K: Send,
    V: Send,
    S: Send,
    A: Send + AllocRef + Clone,
{
}
// SAFETY(review): mirror of the `Send` impl above — shared references to the
// entry only hand out data reachable from K, V, S and A, so `Sync` on those
// components suffices. Confirm against the `Bucket` definition.
unsafe impl<K, V, S, A> Sync for OccupiedEntry<'_, K, V, S, A>
where
    K: Sync,
    V: Sync,
    S: Sync,
    A: Sync + AllocRef + Clone,
{
}
impl<K: Debug, V: Debug, S, A: AllocRef + Clone> Debug for OccupiedEntry<'_, K, V, S, A> {
    // Struct-style rendering of the current key and value.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("OccupiedEntry");
        s.field("key", self.key());
        s.field("value", self.get());
        s.finish()
    }
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct VacantEntry<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Precomputed hash of `key`, so `insert` does not need to rehash.
    hash: u64,
    // The key that will be stored if this entry is filled.
    key: K,
    table: &'a mut HashMap<K, V, S, A>,
}
impl<K: Debug, V, S, A: AllocRef + Clone> Debug for VacantEntry<'_, K, V, S, A> {
    // Only the pending key is printable; no value exists yet.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut tuple = f.debug_tuple("VacantEntry");
        tuple.field(self.key());
        tuple.finish()
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> IntoIterator for &'a HashMap<K, V, S, A> {
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
#[cfg_attr(feature = "inline-more", inline)]
fn into_iter(self) -> Iter<'a, K, V> {
self.iter()
}
}
impl<'a, K, V, S, A: AllocRef + Clone> IntoIterator for &'a mut HashMap<K, V, S, A> {
type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
#[cfg_attr(feature = "inline-more", inline)]
fn into_iter(self) -> IterMut<'a, K, V> {
self.iter_mut()
}
}
impl<K, V, S, A: AllocRef + Clone> IntoIterator for HashMap<K, V, S, A> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V, A>;
    /// Creates a consuming iterator, that is, one that moves each key-value
    /// pair out of the map in arbitrary order. The map cannot be used after
    /// calling this.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // Not possible with .iter()
    /// let vec: Vec<(&str, i32)> = map.into_iter().collect();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IntoIter<K, V, A> {
        let inner = self.table.into_iter();
        IntoIter { inner }
    }
}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        // Written as `if let` rather than `Option::map` to keep LLVM IR small.
        if let Some(bucket) = self.inner.next() {
            // SAFETY(review): the bucket comes from the live table iterator,
            // so dereferencing for 'a relies on the raw iterator's contract.
            let &(ref key, ref value) = unsafe { bucket.as_ref() };
            Some((key, value))
        } else {
            None
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
    // Exact: the inner raw-table iterator tracks the remaining item count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Fused: once `next` returns `None` it keeps returning `None`.
impl<K, V> FusedIterator for Iter<'_, K, V> {}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        // Written as `if let` rather than `Option::map` to keep LLVM IR small.
        if let Some(bucket) = self.inner.next() {
            // SAFETY(review): bucket validity for 'a relies on the raw
            // iterator's contract; the key stays shared, the value is mutable.
            let &mut (ref key, ref mut value) = unsafe { bucket.as_mut() };
            Some((key, value))
        } else {
            None
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
    // Exact: delegates to the inner raw-table iterator's count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Fused: once `next` returns `None` it keeps returning `None`.
impl<K, V> FusedIterator for IterMut<'_, K, V> {}
impl<K, V> fmt::Debug for IterMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    // Prints the remaining entries via a borrowing view, without consuming
    // the iterator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut list = f.debug_list();
        list.entries(self.iter());
        list.finish()
    }
}
impl<K, V, A: AllocRef + Clone> Iterator for IntoIter<K, V, A> {
    type Item = (K, V);
    // Pure delegation: the consuming raw-table iterator yields owned pairs.
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V, A: AllocRef + Clone> ExactSizeIterator for IntoIter<K, V, A> {
    // Exact: delegates to the inner raw-table iterator's count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Fused: once `next` returns `None` it keeps returning `None`.
impl<K, V, A: AllocRef + Clone> FusedIterator for IntoIter<K, V, A> {}
impl<K: Debug, V: Debug, A: AllocRef + Clone> fmt::Debug for IntoIter<K, V, A> {
    // Formats the not-yet-consumed pairs via the borrowing `iter()` helper.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut list = f.debug_list();
        list.entries(self.iter());
        list.finish()
    }
}
impl<'a, K, V> Iterator for Keys<'a, K, V> {
    type Item = &'a K;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a K> {
        // Written as `if let` rather than `Option::map` to keep LLVM IR small.
        if let Some((key, _)) = self.inner.next() {
            Some(key)
        } else {
            None
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
    // Exact: delegates to the inner entry iterator's count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Fused: once `next` returns `None` it keeps returning `None`.
impl<K, V> FusedIterator for Keys<'_, K, V> {}
impl<'a, K, V> Iterator for Values<'a, K, V> {
    type Item = &'a V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a V> {
        // Written as `if let` rather than `Option::map` to keep LLVM IR small.
        if let Some((_, value)) = self.inner.next() {
            Some(value)
        } else {
            None
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Values<'_, K, V> {
    // Exact: delegates to the inner entry iterator's count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Fused: once `next` returns `None` it keeps returning `None`.
impl<K, V> FusedIterator for Values<'_, K, V> {}
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
    type Item = &'a mut V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a mut V> {
        // Written as `if let` rather than `Option::map` to keep LLVM IR small.
        if let Some((_, value)) = self.inner.next() {
            Some(value)
        } else {
            None
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
    // Exact: delegates to the inner entry iterator's count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Fused: once `next` returns `None` it keeps returning `None`.
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    // NOTE(review): this goes through the inner iterator's borrowing view,
    // so it prints (key, value) pairs rather than values only — behavior
    // preserved from the original.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut list = f.debug_list();
        list.entries(self.inner.iter());
        list.finish()
    }
}
impl<'a, K, V, A: AllocRef + Clone> Iterator for Drain<'a, K, V, A> {
    type Item = (K, V);
    // Pure delegation: the inner draining iterator yields owned pairs.
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V, A: AllocRef + Clone> ExactSizeIterator for Drain<'_, K, V, A> {
    // Exact: delegates to the inner draining iterator's count.
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Fused: once `next` returns `None` it keeps returning `None`.
impl<K, V, A: AllocRef + Clone> FusedIterator for Drain<'_, K, V, A> {}
impl<K, V, A> fmt::Debug for Drain<'_, K, V, A>
where
    K: fmt::Debug,
    V: fmt::Debug,
    A: AllocRef + Clone,
{
    // Shows the not-yet-drained entries via a borrowing view.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut list = f.debug_list();
        list.entries(self.iter());
        list.finish()
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> Entry<'a, K, V, S, A> {
/// Sets the value of the entry, and returns an OccupiedEntry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// let entry = map.entry("horseyland").insert(37);
///
/// assert_eq!(entry.key(), &"horseyland");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S, A>
where
K: Hash,
S: BuildHasher,
{
match self {
Entry::Occupied(mut entry) => {
entry.insert(value);
entry
}
Entry::Vacant(entry) => entry.insert_entry(value),
}
}
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// map.entry("poneyland").or_insert(3);
/// assert_eq!(map["poneyland"], 3);
///
/// *map.entry("poneyland").or_insert(10) *= 2;
/// assert_eq!(map["poneyland"], 6);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn or_insert(self, default: V) -> &'a mut V
where
K: Hash,
S: BuildHasher,
{
match self {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => entry.insert(default),
}
}
/// Ensures a value is in the entry by inserting the result of the default function if empty,
/// and returns a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, String> = HashMap::new();
/// let s = "hoho".to_string();
///
/// map.entry("poneyland").or_insert_with(|| s);
///
/// assert_eq!(map["poneyland"], "hoho".to_string());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
where
K: Hash,
S: BuildHasher,
{
match self {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => entry.insert(default()),
}
}
/// Ensures a value is in the entry by inserting, if empty, the result of the default function,
/// which takes the key as its argument, and returns a mutable reference to the value in the
/// entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, usize> = HashMap::new();
///
/// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
///
/// assert_eq!(map["poneyland"], 9);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V
where
K: Hash,
S: BuildHasher,
{
match self {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => {
let value = default(entry.key());
entry.insert(value)
}
}
}
/// Returns a reference to this entry's key.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn key(&self) -> &K {
match *self {
Entry::Occupied(ref entry) => entry.key(),
Entry::Vacant(ref entry) => entry.key(),
}
}
/// Provides in-place mutable access to an occupied entry before any
/// potential inserts into the map.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// map.entry("poneyland")
/// .and_modify(|e| { *e += 1 })
/// .or_insert(42);
/// assert_eq!(map["poneyland"], 42);
///
/// map.entry("poneyland")
/// .and_modify(|e| { *e += 1 })
/// .or_insert(42);
/// assert_eq!(map["poneyland"], 43);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut V),
{
match self {
Entry::Occupied(mut entry) => {
f(entry.get_mut());
Entry::Occupied(entry)
}
Entry::Vacant(entry) => Entry::Vacant(entry),
}
}
/// Provides shared access to the key and owned access to the value of
/// an occupied entry and allows to replace or remove it based on the
/// value of the returned option.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// let entry = map
/// .entry("poneyland")
/// .and_replace_entry_with(|_k, _v| panic!());
///
/// match entry {
/// Entry::Vacant(e) => {
/// assert_eq!(e.key(), &"poneyland");
/// }
/// Entry::Occupied(_) => panic!(),
/// }
///
/// map.insert("poneyland", 42);
///
/// let entry = map
/// .entry("poneyland")
/// .and_replace_entry_with(|k, v| {
/// assert_eq!(k, &"poneyland");
/// assert_eq!(v, 42);
/// Some(v + 1)
/// });
///
/// match entry {
/// Entry::Occupied(e) => {
/// assert_eq!(e.key(), &"poneyland");
/// assert_eq!(e.get(), &43);
/// }
/// Entry::Vacant(_) => panic!(),
/// }
///
/// assert_eq!(map["poneyland"], 43);
///
/// let entry = map
/// .entry("poneyland")
/// .and_replace_entry_with(|_k, _v| None);
///
/// match entry {
/// Entry::Vacant(e) => assert_eq!(e.key(), &"poneyland"),
/// Entry::Occupied(_) => panic!(),
/// }
///
/// assert!(!map.contains_key("poneyland"));
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn and_replace_entry_with<F>(self, f: F) -> Self
where
F: FnOnce(&K, V) -> Option<V>,
{
match self {
Entry::Occupied(entry) => entry.replace_entry_with(f),
Entry::Vacant(_) => self,
}
}
}
impl<'a, K, V: Default, S, A: AllocRef + Clone> Entry<'a, K, V, S, A> {
    /// Returns a mutable reference to the value, first inserting `V::default()`
    /// if the entry is vacant.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
    /// map.entry("poneyland").or_default();
    ///
    /// assert_eq!(map["poneyland"], None);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_default(self) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Vacant(entry) => entry.insert(V::default()),
            Entry::Occupied(entry) => entry.into_mut(),
        }
    }
}
// NOTE(review): the `unsafe` blocks below dereference `self.elem`, a bucket
// that presumably stays valid for 'a because this entry holds the map's
// exclusive borrow — confirm against the `OccupiedEntry` definition.
impl<'a, K, V, S, A: AllocRef + Clone> OccupiedEntry<'a, K, V, S, A> {
    /// Gets a reference to the key in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        unsafe { &self.elem.as_ref().0 }
    }
    /// Take the ownership of the key and value from the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    /// // We delete the entry from the map.
    /// o.remove_entry();
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        // `self.table` is the HashMap wrapper; `.table` is its raw table.
        unsafe { self.table.table.remove(self.elem) }
    }
    /// Gets a reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    /// assert_eq!(o.get(), &12);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        unsafe { &self.elem.as_ref().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    ///
    /// If you need a reference to the `OccupiedEntry` which may outlive the
    /// destruction of the `Entry` value, see [`into_mut`].
    ///
    /// [`into_mut`]: #method.into_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    /// *o.get_mut() += 10;
    /// assert_eq!(*o.get(), 22);
    ///
    /// // We can use the same Entry multiple times.
    /// *o.get_mut() += 2;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 24);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    ///
    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
    ///
    /// [`get_mut`]: #method.get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    /// *o.into_mut() += 10;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 22);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    /// assert_eq!(o.insert(15), 12);
    /// }
    ///
    /// assert_eq!(map["poneyland"], 15);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, mut value: V) -> V {
        // Swap the incoming value into the slot and return the old one.
        let old_value = self.get_mut();
        mem::swap(&mut value, old_value);
        value
    }
    /// Takes the value out of the entry, and returns it.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    /// assert_eq!(o.remove(), 12);
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }
    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
    /// the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// map.insert(Rc::new("Stringthing".to_string()), 15);
    ///
    /// let my_key = Rc::new("Stringthing".to_string());
    ///
    /// if let Entry::Occupied(entry) = map.entry(my_key) {
    /// // Also replace the key with a handle to our other key.
    /// let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
    /// }
    ///
    /// ```
    // NOTE(review): `self.key` is unwrapped here, so it must be `Some` on this
    // path — presumably guaranteed by `HashMap::entry` storing the caller's
    // key; confirm every construction site.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry(self, value: V) -> (K, V) {
        let entry = unsafe { self.elem.as_mut() };
        let old_key = mem::replace(&mut entry.0, self.key.unwrap());
        let old_value = mem::replace(&mut entry.1, value);
        (old_key, old_value)
    }
    /// Replaces the key in the hash map with the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// let mut known_strings: Vec<Rc<String>> = Vec::new();
    ///
    /// // Initialise known strings, run program, etc.
    ///
    /// reclaim_memory(&mut map, &known_strings);
    ///
    /// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) {
    /// for s in known_strings {
    /// if let Entry::Occupied(entry) = map.entry(s.clone()) {
    /// // Replaces the entry's key with our version of it in `known_strings`.
    /// entry.replace_key();
    /// }
    /// }
    /// }
    /// ```
    // NOTE(review): same `self.key.unwrap()` precondition as `replace_entry`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_key(self) -> K {
        let entry = unsafe { self.elem.as_mut() };
        mem::replace(&mut entry.0, self.key.unwrap())
    }
    /// Provides shared access to the key and owned access to the value of
    /// the entry and allows to replace or remove it based on the
    /// value of the returned option.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.insert("poneyland", 42);
    ///
    /// let entry = match map.entry("poneyland") {
    /// Entry::Occupied(e) => {
    /// e.replace_entry_with(|k, v| {
    /// assert_eq!(k, &"poneyland");
    /// assert_eq!(v, 42);
    /// Some(v + 1)
    /// })
    /// }
    /// Entry::Vacant(_) => panic!(),
    /// };
    ///
    /// match entry {
    /// Entry::Occupied(e) => {
    /// assert_eq!(e.key(), &"poneyland");
    /// assert_eq!(e.get(), &43);
    /// }
    /// Entry::Vacant(_) => panic!(),
    /// }
    ///
    /// assert_eq!(map["poneyland"], 43);
    ///
    /// let entry = match map.entry("poneyland") {
    /// Entry::Occupied(e) => e.replace_entry_with(|_k, _v| None),
    /// Entry::Vacant(_) => panic!(),
    /// };
    ///
    /// match entry {
    /// Entry::Vacant(e) => {
    /// assert_eq!(e.key(), &"poneyland");
    /// }
    /// Entry::Occupied(_) => panic!(),
    /// }
    ///
    /// assert!(!map.contains_key("poneyland"));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry_with<F>(self, f: F) -> Entry<'a, K, V, S, A>
    where
        F: FnOnce(&K, V) -> Option<V>,
    {
        unsafe {
            // When `f` returns `None` the bucket is emptied; the closure
            // smuggles the removed key out through `spare_key` so that a
            // `VacantEntry` (reusing the same hash) can be handed back.
            let mut spare_key = None;
            self.table
                .table
                .replace_bucket_with(self.elem.clone(), |(key, value)| {
                    if let Some(new_value) = f(&key, value) {
                        Some((key, new_value))
                    } else {
                        spare_key = Some(key);
                        None
                    }
                });
            if let Some(key) = spare_key {
                Entry::Vacant(VacantEntry {
                    hash: self.hash,
                    key,
                    table: self.table,
                })
            } else {
                Entry::Occupied(self)
            }
        }
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> VacantEntry<'a, K, V, S, A> {
    /// Returns a reference to the key that would be used when inserting a
    /// value through this `VacantEntry`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        &self.key
    }
    /// Takes ownership of the key, consuming the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// if let Entry::Vacant(v) = map.entry("poneyland") {
    ///     v.into_key();
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> K {
        self.key
    }
    /// Inserts `value` under the `VacantEntry`'s key and returns a mutable
    /// reference to the stored value.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// if let Entry::Vacant(v) = map.entry("poneyland") {
    ///     v.insert(37);
    /// }
    /// assert_eq!(map["poneyland"], 37);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // The hash was already computed when the entry was created, so it is
        // passed straight through to the raw table.
        let raw = &mut self.table.table;
        let bucket = raw.insert_entry(
            self.hash,
            (self.key, value),
            make_hasher(&self.table.hash_builder),
        );
        &mut bucket.1
    }
    /// Like `insert`, but yields an `OccupiedEntry` for the freshly inserted
    /// pair instead of a value reference.
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S, A>
    where
        K: Hash,
        S: BuildHasher,
    {
        let bucket = self.table.table.insert(
            self.hash,
            (self.key, value),
            make_hasher(&self.table.hash_builder),
        );
        OccupiedEntry {
            hash: self.hash,
            // The key has just been moved into the table, so the entry holds none.
            key: None,
            elem: bucket,
            table: self.table,
        }
    }
}
impl<K, V, S, A> FromIterator<(K, V)> for HashMap<K, V, S, A>
where
    K: Eq + Hash,
    S: BuildHasher + Default,
    A: Default + AllocRef + Clone,
{
    /// Builds a map from an iterator of key-value pairs, pre-sizing it from
    /// the iterator's lower size bound to avoid repeated reallocation.
    #[cfg_attr(feature = "inline-more", inline)]
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let pairs = iter.into_iter();
        let mut map =
            Self::with_capacity_and_hasher_in(pairs.size_hint().0, S::default(), A::default());
        for (k, v) in pairs {
            map.insert(k, v);
        }
        map
    }
}
/// Inserts all new key-values from the iterator and replaces values with existing
/// keys with new values returned from the iterator.
impl<K, V, S, A> Extend<(K, V)> for HashMap<K, V, S, A>
where
    K: Eq + Hash,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        let iter = iter.into_iter();
        // Some keys may already be present or may repeat in the iterator, so
        // the size hint can overestimate. An empty map reserves the full
        // lower bound; a non-empty one reserves half of it (rounded up),
        // which bounds the number of resizes at two in the worst case.
        let hint = iter.size_hint().0;
        let reserve = if self.is_empty() { hint } else { (hint + 1) / 2 };
        self.reserve(reserve);
        for (k, v) in iter {
            self.insert(k, v);
        }
    }
    #[inline]
    #[cfg(feature = "nightly")]
    fn extend_one(&mut self, (k, v): (K, V)) {
        self.insert(k, v);
    }
    #[inline]
    #[cfg(feature = "nightly")]
    fn extend_reserve(&mut self, additional: usize) {
        // Same reservation policy as `extend` above: full amount when empty,
        // half (rounded up) otherwise, so the map resizes at most twice.
        let reserve = if self.is_empty() {
            additional
        } else {
            (additional + 1) / 2
        };
        self.reserve(reserve);
    }
}
impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap<K, V, S, A>
where
    K: Eq + Hash + Copy,
    V: Copy,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    /// Extends the map from borrowed pairs; `K` and `V` are `Copy`, so each
    /// pair is dereferenced and handed to the by-value `Extend` impl.
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
        let owned = iter.into_iter().map(|(k, v)| (*k, *v));
        self.extend(owned);
    }
    #[inline]
    #[cfg(feature = "nightly")]
    fn extend_one(&mut self, (k, v): (&'a K, &'a V)) {
        self.insert(*k, *v);
    }
    #[inline]
    #[cfg(feature = "nightly")]
    fn extend_reserve(&mut self, additional: usize) {
        Extend::<(K, V)>::extend_reserve(self, additional);
    }
}
// Compile-time-only check that the map and its iterator types are covariant
// in their key/value lifetime parameters: each function coerces a `'static`
// borrow to a shorter lifetime, which only compiles if variance is preserved.
// The function bodies are never executed.
#[allow(dead_code)]
fn assert_covariance() {
    fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
        v
    }
    fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
        v
    }
    fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
        v
    }
    fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
        v
    }
    fn into_iter_key<'new, A: AllocRef + Clone>(
        v: IntoIter<&'static str, u8, A>,
    ) -> IntoIter<&'new str, u8, A> {
        v
    }
    fn into_iter_val<'new, A: AllocRef + Clone>(
        v: IntoIter<u8, &'static str, A>,
    ) -> IntoIter<u8, &'new str, A> {
        v
    }
    fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
        v
    }
    fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
        v
    }
    fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
        v
    }
    fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
        v
    }
    fn drain<'new>(
        d: Drain<'static, &'static str, &'static str>,
    ) -> Drain<'new, &'new str, &'new str> {
        d
    }
}
#[cfg(test)]
mod test_map {
use super::DefaultHashBuilder;
use super::Entry::{Occupied, Vacant};
use super::{HashMap, RawEntryMut};
use crate::TryReserveError::*;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use std::cell::RefCell;
use std::usize;
use std::vec::Vec;
#[test]
fn test_zero_capacities() {
type HM = HashMap<i32, i32>;
let m = HM::new();
assert_eq!(m.capacity(), 0);
let m = HM::default();
assert_eq!(m.capacity(), 0);
let m = HM::with_hasher(DefaultHashBuilder::default());
assert_eq!(m.capacity(), 0);
let m = HM::with_capacity(0);
assert_eq!(m.capacity(), 0);
let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
assert_eq!(m.capacity(), 0);
let mut m = HM::new();
m.insert(1, 1);
m.insert(2, 2);
m.remove(&1);
m.remove(&2);
m.shrink_to_fit();
assert_eq!(m.capacity(), 0);
let mut m = HM::new();
m.reserve(0);
assert_eq!(m.capacity(), 0);
}
#[test]
fn test_create_capacity_zero() {
let mut m = HashMap::with_capacity(0);
assert!(m.insert(1, 1).is_none());
assert!(m.contains_key(&1));
assert!(!m.contains_key(&0));
}
#[test]
fn test_insert() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&2).unwrap(), 4);
}
#[test]
fn test_clone() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
let m2 = m.clone();
assert_eq!(*m2.get(&1).unwrap(), 2);
assert_eq!(*m2.get(&2).unwrap(), 4);
assert_eq!(m2.len(), 2);
}
#[test]
fn test_clone_from() {
let mut m = HashMap::new();
let mut m2 = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
m2.clone_from(&m);
assert_eq!(*m2.get(&1).unwrap(), 2);
assert_eq!(*m2.get(&2).unwrap(), 4);
assert_eq!(m2.len(), 2);
}
    // DROP_VECTOR[k] counts live `Droppable` instances with key `k`:
    // `Droppable::new` increments and `Drop` decrements, so leaks or double
    // drops show up as non-zero counts in the drop-tracking tests below.
    thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
    #[derive(Hash, PartialEq, Eq)]
    struct Droppable {
        k: usize,
    }
    impl Droppable {
        fn new(k: usize) -> Droppable {
            DROP_VECTOR.with(|slot| {
                slot.borrow_mut()[k] += 1;
            });
            Droppable { k }
        }
    }
    impl Drop for Droppable {
        fn drop(&mut self) {
            DROP_VECTOR.with(|slot| {
                slot.borrow_mut()[self.k] -= 1;
            });
        }
    }
    impl Clone for Droppable {
        fn clone(&self) -> Self {
            // Route through `new` so the clone is counted like any other instance.
            Droppable::new(self.k)
        }
    }
#[test]
fn test_drops() {
DROP_VECTOR.with(|slot| {
*slot.borrow_mut() = vec![0; 200];
});
{
let mut m = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0..100 {
let d1 = Droppable::new(i);
let d2 = Droppable::new(i + 100);
m.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for i in 0..50 {
let k = Droppable::new(i);
let v = m.remove(&k);
assert!(v.is_some());
DROP_VECTOR.with(|v| {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i + 100], 1);
});
}
DROP_VECTOR.with(|v| {
for i in 0..50 {
assert_eq!(v.borrow()[i], 0);
assert_eq!(v.borrow()[i + 100], 0);
}
for i in 50..100 {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i + 100], 1);
}
});
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
}
#[test]
fn test_into_iter_drops() {
DROP_VECTOR.with(|v| {
*v.borrow_mut() = vec![0; 200];
});
let hm = {
let mut hm = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0..100 {
let d1 = Droppable::new(i);
let d2 = Droppable::new(i + 100);
hm.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
hm
};
// By the way, ensure that cloning doesn't screw up the dropping.
drop(hm.clone());
{
let mut half = hm.into_iter().take(50);
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for _ in half.by_ref() {}
DROP_VECTOR.with(|v| {
let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
assert_eq!(nk, 50);
assert_eq!(nv, 50);
});
};
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
}
#[test]
fn test_empty_remove() {
let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.remove(&0), None);
}
#[test]
fn test_empty_entry() {
let mut m: HashMap<i32, bool> = HashMap::new();
match m.entry(0) {
Occupied(_) => panic!(),
Vacant(_) => {}
}
assert!(*m.entry(0).or_insert(true));
assert_eq!(m.len(), 1);
}
#[test]
fn test_empty_iter() {
let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.drain().next(), None);
assert_eq!(m.keys().next(), None);
assert_eq!(m.values().next(), None);
assert_eq!(m.values_mut().next(), None);
assert_eq!(m.iter().next(), None);
assert_eq!(m.iter_mut().next(), None);
assert_eq!(m.len(), 0);
assert!(m.is_empty());
assert_eq!(m.into_iter().next(), None);
}
#[test]
#[cfg_attr(miri, ignore)] // FIXME: takes too long
fn test_lots_of_insertions() {
let mut m = HashMap::new();
// Try this a few times to make sure we never screw up the hashmap's
// internal state.
for _ in 0..10 {
assert!(m.is_empty());
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
for j in 1..=i {
let r = m.get(&j);
assert_eq!(r, Some(&j));
}
for j in i + 1..1001 {
let r = m.get(&j);
assert_eq!(r, None);
}
}
for i in 1001..2001 {
assert!(!m.contains_key(&i));
}
// remove forwards
for i in 1..1001 {
assert!(m.remove(&i).is_some());
for j in 1..=i {
assert!(!m.contains_key(&j));
}
for j in i + 1..1001 {
assert!(m.contains_key(&j));
}
}
for i in 1..1001 {
assert!(!m.contains_key(&i));
}
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
}
// remove backwards
for i in (1..1001).rev() {
assert!(m.remove(&i).is_some());
for j in i..1001 {
assert!(!m.contains_key(&j));
}
for j in 1..i {
assert!(m.contains_key(&j));
}
}
}
}
#[test]
fn test_find_mut() {
let mut m = HashMap::new();
assert!(m.insert(1, 12).is_none());
assert!(m.insert(2, 8).is_none());
assert!(m.insert(5, 14).is_none());
let new = 100;
match m.get_mut(&5) {
None => panic!(),
Some(x) => *x = new,
}
assert_eq!(m.get(&5), Some(&new));
}
#[test]
fn test_insert_overwrite() {
let mut m = HashMap::new();
assert!(m.insert(1, 2).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert!(!m.insert(1, 3).is_none());
assert_eq!(*m.get(&1).unwrap(), 3);
}
#[test]
fn test_insert_conflicts() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert!(m.insert(5, 3).is_none());
assert!(m.insert(9, 4).is_none());
assert_eq!(*m.get(&9).unwrap(), 4);
assert_eq!(*m.get(&5).unwrap(), 3);
assert_eq!(*m.get(&1).unwrap(), 2);
}
#[test]
fn test_conflict_remove() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert!(m.insert(5, 3).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&5).unwrap(), 3);
assert!(m.insert(9, 4).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&5).unwrap(), 3);
assert_eq!(*m.get(&9).unwrap(), 4);
assert!(m.remove(&1).is_some());
assert_eq!(*m.get(&9).unwrap(), 4);
assert_eq!(*m.get(&5).unwrap(), 3);
}
#[test]
fn test_is_empty() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert!(!m.is_empty());
assert!(m.remove(&1).is_some());
assert!(m.is_empty());
}
#[test]
fn test_remove() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.remove(&1), Some(2));
assert_eq!(m.remove(&1), None);
}
#[test]
fn test_remove_entry() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.remove_entry(&1), Some((1, 2)));
assert_eq!(m.remove(&1), None);
}
#[test]
fn test_iterate() {
let mut m = HashMap::with_capacity(4);
for i in 0..32 {
assert!(m.insert(i, i * 2).is_none());
}
assert_eq!(m.len(), 32);
let mut observed: u32 = 0;
for (k, v) in &m {
assert_eq!(*v, *k * 2);
observed |= 1 << *k;
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_keys() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: HashMap<_, _> = vec.into_iter().collect();
let keys: Vec<_> = map.keys().cloned().collect();
assert_eq!(keys.len(), 3);
assert!(keys.contains(&1));
assert!(keys.contains(&2));
assert!(keys.contains(&3));
}
#[test]
fn test_values() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: HashMap<_, _> = vec.into_iter().collect();
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&'a'));
assert!(values.contains(&'b'));
assert!(values.contains(&'c'));
}
#[test]
fn test_values_mut() {
let vec = vec![(1, 1), (2, 2), (3, 3)];
let mut map: HashMap<_, _> = vec.into_iter().collect();
for value in map.values_mut() {
*value = (*value) * 2
}
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&2));
assert!(values.contains(&4));
assert!(values.contains(&6));
}
#[test]
fn test_find() {
let mut m = HashMap::new();
assert!(m.get(&1).is_none());
m.insert(1, 2);
match m.get(&1) {
None => panic!(),
Some(v) => assert_eq!(*v, 2),
}
}
#[test]
fn test_eq() {
let mut m1 = HashMap::new();
m1.insert(1, 2);
m1.insert(2, 3);
m1.insert(3, 4);
let mut m2 = HashMap::new();
m2.insert(1, 2);
m2.insert(2, 3);
assert!(m1 != m2);
m2.insert(3, 4);
assert_eq!(m1, m2);
}
#[test]
fn test_show() {
let mut map = HashMap::new();
let empty: HashMap<i32, i32> = HashMap::new();
map.insert(1, 2);
map.insert(3, 4);
let map_str = format!("{:?}", map);
assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
assert_eq!(format!("{:?}", empty), "{}");
}
#[test]
fn test_expand() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.is_empty());
let mut i = 0;
let old_raw_cap = m.raw_capacity();
while old_raw_cap == m.raw_capacity() {
m.insert(i, i);
i += 1;
}
assert_eq!(m.len(), i);
assert!(!m.is_empty());
}
#[test]
fn test_behavior_resize_policy() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert_eq!(m.raw_capacity(), 1);
assert!(m.is_empty());
m.insert(0, 0);
m.remove(&0);
assert!(m.is_empty());
let initial_raw_cap = m.raw_capacity();
m.reserve(initial_raw_cap);
let raw_cap = m.raw_capacity();
assert_eq!(raw_cap, initial_raw_cap * 2);
let mut i = 0;
for _ in 0..raw_cap * 3 / 4 {
m.insert(i, i);
i += 1;
}
// three quarters full
assert_eq!(m.len(), i);
assert_eq!(m.raw_capacity(), raw_cap);
for _ in 0..raw_cap / 4 {
m.insert(i, i);
i += 1;
}
// half full
let new_raw_cap = m.raw_capacity();
assert_eq!(new_raw_cap, raw_cap * 2);
for _ in 0..raw_cap / 2 - 1 {
i -= 1;
m.remove(&i);
assert_eq!(m.raw_capacity(), new_raw_cap);
}
// A little more than one quarter full.
m.shrink_to_fit();
assert_eq!(m.raw_capacity(), raw_cap);
// again, a little more than half full
for _ in 0..raw_cap / 2 {
i -= 1;
m.remove(&i);
}
m.shrink_to_fit();
assert_eq!(m.len(), i);
assert!(!m.is_empty());
assert_eq!(m.raw_capacity(), initial_raw_cap);
}
#[test]
fn test_reserve_shrink_to_fit() {
let mut m = HashMap::new();
m.insert(0, 0);
m.remove(&0);
assert!(m.capacity() >= m.len());
for i in 0..128 {
m.insert(i, i);
}
m.reserve(256);
let usable_cap = m.capacity();
for i in 128..(128 + 256) {
m.insert(i, i);
assert_eq!(m.capacity(), usable_cap);
}
for i in 100..(128 + 256) {
assert_eq!(m.remove(&i), Some(i));
}
m.shrink_to_fit();
assert_eq!(m.len(), 100);
assert!(!m.is_empty());
assert!(m.capacity() >= m.len());
for i in 0..100 {
assert_eq!(m.remove(&i), Some(i));
}
m.shrink_to_fit();
m.insert(0, 0);
assert_eq!(m.len(), 1);
assert!(m.capacity() >= m.len());
assert_eq!(m.remove(&0), Some(0));
}
#[test]
fn test_from_iter() {
let xs = [(1, 1), (2, 2), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
for &(k, v) in &xs {
assert_eq!(map.get(&k), Some(&v));
}
assert_eq!(map.iter().len(), xs.len() - 1);
}
#[test]
fn test_size_hint() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_len() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.len(), 3);
}
#[test]
fn test_mut_size_hint() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter_mut();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_mut_len() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter_mut();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.len(), 3);
}
#[test]
fn test_index() {
let mut map = HashMap::new();
map.insert(1, 2);
map.insert(2, 1);
map.insert(3, 4);
assert_eq!(map[&2], 1);
}
#[test]
#[should_panic]
fn test_index_nonexistent() {
let mut map = HashMap::new();
map.insert(1, 2);
map.insert(2, 1);
map.insert(3, 4);
map[&4];
}
#[test]
fn test_entry() {
let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
// Existing key (insert)
match map.entry(1) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
assert_eq!(view.get(), &10);
assert_eq!(view.insert(100), 10);
}
}
assert_eq!(map.get(&1).unwrap(), &100);
assert_eq!(map.len(), 6);
// Existing key (update)
match map.entry(2) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
let v = view.get_mut();
let new_v = (*v) * 10;
*v = new_v;
}
}
assert_eq!(map.get(&2).unwrap(), &200);
assert_eq!(map.len(), 6);
// Existing key (take)
match map.entry(3) {
Vacant(_) => unreachable!(),
Occupied(view) => {
assert_eq!(view.remove(), 30);
}
}
assert_eq!(map.get(&3), None);
assert_eq!(map.len(), 5);
// Inexistent key (insert)
match map.entry(10) {
Occupied(_) => unreachable!(),
Vacant(view) => {
assert_eq!(*view.insert(1000), 1000);
}
}
assert_eq!(map.get(&10).unwrap(), &1000);
assert_eq!(map.len(), 6);
}
#[test]
fn test_entry_take_doesnt_corrupt() {
#![allow(deprecated)] //rand
// Test for #19292
fn check(m: &HashMap<i32, ()>) {
for k in m.keys() {
assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
}
}
let mut m = HashMap::new();
let mut rng = {
let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
SmallRng::from_seed(seed)
};
// Populate the map with some items.
for _ in 0..50 {
let x = rng.gen_range(-10, 10);
m.insert(x, ());
}
for _ in 0..1000 {
let x = rng.gen_range(-10, 10);
match m.entry(x) {
Vacant(_) => {}
Occupied(e) => {
e.remove();
}
}
check(&m);
}
}
#[test]
fn test_extend_ref() {
let mut a = HashMap::new();
a.insert(1, "one");
let mut b = HashMap::new();
b.insert(2, "two");
b.insert(3, "three");
a.extend(&b);
assert_eq!(a.len(), 3);
assert_eq!(a[&1], "one");
assert_eq!(a[&2], "two");
assert_eq!(a[&3], "three");
}
#[test]
fn test_capacity_not_less_than_len() {
let mut a = HashMap::new();
let mut item = 0;
for _ in 0..116 {
a.insert(item, 0);
item += 1;
}
assert!(a.capacity() > a.len());
let free = a.capacity() - a.len();
for _ in 0..free {
a.insert(item, 0);
item += 1;
}
assert_eq!(a.len(), a.capacity());
// Insert at capacity should cause allocation.
a.insert(item, 0);
assert!(a.capacity() > a.len());
}
#[test]
fn test_occupied_entry_key() {
let mut a = HashMap::new();
let key = "hello there";
let value = "value goes here";
assert!(a.is_empty());
a.insert(key.clone(), value.clone());
assert_eq!(a.len(), 1);
assert_eq!(a[key], value);
match a.entry(key.clone()) {
Vacant(_) => panic!(),
Occupied(e) => assert_eq!(key, *e.key()),
}
assert_eq!(a.len(), 1);
assert_eq!(a[key], value);
}
#[test]
fn test_vacant_entry_key() {
let mut a = HashMap::new();
let key = "hello there";
let value = "value goes here";
assert!(a.is_empty());
match a.entry(key.clone()) {
Occupied(_) => panic!(),
Vacant(e) => {
assert_eq!(key, *e.key());
e.insert(value.clone());
}
}
assert_eq!(a.len(), 1);
assert_eq!(a[key], value);
}
#[test]
fn test_occupied_entry_replace_entry_with() {
let mut a = HashMap::new();
let key = "a key";
let value = "an initial value";
let new_value = "a new value";
let entry = a.entry(key).insert(value).replace_entry_with(|k, v| {
assert_eq!(k, &key);
assert_eq!(v, value);
Some(new_value)
});
match entry {
Occupied(e) => {
assert_eq!(e.key(), &key);
assert_eq!(e.get(), &new_value);
}
Vacant(_) => panic!(),
}
assert_eq!(a[key], new_value);
assert_eq!(a.len(), 1);
let entry = match a.entry(key) {
Occupied(e) => e.replace_entry_with(|k, v| {
assert_eq!(k, &key);
assert_eq!(v, new_value);
None
}),
Vacant(_) => panic!(),
};
match entry {
Vacant(e) => assert_eq!(e.key(), &key),
Occupied(_) => panic!(),
}
assert!(!a.contains_key(key));
assert_eq!(a.len(), 0);
}
#[test]
fn test_entry_and_replace_entry_with() {
let mut a = HashMap::new();
let key = "a key";
let value = "an initial value";
let new_value = "a new value";
let entry = a.entry(key).and_replace_entry_with(|_, _| panic!());
match entry {
Vacant(e) => assert_eq!(e.key(), &key),
Occupied(_) => panic!(),
}
a.insert(key, value);
let entry = a.entry(key).and_replace_entry_with(|k, v| {
assert_eq!(k, &key);
assert_eq!(v, value);
Some(new_value)
});
match entry {
Occupied(e) => {
assert_eq!(e.key(), &key);
assert_eq!(e.get(), &new_value);
}
Vacant(_) => panic!(),
}
assert_eq!(a[key], new_value);
assert_eq!(a.len(), 1);
let entry = a.entry(key).and_replace_entry_with(|k, v| {
assert_eq!(k, &key);
assert_eq!(v, new_value);
None
});
match entry {
Vacant(e) => assert_eq!(e.key(), &key),
Occupied(_) => panic!(),
}
assert!(!a.contains_key(key));
assert_eq!(a.len(), 0);
}
#[test]
fn test_raw_occupied_entry_replace_entry_with() {
let mut a = HashMap::new();
let key = "a key";
let value = "an initial value";
let new_value = "a new value";
let entry = a
.raw_entry_mut()
.from_key(&key)
.insert(key, value)
.replace_entry_with(|k, v| {
assert_eq!(k, &key);
assert_eq!(v, value);
Some(new_value)
});
match entry {
RawEntryMut::Occupied(e) => {
assert_eq!(e.key(), &key);
assert_eq!(e.get(), &new_value);
}
RawEntryMut::Vacant(_) => panic!(),
}
assert_eq!(a[key], new_value);
assert_eq!(a.len(), 1);
let entry = match a.raw_entry_mut().from_key(&key) {
RawEntryMut::Occupied(e) => e.replace_entry_with(|k, v| {
assert_eq!(k, &key);
assert_eq!(v, new_value);
None
}),
RawEntryMut::Vacant(_) => panic!(),
};
match entry {
RawEntryMut::Vacant(_) => {}
RawEntryMut::Occupied(_) => panic!(),
}
assert!(!a.contains_key(key));
assert_eq!(a.len(), 0);
}
#[test]
fn test_raw_entry_and_replace_entry_with() {
let mut a = HashMap::new();
let key = "a key";
let value = "an initial value";
let new_value = "a new value";
let entry = a
.raw_entry_mut()
.from_key(&key)
.and_replace_entry_with(|_, _| panic!());
match entry {
RawEntryMut::Vacant(_) => {}
RawEntryMut::Occupied(_) => panic!(),
}
a.insert(key, value);
let entry = a
.raw_entry_mut()
.from_key(&key)
.and_replace_entry_with(|k, v| {
assert_eq!(k, &key);
assert_eq!(v, value);
Some(new_value)
});
match entry {
RawEntryMut::Occupied(e) => {
assert_eq!(e.key(), &key);
assert_eq!(e.get(), &new_value);
}
RawEntryMut::Vacant(_) => panic!(),
}
assert_eq!(a[key], new_value);
assert_eq!(a.len(), 1);
let entry = a
.raw_entry_mut()
.from_key(&key)
.and_replace_entry_with(|k, v| {
assert_eq!(k, &key);
assert_eq!(v, new_value);
None
});
match entry {
RawEntryMut::Vacant(_) => {}
RawEntryMut::Occupied(_) => panic!(),
}
assert!(!a.contains_key(key));
assert_eq!(a.len(), 0);
}
#[test]
fn test_replace_entry_with_doesnt_corrupt() {
#![allow(deprecated)] //rand
// Test for #19292
fn check(m: &HashMap<i32, ()>) {
for k in m.keys() {
assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
}
}
let mut m = HashMap::new();
let mut rng = {
let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
SmallRng::from_seed(seed)
};
// Populate the map with some items.
for _ in 0..50 {
let x = rng.gen_range(-10, 10);
m.insert(x, ());
}
for _ in 0..1000 {
let x = rng.gen_range(-10, 10);
m.entry(x).and_replace_entry_with(|_, _| None);
check(&m);
}
}
#[test]
fn test_retain() {
let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
map.retain(|&k, _| k % 2 == 0);
assert_eq!(map.len(), 50);
assert_eq!(map[&2], 20);
assert_eq!(map[&4], 40);
assert_eq!(map[&6], 60);
}
#[test]
fn test_drain_filter() {
{
let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
let drained = map.drain_filter(|&k, _| k % 2 == 0);
let mut out = drained.collect::<Vec<_>>();
out.sort_unstable();
assert_eq!(vec![(0, 0), (2, 20), (4, 40), (6, 60)], out);
assert_eq!(map.len(), 4);
}
{
let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
drop(map.drain_filter(|&k, _| k % 2 == 0));
assert_eq!(map.len(), 4);
}
}
#[test]
#[cfg_attr(miri, ignore)] // FIXME: no OOM signalling (https://github.com/rust-lang/miri/issues/613)
fn test_try_reserve() {
let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
const MAX_USIZE: usize = usize::MAX;
if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
} else {
panic!("usize::MAX should trigger an overflow!");
}
if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
} else {
// This may succeed if there is enough free memory. Attempt to
// allocate a second hashmap to ensure the allocation will fail.
let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
if let Err(AllocError { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
} else {
panic!("usize::MAX / 8 should trigger an OOM!");
}
}
}
#[test]
fn test_raw_entry() {
use super::RawEntryMut::{Occupied, Vacant};
let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
use core::hash::{BuildHasher, Hash, Hasher};
let mut hasher = map.hasher().build_hasher();
k.hash(&mut hasher);
hasher.finish()
};
// Existing key (insert)
match map.raw_entry_mut().from_key(&1) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
assert_eq!(view.get(), &10);
assert_eq!(view.insert(100), 10);
}
}
let hash1 = compute_hash(&map, 1);
assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
assert_eq!(
map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
(&1, &100)
);
assert_eq!(
map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
(&1, &100)
);
assert_eq!(map.len(), 6);
// Existing key (update)
match map.raw_entry_mut().from_key(&2) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
let v = view.get_mut();
let new_v = (*v) * 10;
*v = new_v;
}
}
let hash2 = compute_hash(&map, 2);
assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
assert_eq!(
map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
(&2, &200)
);
assert_eq!(
map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
(&2, &200)
);
assert_eq!(map.len(), 6);
// Existing key (take)
let hash3 = compute_hash(&map, 3);
match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
Vacant(_) => unreachable!(),
Occupied(view) => {
assert_eq!(view.remove_entry(), (3, 30));
}
}
assert_eq!(map.raw_entry().from_key(&3), None);
assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
assert_eq!(map.len(), 5);
// Nonexistent key (insert)
match map.raw_entry_mut().from_key(&10) {
Occupied(_) => unreachable!(),
Vacant(view) => {
assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
}
}
assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
assert_eq!(map.len(), 6);
// Ensure all lookup methods produce equivalent results.
for k in 0..12 {
let hash = compute_hash(&map, k);
let v = map.get(&k).cloned();
let kv = v.as_ref().map(|v| (&k, v));
assert_eq!(map.raw_entry().from_key(&k), kv);
assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
match map.raw_entry_mut().from_key(&k) {
Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
Vacant(_) => assert_eq!(v, None),
}
match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
Vacant(_) => assert_eq!(v, None),
}
match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
Vacant(_) => assert_eq!(v, None),
}
}
}
#[test]
fn test_key_without_hash_impl() {
#[derive(Debug)]
struct IntWrapper(u64);
let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
{
assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
}
{
let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
RawEntryMut::Vacant(e) => e,
};
vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
}
{
assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
{
let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
RawEntryMut::Vacant(e) => e,
};
vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
}
{
assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
{
let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
RawEntryMut::Occupied(e) => e,
RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
};
occupied_entry.remove();
}
assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
#[test]
#[cfg(feature = "raw")]
fn test_into_iter_refresh() {
use core::hash::{BuildHasher, Hash, Hasher};
#[cfg(miri)]
const N: usize = 32;
#[cfg(not(miri))]
const N: usize = 128;
let mut rng = rand::thread_rng();
for n in 0..N {
let mut m = HashMap::new();
for i in 0..n {
assert!(m.insert(i, 2 * i).is_none());
}
let hasher = m.hasher().clone();
let mut it = unsafe { m.table.iter() };
assert_eq!(it.len(), n);
let mut i = 0;
let mut left = n;
let mut removed = Vec::new();
loop {
// occasionally remove some elements
if i < n && rng.gen_bool(0.1) {
let mut hsh = hasher.build_hasher();
i.hash(&mut hsh);
let hash = hsh.finish();
unsafe {
let e = m.table.find(hash, |q| q.0.eq(&i));
if let Some(e) = e {
it.reflect_remove(&e);
let t = m.table.remove(e);
removed.push(t);
left -= 1;
} else {
assert!(removed.contains(&(i, 2 * i)), "{} not in {:?}", i, removed);
let e = m
.table
.insert(hash, (i, 2 * i), super::make_hasher(&hasher));
it.reflect_insert(&e);
if let Some(p) = removed.iter().position(|e| e == &(i, 2 * i)) {
removed.swap_remove(p);
}
left += 1;
}
}
}
let e = it.next();
if e.is_none() {
break;
}
assert!(i < n);
let t = unsafe { e.unwrap().as_ref() };
assert!(!removed.contains(t));
let (k, v) = t;
assert_eq!(*v, 2 * k);
i += 1;
}
assert!(i <= n);
// just for safety:
assert_eq!(m.table.len(), left);
}
}
#[test]
fn test_const_with_hasher() {
use core::hash::BuildHasher;
use std::borrow::ToOwned;
use std::collections::hash_map::DefaultHasher;
#[derive(Clone)]
struct MyHasher;
impl BuildHasher for MyHasher {
type Hasher = DefaultHasher;
fn build_hasher(&self) -> DefaultHasher {
DefaultHasher::new()
}
}
const EMPTY_MAP: HashMap<u32, std::string::String, MyHasher> =
HashMap::with_hasher(MyHasher);
let mut map = EMPTY_MAP.clone();
map.insert(17, "seventeen".to_owned());
assert_eq!("seventeen", map[&17]);
}
}
Imported new documentation for `.or_insert_with_key`
The std `HashMap` and `BTreeMap` now have improved documentation for the `entry(...).or_insert_with_key(...)` method; this change copies that documentation into hashbrown.
use crate::raw::{AllocRef, Bucket, Global, RawDrain, RawIntoIter, RawIter, RawTable};
use crate::TryReserveError;
use core::borrow::Borrow;
use core::fmt::{self, Debug};
use core::hash::{BuildHasher, Hash, Hasher};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ops::Index;
/// Default hasher for `HashMap`.
#[cfg(feature = "ahash")]
pub type DefaultHashBuilder = ahash::RandomState;
/// Dummy default hasher for `HashMap`.
///
/// This uninhabited enum exists only so that the `S = DefaultHashBuilder`
/// type default on `HashMap` still names a type when the "ahash" feature is
/// disabled; no value of it can ever be constructed.
#[cfg(not(feature = "ahash"))]
pub enum DefaultHashBuilder {}
/// A hash map implemented with quadratic probing and SIMD lookup.
///
/// The default hashing algorithm is currently [`AHash`], though this is
/// subject to change at any point in the future. This hash function is very
/// fast for all types of keys, but this algorithm will typically *not* protect
/// against attacks such as HashDoS.
///
/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
///
/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
/// If you implement these yourself, it is important that the following
/// property holds:
///
/// ```text
/// k1 == k2 -> hash(k1) == hash(k2)
/// ```
///
/// In other words, if two keys are equal, their hashes must be equal.
///
/// It is a logic error for a key to be modified in such a way that the key's
/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
/// the [`Eq`] trait, changes while it is in the map. This is normally only
/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
/// It is also a logic error for the [`Hash`] implementation of a key to panic.
/// This is generally only possible if the trait is implemented manually. If a
/// panic does occur then the contents of the `HashMap` may become corrupted and
/// some items may be dropped from the table.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `HashMap<String, String>` in this example).
/// let mut book_reviews = HashMap::new();
///
/// // Review some books.
/// book_reviews.insert(
/// "Adventures of Huckleberry Finn".to_string(),
/// "My favorite book.".to_string(),
/// );
/// book_reviews.insert(
/// "Grimms' Fairy Tales".to_string(),
/// "Masterpiece.".to_string(),
/// );
/// book_reviews.insert(
/// "Pride and Prejudice".to_string(),
/// "Very enjoyable.".to_string(),
/// );
/// book_reviews.insert(
/// "The Adventures of Sherlock Holmes".to_string(),
/// "Eye lyked it alot.".to_string(),
/// );
///
/// // Check for a specific one.
/// // When collections store owned values (String), they can still be
/// // queried using references (&str).
/// if !book_reviews.contains_key("Les Misérables") {
/// println!("We've got {} reviews, but Les Misérables ain't one.",
/// book_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// book_reviews.remove("The Adventures of Sherlock Holmes");
///
/// // Look up the values associated with some keys.
/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
/// for &book in &to_find {
/// match book_reviews.get(book) {
/// Some(review) => println!("{}: {}", book, review),
/// None => println!("{} is unreviewed.", book)
/// }
/// }
///
/// // Look up the value for a key (will panic if the key is not found).
/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
///
/// // Iterate over everything.
/// for (book, review) in &book_reviews {
/// println!("{}: \"{}\"", book, review);
/// }
/// ```
///
/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
/// for more complex methods of getting, setting, updating and removing keys and
/// their values:
///
/// ```
/// use hashbrown::HashMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `HashMap<&str, u8>` in this example).
/// let mut player_stats = HashMap::new();
///
/// fn random_stat_buff() -> u8 {
/// // could actually return some random value here - let's just return
/// // some fixed value for now
/// 42
/// }
///
/// // insert a key only if it doesn't already exist
/// player_stats.entry("health").or_insert(100);
///
/// // insert a key using a function that provides a new value only if it
/// // doesn't already exist
/// player_stats.entry("defence").or_insert_with(random_stat_buff);
///
/// // update a key, guarding against the key possibly not being set
/// let stat = player_stats.entry("attack").or_insert(100);
/// *stat += random_stat_buff();
/// ```
///
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
/// [`default`]: #method.default
/// [`with_hasher`]: #method.with_hasher
/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
/// [`AHash`]: https://crates.io/crates/ahash
///
/// ```
/// use hashbrown::HashMap;
///
/// #[derive(Hash, Eq, PartialEq, Debug)]
/// struct Viking {
/// name: String,
/// country: String,
/// }
///
/// impl Viking {
/// /// Creates a new Viking.
/// fn new(name: &str, country: &str) -> Viking {
/// Viking { name: name.to_string(), country: country.to_string() }
/// }
/// }
///
/// // Use a HashMap to store the vikings' health points.
/// let mut vikings = HashMap::new();
///
/// vikings.insert(Viking::new("Einar", "Norway"), 25);
/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
///
/// // Use derived implementation to print the status of the vikings.
/// for (viking, health) in &vikings {
/// println!("{:?} has {} hp", viking, health);
/// }
/// ```
///
/// A `HashMap` with fixed list of elements can be initialized from an array:
///
/// ```
/// use hashbrown::HashMap;
///
/// let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)]
/// .iter().cloned().collect();
/// // use the values stored in map
/// ```
pub struct HashMap<K, V, S = DefaultHashBuilder, A: AllocRef + Clone = Global> {
    // Builder used to produce a fresh `Hasher` for every key hashed.
    pub(crate) hash_builder: S,
    // The underlying storage: a raw table of `(key, value)` pairs,
    // parameterized over the allocator `A`.
    pub(crate) table: RawTable<(K, V), A>,
}
impl<K: Clone, V: Clone, S: Clone> Clone for HashMap<K, V, S> {
fn clone(&self) -> Self {
HashMap {
hash_builder: self.hash_builder.clone(),
table: self.table.clone(),
}
}
fn clone_from(&mut self, source: &Self) {
self.table.clone_from(&source.table);
// Update hash_builder only if we successfully cloned all elements.
self.hash_builder.clone_from(&source.hash_builder);
}
}
/// Ensures that a single closure type is used across all uses of this, which
/// in turn prevents multiple instances of functions like `RawTable::reserve`
/// from being generated.
///
/// Returns a closure that hashes the key of a `(key, value)` pair with the
/// given build-hasher.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hasher<K: Hash, V>(
    hash_builder: &impl BuildHasher,
) -> impl Fn(&(K, V)) -> u64 + '_ {
    move |val| make_hash(hash_builder, &val.0)
}
/// Ensures that a single closure type is used across all uses of this, which
/// in turn prevents multiple instances of functions like `RawTable::reserve`
/// from being generated.
///
/// Returns a closure that compares a borrowed lookup key `k` against the key
/// of a stored `(K, V)` pair.
#[cfg_attr(feature = "inline-more", inline)]
fn equivalent_key<Q, K, V>(k: &Q) -> impl Fn(&(K, V)) -> bool + '_
where
    K: Borrow<Q>,
    Q: ?Sized + Eq,
{
    move |x| k.eq(x.0.borrow())
}
/// Ensures that a single closure type is used across all uses of this, which
/// in turn prevents multiple instances of functions like `RawTable::reserve`
/// from being generated.
///
/// Returns a closure that compares a borrowed lookup key `k` against a stored
/// key directly (no value attached, unlike `equivalent_key`).
#[cfg_attr(feature = "inline-more", inline)]
fn equivalent<Q, K>(k: &Q) -> impl Fn(&K) -> bool + '_
where
    K: Borrow<Q>,
    Q: ?Sized + Eq,
{
    move |x| k.eq(x.borrow())
}
/// Hashes `val` with a fresh hasher obtained from `hash_builder` and returns
/// the resulting 64-bit hash value.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hash<K: Hash + ?Sized>(hash_builder: &impl BuildHasher, val: &K) -> u64 {
    let mut hasher = hash_builder.build_hasher();
    val.hash(&mut hasher);
    hasher.finish()
}
#[cfg(feature = "ahash")]
impl<K, V> HashMap<K, V, DefaultHashBuilder> {
    /// Creates an empty `HashMap`.
    ///
    /// The hash map starts with a capacity of 0 and therefore performs no
    /// allocation until the first insertion.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new() -> Self {
        Default::default()
    }

    /// Creates an empty `HashMap` able to hold at least `capacity` elements
    /// without reallocating.
    ///
    /// If `capacity` is 0, the hash map will not allocate.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity(capacity: usize) -> Self {
        let hash_builder = DefaultHashBuilder::default();
        Self::with_capacity_and_hasher(capacity, hash_builder)
    }
}
#[cfg(feature = "ahash")]
impl<K, V, A: AllocRef + Clone> HashMap<K, V, DefaultHashBuilder, A> {
    /// Creates an empty `HashMap` backed by the given allocator.
    ///
    /// The hash map starts with a capacity of 0 and therefore performs no
    /// allocation until the first insertion.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new_in(alloc: A) -> Self {
        Self::with_hasher_in(DefaultHashBuilder::default(), alloc)
    }

    /// Creates an empty `HashMap` able to hold at least `capacity` elements
    /// without reallocating, backed by the given allocator.
    ///
    /// If `capacity` is 0, no allocation is performed.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        let hash_builder = DefaultHashBuilder::default();
        Self::with_capacity_and_hasher_in(capacity, hash_builder, alloc)
    }
}
impl<K, V, S> HashMap<K, V, S> {
    /// Creates an empty `HashMap` which will use the given hash builder to
    /// hash keys.
    ///
    /// The created map has the default initial capacity. Because this is a
    /// `const fn`, it can be used to initialize `const`/`static` items.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and
    /// is designed to allow HashMaps to be resistant to attacks that
    /// cause many collisions and very poor performance. Setting it
    /// manually using this function can expose a DoS attack vector.
    ///
    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
    /// the HashMap to be useful, see its documentation for details.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_hasher(s);
    /// map.insert(1, 2);
    /// ```
    ///
    /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
    #[cfg_attr(feature = "inline-more", inline)]
    pub const fn with_hasher(hash_builder: S) -> Self {
        Self {
            table: RawTable::new(),
            hash_builder,
        }
    }

    /// Creates an empty `HashMap` with the specified capacity, using
    /// `hash_builder` to hash the keys.
    ///
    /// The hash map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the hash map will not allocate.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and
    /// is designed to allow HashMaps to be resistant to attacks that
    /// cause many collisions and very poor performance. Setting it
    /// manually using this function can expose a DoS attack vector.
    ///
    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
    /// the HashMap to be useful, see its documentation for details.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
    /// map.insert(1, 2);
    /// ```
    ///
    /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
        Self {
            table: RawTable::with_capacity(Global, capacity),
            hash_builder,
        }
    }
}
impl<K, V, S, A: AllocRef + Clone> HashMap<K, V, S, A> {
    /// Creates an empty `HashMap` which will use the given hash builder to hash
    /// keys. It will be allocated with the given allocator.
    ///
    /// The created map has the default initial capacity.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and
    /// is designed to allow HashMaps to be resistant to attacks that
    /// cause many collisions and very poor performance. Setting it
    /// manually using this function can expose a DoS attack vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_hasher(s);
    /// map.insert(1, 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self {
        Self {
            hash_builder,
            table: RawTable::new_in(alloc),
        }
    }
    /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
    /// to hash the keys. It will be allocated with the given allocator.
    ///
    /// The hash map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the hash map will not allocate.
    ///
    /// Warning: `hash_builder` is normally randomly generated, and
    /// is designed to allow HashMaps to be resistant to attacks that
    /// cause many collisions and very poor performance. Setting it
    /// manually using this function can expose a DoS attack vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let s = DefaultHashBuilder::default();
    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
    /// map.insert(1, 2);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity_and_hasher_in(capacity: usize, hash_builder: S, alloc: A) -> Self {
        Self {
            hash_builder,
            table: RawTable::with_capacity(alloc, capacity),
        }
    }
    /// Returns a reference to the map's [`BuildHasher`].
    ///
    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::DefaultHashBuilder;
    ///
    /// let hasher = DefaultHashBuilder::default();
    /// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
    /// let hasher: &DefaultHashBuilder = map.hasher();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn hasher(&self) -> &S {
        &self.hash_builder
    }
    /// Returns the number of elements the map can hold without reallocating.
    ///
    /// This number is a lower bound; the `HashMap<K, V>` might be able to hold
    /// more, but is guaranteed to be able to hold at least this many.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
    /// assert!(map.capacity() >= 100);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn capacity(&self) -> usize {
        self.table.capacity()
    }
    /// An iterator visiting all keys in arbitrary order.
    /// The iterator element type is `&'a K`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for key in map.keys() {
    ///     println!("{}", key);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn keys(&self) -> Keys<'_, K, V> {
        Keys { inner: self.iter() }
    }
    /// An iterator visiting all values in arbitrary order.
    /// The iterator element type is `&'a V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for val in map.values() {
    ///     println!("{}", val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn values(&self) -> Values<'_, K, V> {
        Values { inner: self.iter() }
    }
    /// An iterator visiting all values mutably in arbitrary order.
    /// The iterator element type is `&'a mut V`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    ///
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for val in map.values_mut() {
    ///     *val = *val + 10;
    /// }
    ///
    /// for val in map.values() {
    ///     println!("{}", val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
        ValuesMut {
            inner: self.iter_mut(),
        }
    }
    /// An iterator visiting all key-value pairs in arbitrary order.
    /// The iterator element type is `(&'a K, &'a V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// for (key, val) in map.iter() {
    ///     println!("key: {} val: {}", key, val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn iter(&self) -> Iter<'_, K, V> {
        // Here we tie the lifetime of self to the iter.
        // The raw `self.table.iter()` carries no lifetime; the `PhantomData`
        // inside `Iter` re-attaches the `&self` borrow so the safe API cannot
        // outlive the map.
        unsafe {
            Iter {
                inner: self.table.iter(),
                marker: PhantomData,
            }
        }
    }
    /// An iterator visiting all key-value pairs in arbitrary order,
    /// with mutable references to the values.
    /// The iterator element type is `(&'a K, &'a mut V)`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // Update all values
    /// for (_, val) in map.iter_mut() {
    ///     *val *= 2;
    /// }
    ///
    /// for (key, val) in &map {
    ///     println!("key: {} val: {}", key, val);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        // Here we tie the lifetime of self to the iter.
        // Same pattern as `iter`, but the `&mut self` borrow is attached.
        unsafe {
            IterMut {
                inner: self.table.iter(),
                marker: PhantomData,
            }
        }
    }
    // Test-only helper: the raw number of buckets in the underlying table
    // (as opposed to `capacity`, which accounts for the load factor).
    #[cfg(test)]
    #[cfg_attr(feature = "inline-more", inline)]
    fn raw_capacity(&self) -> usize {
        self.table.buckets()
    }
    /// Returns the number of elements in the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// assert_eq!(a.len(), 0);
    /// a.insert(1, "a");
    /// assert_eq!(a.len(), 1);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn len(&self) -> usize {
        self.table.len()
    }
    /// Returns `true` if the map contains no elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// assert!(a.is_empty());
    /// a.insert(1, "a");
    /// assert!(!a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Clears the map, returning all key-value pairs as an iterator. Keeps the
    /// allocated memory for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// a.insert(1, "a");
    /// a.insert(2, "b");
    ///
    /// for (k, v) in a.drain().take(1) {
    ///     assert!(k == 1 || k == 2);
    ///     assert!(v == "a" || v == "b");
    /// }
    ///
    /// assert!(a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn drain(&mut self) -> Drain<'_, K, V, A> {
        Drain {
            inner: self.table.drain(),
        }
    }
    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
    /// map.retain(|&k, _| k % 2 == 0);
    /// assert_eq!(map.len(), 4);
    /// ```
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        // Here we only use `iter` as a temporary, preventing use-after-free
        // NOTE(review): this relies on `erase` leaving the other buckets of
        // the live raw iterator valid — upheld by `RawTable`'s contract.
        unsafe {
            for item in self.table.iter() {
                let &mut (ref key, ref mut value) = item.as_mut();
                if !f(key, value) {
                    self.table.erase(item);
                }
            }
        }
    }
    /// Drains elements which are true under the given predicate,
    /// and returns an iterator over the removed items.
    ///
    /// In other words, move all pairs `(k, v)` such that `f(&k,&mut v)` returns `true` out
    /// into another iterator.
    ///
    /// When the returned DrainedFilter is dropped, any remaining elements that satisfy
    /// the predicate are dropped from the table.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
    /// let drained: HashMap<i32, i32> = map.drain_filter(|k, _v| k % 2 == 0).collect();
    ///
    /// let mut evens = drained.keys().cloned().collect::<Vec<_>>();
    /// let mut odds = map.keys().cloned().collect::<Vec<_>>();
    /// evens.sort();
    /// odds.sort();
    ///
    /// assert_eq!(evens, vec![0, 2, 4, 6]);
    /// assert_eq!(odds, vec![1, 3, 5, 7]);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn drain_filter<F>(&mut self, f: F) -> DrainFilter<'_, K, V, F, A>
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        DrainFilter {
            f,
            inner: DrainFilterInner {
                iter: unsafe { self.table.iter() },
                table: &mut self.table,
            },
        }
    }
    /// Clears the map, removing all key-value pairs. Keeps the allocated memory
    /// for reuse.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut a = HashMap::new();
    /// a.insert(1, "a");
    /// a.clear();
    /// assert!(a.is_empty());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn clear(&mut self) {
        self.table.clear();
    }
}
impl<K, V, S, A> HashMap<K, V, S, A>
where
K: Eq + Hash,
S: BuildHasher,
A: AllocRef + Clone,
{
/// Reserves capacity for at least `additional` more elements to be inserted
/// in the `HashMap`. The collection may reserve more space to avoid
/// frequent reallocations.
///
/// # Panics
///
/// Panics if the new allocation size overflows [`usize`].
///
/// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let mut map: HashMap<&str, i32> = HashMap::new();
/// map.reserve(10);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn reserve(&mut self, additional: usize) {
self.table
.reserve(additional, make_hasher(&self.hash_builder));
}
/// Tries to reserve capacity for at least `additional` more elements to be inserted
/// in the given `HashMap<K,V>`. The collection may reserve more space to avoid
/// frequent reallocations.
///
/// # Errors
///
/// If the capacity overflows, or the allocator reports a failure, then an error
/// is returned.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let mut map: HashMap<&str, isize> = HashMap::new();
/// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
self.table
.try_reserve(additional, make_hasher(&self.hash_builder))
}
/// Shrinks the capacity of the map as much as possible. It will drop
/// down as much as possible while maintaining the internal rules
/// and possibly leaving some space in accordance with the resize policy.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// map.shrink_to_fit();
/// assert!(map.capacity() >= 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn shrink_to_fit(&mut self) {
self.table.shrink_to(0, make_hasher(&self.hash_builder));
}
/// Shrinks the capacity of the map with a lower limit. It will drop
/// down no lower than the supplied limit while maintaining the internal rules
/// and possibly leaving some space in accordance with the resize policy.
///
/// This function does nothing if the current capacity is smaller than the
/// supplied minimum capacity.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// map.shrink_to(10);
/// assert!(map.capacity() >= 10);
/// map.shrink_to(0);
/// assert!(map.capacity() >= 2);
/// map.shrink_to(10);
/// assert!(map.capacity() >= 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn shrink_to(&mut self, min_capacity: usize) {
self.table
.shrink_to(min_capacity, make_hasher(&self.hash_builder));
}
/// Gets the given key's corresponding entry in the map for in-place manipulation.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut letters = HashMap::new();
///
/// for ch in "a short treatise on fungi".chars() {
/// let counter = letters.entry(ch).or_insert(0);
/// *counter += 1;
/// }
///
/// assert_eq!(letters[&'s'], 2);
/// assert_eq!(letters[&'t'], 3);
/// assert_eq!(letters[&'u'], 1);
/// assert_eq!(letters.get(&'y'), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> {
let hash = make_hash(&self.hash_builder, &key);
if let Some(elem) = self.table.find(hash, equivalent_key(&key)) {
Entry::Occupied(OccupiedEntry {
hash,
key: Some(key),
elem,
table: self,
})
} else {
Entry::Vacant(VacantEntry {
hash,
key,
table: self,
})
}
}
/// Returns a reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
/// ```
#[inline]
pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Avoid `Option::map` because it bloats LLVM IR.
match self.get_inner(k) {
Some(&(_, ref v)) => Some(v),
None => None,
}
}
/// Returns the key-value pair corresponding to the supplied key.
///
/// The supplied key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
/// assert_eq!(map.get_key_value(&2), None);
/// ```
#[inline]
pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Avoid `Option::map` because it bloats LLVM IR.
match self.get_inner(k) {
Some(&(ref key, ref value)) => Some((key, value)),
None => None,
}
}
#[inline]
fn get_inner<Q: ?Sized>(&self, k: &Q) -> Option<&(K, V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, k);
self.table.get(hash, equivalent_key(k))
}
/// Returns the key-value pair corresponding to the supplied key, with a mutable reference to value.
///
/// The supplied key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// let (k, v) = map.get_key_value_mut(&1).unwrap();
/// assert_eq!(k, &1);
/// assert_eq!(v, &mut "a");
/// *v = "b";
/// assert_eq!(map.get_key_value_mut(&1), Some((&1, &mut "b")));
/// assert_eq!(map.get_key_value_mut(&2), None);
/// ```
#[inline]
pub fn get_key_value_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<(&K, &mut V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Avoid `Option::map` because it bloats LLVM IR.
match self.get_inner_mut(k) {
Some(&mut (ref key, ref mut value)) => Some((key, value)),
None => None,
}
}
/// Returns `true` if the map contains a value for the specified key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
where
K: Borrow<Q>,
Q: Hash + Eq,
{
self.get_inner(k).is_some()
}
/// Returns a mutable reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// if let Some(x) = map.get_mut(&1) {
/// *x = "b";
/// }
/// assert_eq!(map[&1], "b");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Avoid `Option::map` because it bloats LLVM IR.
match self.get_inner_mut(k) {
Some(&mut (_, ref mut v)) => Some(v),
None => None,
}
}
#[inline]
fn get_inner_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut (K, V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, k);
self.table.get_mut(hash, equivalent_key(k))
}
/// Inserts a key-value pair into the map.
///
/// If the map did not have this key present, [`None`] is returned.
///
/// If the map did have this key present, the value is updated, and the old
/// value is returned. The key is not updated, though; this matters for
/// types that can be `==` without being identical. See the [module-level
/// documentation] for more.
///
/// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
/// [module-level documentation]: index.html#insert-and-complex-keys
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// assert_eq!(map.insert(37, "a"), None);
/// assert_eq!(map.is_empty(), false);
///
/// map.insert(37, "b");
/// assert_eq!(map.insert(37, "c"), Some("b"));
/// assert_eq!(map[&37], "c");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(&mut self, k: K, v: V) -> Option<V> {
let hash = make_hash(&self.hash_builder, &k);
if let Some((_, item)) = self.table.get_mut(hash, equivalent_key(&k)) {
Some(mem::replace(item, v))
} else {
self.table
.insert(hash, (k, v), make_hasher(&self.hash_builder));
None
}
}
/// Removes a key from the map, returning the value at the key if the key
/// was previously in the map.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Avoid `Option::map` because it bloats LLVM IR.
match self.remove_entry(k) {
Some((_, v)) => Some(v),
None => None,
}
}
/// Removes a key from the map, returning the stored key and value if the
/// key was previously in the map.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.remove_entry(&1), Some((1, "a")));
/// assert_eq!(map.remove(&1), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, &k);
self.table.remove_entry(hash, equivalent_key(k))
}
}
impl<K, V, S, A: AllocRef + Clone> HashMap<K, V, S, A> {
    /// Creates a raw entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched. After this, insertions into a vacant entry
    /// still require an owned key to be provided.
    ///
    /// Raw entries are useful for such exotic situations as:
    ///
    /// * Hash memoization
    /// * Deferring the creation of an owned key until it is known to be required
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Because raw entries provide much more low-level control, it's much easier
    /// to put the HashMap into an inconsistent state which, while memory-safe,
    /// will cause the map to produce seemingly random results. Higher-level and
    /// more foolproof APIs like `entry` should be preferred when possible.
    ///
    /// In particular, the hash used to initialize the raw entry must still be
    /// consistent with the hash of the key that is ultimately stored in the entry.
    /// This is because implementations of HashMap may need to recompute hashes
    /// when resizing, at which point only the keys are available.
    ///
    /// Raw entries give mutable access to the keys. This must not be used
    /// to modify how the key would compare or hash, as the map will not re-evaluate
    /// where the key should go, meaning the keys may become "lost" if their
    /// location does not reflect their state. For instance, if you change a key
    /// so that the map now contains keys which compare equal, search may start
    /// acting erratically, with two keys randomly masking each other. Implementations
    /// are free to assume this doesn't happen (within the limits of memory-safety).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S, A> {
        RawEntryBuilderMut { map: self }
    }
    /// Creates a raw immutable entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched.
    ///
    /// This is useful for
    /// * Hash memoization
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Unless you are in such a situation, higher-level and more foolproof APIs like
    /// `get` should be preferred.
    ///
    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S, A> {
        RawEntryBuilder { map: self }
    }
}
impl<K, V, S, A> PartialEq for HashMap<K, V, S, A>
where
    K: Eq + Hash,
    V: PartialEq,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    /// Two maps are equal when they have the same length and every key in
    /// `self` maps to an equal value in `other`.
    fn eq(&self, other: &Self) -> bool {
        self.len() == other.len()
            && self
                .iter()
                .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
    }
}
// `eq` is a full equivalence relation whenever `V: Eq`, so the marker impl
// carries no methods of its own.
impl<K, V, S, A> Eq for HashMap<K, V, S, A>
where
    K: Eq + Hash,
    V: Eq,
    S: BuildHasher,
    A: AllocRef + Clone,
{
}
// Formats the map as `{key: value, ...}` via the standard map builder. No
// bound on `S` is needed since the hasher itself is never printed.
impl<K, V, S, A> Debug for HashMap<K, V, S, A>
where
    K: Debug,
    V: Debug,
    A: AllocRef + Clone,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
impl<K, V, S, A> Default for HashMap<K, V, S, A>
where
    S: Default,
    A: Default + AllocRef + Clone,
{
    /// Creates an empty `HashMap<K, V, S, A>`, with the `Default` value for the hasher and allocator.
    #[cfg_attr(feature = "inline-more", inline)]
    fn default() -> Self {
        // Spell out which `Default` impls are meant, rather than relying on
        // inference through `Default::default()`.
        Self::with_hasher_in(S::default(), A::default())
    }
}
impl<K, Q: ?Sized, V, S, A> Index<&Q> for HashMap<K, V, S, A>
where
    K: Eq + Hash + Borrow<Q>,
    Q: Eq + Hash,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    type Output = V;
    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `HashMap`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn index(&self, key: &Q) -> &V {
        match self.get(key) {
            Some(value) => value,
            None => panic!("no entry found for key"),
        }
    }
}
/// An iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter`]: struct.HashMap.html#method.iter
/// [`HashMap`]: struct.HashMap.html
pub struct Iter<'a, K, V> {
    // Raw cursor over the table's buckets.
    inner: RawIter<(K, V)>,
    // Ties the iterator to the `'a` shared borrow of the map's entries.
    marker: PhantomData<(&'a K, &'a V)>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Iter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
// Debug renders the remaining entries by walking a clone, leaving `self` untouched.
impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A mutable iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
/// [`HashMap`]: struct.HashMap.html
pub struct IterMut<'a, K, V> {
    inner: RawIter<(K, V)>,
    // To ensure invariance with respect to V
    marker: PhantomData<(&'a K, &'a mut V)>,
}
// We override the default Send impl which has K: Sync instead of K: Send. Both
// are correct, but this one is more general since it allows keys which
// implement Send but not Sync.
// SAFETY: the iterator hands out `&'a K` and `&'a mut V`; sending it to
// another thread therefore requires both `K: Send` and `V: Send`.
unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
impl<K, V> IterMut<'_, K, V> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
/// An owning iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.HashMap.html#method.into_iter
/// [`HashMap`]: struct.HashMap.html
pub struct IntoIter<K, V, A: AllocRef + Clone = Global> {
    // Owns the table; dropping the iterator frees any entries not yet yielded.
    inner: RawIntoIter<(K, V), A>,
}
impl<K, V, A: AllocRef + Clone> IntoIter<K, V, A> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// An iterator over the keys of a `HashMap`.
///
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`keys`]: struct.HashMap.html#method.keys
/// [`HashMap`]: struct.HashMap.html
pub struct Keys<'a, K, V = Global> {
inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Keys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Keys {
            inner: self.inner.clone(),
        }
    }
}
// Only `K: Debug` is required: the values are never printed.
impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// An iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values`]: struct.HashMap.html#method.values
/// [`HashMap`]: struct.HashMap.html
pub struct Values<'a, K, V> {
    // Full-entry iterator; `next` projects out the value half of each pair.
    inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Values {
            inner: self.inner.clone(),
        }
    }
}
// Only `V: Debug` is required: the keys are never printed.
impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A draining iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain`]: struct.HashMap.html#method.drain
/// [`HashMap`]: struct.HashMap.html
pub struct Drain<'a, K, V, A: AllocRef + Clone = Global> {
    // Raw drain over the table; it removes entries as they are yielded.
    inner: RawDrain<'a, (K, V), A>,
}
impl<K, V, A: AllocRef + Clone> Drain<'_, K, V, A> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// A draining iterator over entries of a `HashMap` which don't satisfy the predicate `f`.
///
/// This `struct` is created by the [`drain_filter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain_filter`]: struct.HashMap.html#method.drain_filter
/// [`HashMap`]: struct.HashMap.html
pub struct DrainFilter<'a, K, V, F, A: AllocRef + Clone = Global>
where
    F: FnMut(&K, &mut V) -> bool,
{
    // Predicate: entries for which this returns `true` are removed and yielded.
    f: F,
    inner: DrainFilterInner<'a, K, V, A>,
}
impl<'a, K, V, F, A> Drop for DrainFilter<'a, K, V, F, A>
where
    F: FnMut(&K, &mut V) -> bool,
    A: AllocRef + Clone,
{
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        // Drain every remaining matching entry. If dropping a yielded item
        // panics, `guard`'s own `Drop` takes over and consumes the rest of
        // the iterator, so no matching entries are left behind in the map.
        while let Some(item) = self.next() {
            let guard = ConsumeAllOnDrop(self);
            drop(item);
            // Normal path: the item dropped cleanly, so disarm the guard.
            mem::forget(guard);
        }
    }
}
// Panic guard: on drop, exhausts the wrapped iterator, dropping every
// remaining item it yields.
pub(super) struct ConsumeAllOnDrop<'a, T: Iterator>(pub &'a mut T);
impl<T: Iterator> Drop for ConsumeAllOnDrop<'_, T> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn drop(&mut self) {
        self.0.for_each(drop)
    }
}
impl<K, V, F, A> Iterator for DrainFilter<'_, K, V, F, A>
where
    F: FnMut(&K, &mut V) -> bool,
    A: AllocRef + Clone,
{
    type Item = (K, V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next(&mut self.f)
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Lower bound is 0 because the predicate may reject every remaining
        // entry; the upper bound comes from the underlying raw iterator.
        (0, self.inner.iter.size_hint().1)
    }
}
// NOTE: this impl only covers the default allocator (`A = Global` elided here).
impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
/// Portions of `DrainFilter` shared with `set::DrainFilter`
pub(super) struct DrainFilterInner<'a, K, V, A: AllocRef + Clone> {
    // Cursor over the buckets still to be tested against the predicate.
    pub iter: RawIter<(K, V)>,
    pub table: &'a mut RawTable<(K, V), A>,
}
impl<K, V, A: AllocRef + Clone> DrainFilterInner<'_, K, V, A> {
    /// Advances to the next entry for which `f` returns `true`, removes it
    /// from the table and returns it. Entries rejected by `f` stay in the map.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn next<F>(&mut self, f: &mut F) -> Option<(K, V)>
    where
        F: FnMut(&K, &mut V) -> bool,
    {
        unsafe {
            // SAFETY: `iter` yields buckets belonging to `table`, which we
            // hold exclusively, and each bucket is removed at most once —
            // the iterator has already advanced past it when `remove` runs.
            while let Some(item) = self.iter.next() {
                let &mut (ref key, ref mut value) = item.as_mut();
                if f(key, value) {
                    return Some(self.table.remove(item));
                }
            }
        }
        None
    }
}
/// A mutable iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: struct.HashMap.html#method.values_mut
/// [`HashMap`]: struct.HashMap.html
pub struct ValuesMut<'a, K, V> {
    // Full-entry mutable iterator; `next` projects out `&mut V`.
    inner: IterMut<'a, K, V>,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
pub struct RawEntryBuilderMut<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Exclusive borrow of the map; consumed by the `from_*` search methods.
    map: &'a mut HashMap<K, V, S, A>,
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This is a lower-level version of [`Entry`].
///
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
pub enum RawEntryMut<'a, K, V, S, A: AllocRef + Clone> {
/// An occupied entry.
Occupied(RawOccupiedEntryMut<'a, K, V, S, A>),
/// A vacant entry.
Vacant(RawVacantEntryMut<'a, K, V, S, A>),
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawOccupiedEntryMut<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Invariant: `elem` is a live bucket inside `table`.
    elem: Bucket<(K, V)>,
    table: &'a mut RawTable<(K, V), A>,
    // Shared reference so vacated entries can rehash on re-insertion.
    hash_builder: &'a S,
}
// SAFETY: sending the entry moves the exclusive table borrow (so `K`, `V`
// and `A` must be `Send`) and the shared `&'a S` hasher reference, which may
// then be used concurrently with other holders — hence `S: Sync`, not
// `S: Send`. The original impl had no bound on `S` at all, silently claiming
// `Send` regardless of the hasher reference the struct stores.
unsafe impl<K, V, S, A> Send for RawOccupiedEntryMut<'_, K, V, S, A>
where
    K: Send,
    V: Send,
    S: Sync,
    A: Send + AllocRef + Clone,
{
}
// SAFETY: sharing `&RawOccupiedEntryMut` across threads shares the entry's
// key/value, the hasher reference and the allocator, so all of them must be
// `Sync`. The original impl required `A: Send` (a copy-paste of the `Send`
// impl's bound) and omitted `S` entirely; compare the `Sync` impl for
// `OccupiedEntry`, which correctly uses `Sync` bounds throughout.
unsafe impl<K, V, S, A> Sync for RawOccupiedEntryMut<'_, K, V, S, A>
where
    K: Sync,
    V: Sync,
    S: Sync,
    A: Sync + AllocRef + Clone,
{
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawVacantEntryMut<'a, K, V, S, A: AllocRef + Clone = Global> {
    table: &'a mut RawTable<(K, V), A>,
    // Used to hash the key on insertion (see `insert`/`insert_entry`).
    hash_builder: &'a S,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
pub struct RawEntryBuilder<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Shared borrow: the immutable builder can only read entries.
    map: &'a HashMap<K, V, S, A>,
}
impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilderMut<'a, K, V, S, A> {
    /// Creates a `RawEntryMut` from the given key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S, A>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Hash the key with the map's hasher, then delegate to the
        // hash-carrying constructor.
        let hash = {
            let mut state = self.map.hash_builder.build_hasher();
            k.hash(&mut state);
            state.finish()
        };
        self.from_key_hashed_nocheck(hash, k)
    }
    /// Creates a `RawEntryMut` from the given key and its hash.
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A>
    where
        K: Borrow<Q>,
        Q: Eq,
    {
        self.from_hash(hash, equivalent(k))
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilderMut<'a, K, V, S, A> {
    /// Creates a `RawEntryMut` from the given hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        self.search(hash, is_match)
    }
    /// Looks the bucket up in the raw table and wraps it in the matching
    /// `RawEntryMut` variant.
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        if let Some(elem) = self.map.table.find(hash, |(k, _)| is_match(k)) {
            return RawEntryMut::Occupied(RawOccupiedEntryMut {
                elem,
                table: &mut self.map.table,
                hash_builder: &self.map.hash_builder,
            });
        }
        RawEntryMut::Vacant(RawVacantEntryMut {
            table: &mut self.map.table,
            hash_builder: &self.map.hash_builder,
        })
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilder<'a, K, V, S, A> {
    /// Access an entry by key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Hash the key with the map's hasher, then delegate.
        let hash = {
            let mut state = self.map.hash_builder.build_hasher();
            k.hash(&mut state);
            state.finish()
        };
        self.from_key_hashed_nocheck(hash, k)
    }
    /// Access an entry by a key and its hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
    where
        K: Borrow<Q>,
        Q: Eq,
    {
        self.from_hash(hash, equivalent(k))
    }
    /// Shared read-only lookup used by the public `from_*` methods.
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        if let Some(&(ref key, ref value)) = self.map.table.get(hash, |(k, _)| is_match(k)) {
            Some((key, value))
        } else {
            None
        }
    }
    /// Access an entry by hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        self.search(hash, is_match)
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawEntryMut<'a, K, V, S, A> {
    /// Sets the value of the entry, and returns a [`RawOccupiedEntryMut`].
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
    ///
    /// assert_eq!(entry.remove_entry(), ("horseyland", 37));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S, A>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(mut entry) => {
                entry.insert(value);
                entry
            }
            RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
        }
    }
    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// mutable references to the key and value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
            RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
        }
    }
    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns mutable references to the key and value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    ///
    /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
    ///     ("poneyland", "hoho".to_string())
    /// });
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
    where
        F: FnOnce() -> (K, V),
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
            RawEntryMut::Vacant(entry) => {
                // `default` is only invoked on the vacant path — the lazy
                // counterpart of `or_insert`.
                let (k, v) = default();
                entry.insert(k, v)
            }
        }
    }
    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.raw_entry_mut()
    ///    .from_key("poneyland")
    ///    .and_modify(|_k, v| { *v += 1 })
    ///    .or_insert("poneyland", 42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.raw_entry_mut()
    ///    .from_key("poneyland")
    ///    .and_modify(|_k, v| { *v += 1 })
    ///    .or_insert("poneyland", 0);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(self, f: F) -> Self
    where
        F: FnOnce(&mut K, &mut V),
    {
        match self {
            RawEntryMut::Occupied(mut entry) => {
                {
                    // Scope the mutable borrow of the entry's contents so the
                    // entry itself can be moved back out below.
                    let (k, v) = entry.get_key_value_mut();
                    f(k, v);
                }
                RawEntryMut::Occupied(entry)
            }
            RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
        }
    }
    /// Provides shared access to the key and owned access to the value of
    /// an occupied entry and allows to replace or remove it based on the
    /// value of the returned option.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::RawEntryMut;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// let entry = map
    ///     .raw_entry_mut()
    ///     .from_key("poneyland")
    ///     .and_replace_entry_with(|_k, _v| panic!());
    ///
    /// match entry {
    ///     RawEntryMut::Vacant(_) => {},
    ///     RawEntryMut::Occupied(_) => panic!(),
    /// }
    ///
    /// map.insert("poneyland", 42);
    ///
    /// let entry = map
    ///     .raw_entry_mut()
    ///     .from_key("poneyland")
    ///     .and_replace_entry_with(|k, v| {
    ///         assert_eq!(k, &"poneyland");
    ///         assert_eq!(v, 42);
    ///         Some(v + 1)
    ///     });
    ///
    /// match entry {
    ///     RawEntryMut::Occupied(e) => {
    ///         assert_eq!(e.key(), &"poneyland");
    ///         assert_eq!(e.get(), &43);
    ///     },
    ///     RawEntryMut::Vacant(_) => panic!(),
    /// }
    ///
    /// assert_eq!(map["poneyland"], 43);
    ///
    /// let entry = map
    ///     .raw_entry_mut()
    ///     .from_key("poneyland")
    ///     .and_replace_entry_with(|_k, _v| None);
    ///
    /// match entry {
    ///     RawEntryMut::Vacant(_) => {},
    ///     RawEntryMut::Occupied(_) => panic!(),
    /// }
    ///
    /// assert!(!map.contains_key("poneyland"));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_replace_entry_with<F>(self, f: F) -> Self
    where
        F: FnOnce(&K, V) -> Option<V>,
    {
        match self {
            RawEntryMut::Occupied(entry) => entry.replace_entry_with(f),
            // Vacant: `f` is never called; the entry passes through unchanged.
            RawEntryMut::Vacant(_) => self,
        }
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawOccupiedEntryMut<'a, K, V, S, A> {
    /// Gets a reference to the key in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        // SAFETY: type invariant — `elem` is a live bucket in `table`, which
        // this entry borrows exclusively for 'a.
        unsafe { &self.elem.as_ref().0 }
    }
    /// Gets a mutable reference to the key in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key_mut(&mut self) -> &mut K {
        // SAFETY: same invariant as `key`; `&mut self` guarantees uniqueness.
        unsafe { &mut self.elem.as_mut().0 }
    }
    /// Converts the entry into a mutable reference to the key in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> &'a mut K {
        // SAFETY: consuming the entry extends the borrow to 'a.
        unsafe { &mut self.elem.as_mut().0 }
    }
    /// Gets a reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        // SAFETY: see `key`.
        unsafe { &self.elem.as_ref().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        // SAFETY: see `into_key`.
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        // SAFETY: see `key_mut`.
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Gets a reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value(&mut self) -> (&K, &V) {
        // SAFETY: see `key`.
        unsafe {
            let &(ref key, ref value) = self.elem.as_ref();
            (key, value)
        }
    }
    /// Gets a mutable reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
        // SAFETY: see `key_mut`.
        unsafe {
            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
            (key, value)
        }
    }
    /// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
        // SAFETY: see `into_key`.
        unsafe {
            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
            (key, value)
        }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, value: V) -> V {
        mem::replace(self.get_mut(), value)
    }
    /// Sets the key of the entry, and returns the entry's old key.
    ///
    /// The new key must hash and compare equal to the old one, or the entry
    /// becomes unreachable (see `raw_entry_mut`'s warning about mutable keys).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_key(&mut self, key: K) -> K {
        mem::replace(self.key_mut(), key)
    }
    /// Takes the value out of the entry, and returns it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }
    /// Take the ownership of the key and value from the map.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        // SAFETY: type invariant — `elem` belongs to `table` and is removed
        // exactly once, here, as the entry is consumed.
        unsafe { self.table.remove(self.elem) }
    }
    /// Provides shared access to the key and owned access to the value of
    /// the entry and allows to replace or remove it based on the
    /// value of the returned option.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry_with<F>(self, f: F) -> RawEntryMut<'a, K, V, S, A>
    where
        F: FnOnce(&K, V) -> Option<V>,
    {
        // SAFETY: type invariant — `elem` is a live bucket in `table`.
        unsafe {
            // `replace_bucket_with` erases the bucket when the closure returns
            // `None` and reports whether it is still occupied afterwards.
            let still_occupied = self
                .table
                .replace_bucket_with(self.elem.clone(), |(key, value)| {
                    f(&key, value).map(|new_value| (key, new_value))
                });
            if still_occupied {
                RawEntryMut::Occupied(self)
            } else {
                RawEntryMut::Vacant(RawVacantEntryMut {
                    table: self.table,
                    hash_builder: self.hash_builder,
                })
            }
        }
    }
}
impl<'a, K, V, S, A: AllocRef + Clone> RawVacantEntryMut<'a, K, V, S, A> {
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        // Hash the key with the map's hasher, then delegate.
        let mut state = self.hash_builder.build_hasher();
        key.hash(&mut state);
        let hash = state.finish();
        self.insert_hashed_nocheck(hash, key, value)
    }
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::shadow_unrelated)]
    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        let entry = self
            .table
            .insert_entry(hash, (key, value), make_hasher(self.hash_builder));
        (&mut entry.0, &mut entry.1)
    }
    /// Set the value of an entry with a custom hasher function.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_with_hasher<H>(
        self,
        hash: u64,
        key: K,
        value: V,
        hasher: H,
    ) -> (&'a mut K, &'a mut V)
    where
        H: Fn(&K) -> u64,
    {
        let entry = self
            .table
            .insert_entry(hash, (key, value), |pair| hasher(&pair.0));
        (&mut entry.0, &mut entry.1)
    }
    /// Inserts the pair and returns an occupied view of the new entry.
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S, A>
    where
        K: Hash,
        S: BuildHasher,
    {
        let mut state = self.hash_builder.build_hasher();
        key.hash(&mut state);
        let hash = state.finish();
        let elem = self
            .table
            .insert(hash, (key, value), make_hasher(self.hash_builder));
        RawOccupiedEntryMut {
            elem,
            table: self.table,
            hash_builder: self.hash_builder,
        }
    }
}
impl<K, V, S, A: AllocRef + Clone> Debug for RawEntryBuilderMut<'_, K, V, S, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Print this type's own name; it previously printed `RawEntryBuilder`,
        // the name of the *immutable* builder, making the two indistinguishable
        // in debug output.
        f.debug_struct("RawEntryBuilderMut").finish()
    }
}
impl<K: Debug, V: Debug, S, A: AllocRef + Clone> Debug for RawEntryMut<'_, K, V, S, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Both variants print under the `RawEntry` tuple name; only the
        // wrapped payload distinguishes them.
        match *self {
            RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(),
            RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(),
        }
    }
}
// Shows the entry's key and value; requires `K: Debug` and `V: Debug` only.
impl<K: Debug, V: Debug, S, A: AllocRef + Clone> Debug for RawOccupiedEntryMut<'_, K, V, S, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawOccupiedEntryMut")
            .field("key", self.key())
            .field("value", self.get())
            .finish()
    }
}
// A vacant entry has no key yet, so only the type name is printed.
impl<K, V, S, A: AllocRef + Clone> Debug for RawVacantEntryMut<'_, K, V, S, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawVacantEntryMut").finish()
    }
}
impl<K, V, S, A: AllocRef + Clone> Debug for RawEntryBuilder<'_, K, V, S, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawEntryBuilder").finish()
    }
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`entry`]: struct.HashMap.html#method.entry
pub enum Entry<'a, K, V, S, A>
where
A: AllocRef + Clone,
{
/// An occupied entry.
Occupied(OccupiedEntry<'a, K, V, S, A>),
/// A vacant entry.
Vacant(VacantEntry<'a, K, V, S, A>),
}
impl<K: Debug, V: Debug, S, A: AllocRef + Clone> Debug for Entry<'_, K, V, S, A> {
    /// Both variants print under the `Entry` tuple name, wrapping the payload.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Entry::Vacant(v) => f.debug_tuple("Entry").field(v).finish(),
            Entry::Occupied(o) => f.debug_tuple("Entry").field(o).finish(),
        }
    }
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct OccupiedEntry<'a, K, V, S, A: AllocRef + Clone = Global> {
    hash: u64,
    // The caller-provided key, retained for `replace_key`-style operations;
    // `None` when the lookup key was consumed or never owned.
    key: Option<K>,
    // Invariant: `elem` is a live bucket in `table`'s raw table.
    elem: Bucket<(K, V)>,
    table: &'a mut HashMap<K, V, S, A>,
}
// SAFETY: the entry owns an `Option<K>` and exclusively borrows the map, so
// sending it between threads requires every reachable type to be `Send`.
unsafe impl<K, V, S, A> Send for OccupiedEntry<'_, K, V, S, A>
where
    K: Send,
    V: Send,
    S: Send,
    A: Send + AllocRef + Clone,
{
}
// SAFETY: sharing `&OccupiedEntry` across threads gives shared access to the
// key, value, hasher and allocator, so all of them must be `Sync`.
unsafe impl<K, V, S, A> Sync for OccupiedEntry<'_, K, V, S, A>
where
    K: Sync,
    V: Sync,
    S: Sync,
    A: Sync + AllocRef + Clone,
{
}
impl<K: Debug, V: Debug, S, A: AllocRef + Clone> Debug for OccupiedEntry<'_, K, V, S, A> {
    /// Shows the entry's key and value under the `OccupiedEntry` struct name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("OccupiedEntry");
        builder.field("key", self.key());
        builder.field("value", self.get());
        builder.finish()
    }
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct VacantEntry<'a, K, V, S, A: AllocRef + Clone = Global> {
    // Precomputed hash of `key`, reused on insertion.
    hash: u64,
    key: K,
    table: &'a mut HashMap<K, V, S, A>,
}
// Only the pending key is printed; there is no value yet.
impl<K: Debug, V, S, A: AllocRef + Clone> Debug for VacantEntry<'_, K, V, S, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("VacantEntry").field(self.key()).finish()
    }
}
// `for (k, v) in &map` — iterates shared references; delegates to `iter`.
impl<'a, K, V, S, A: AllocRef + Clone> IntoIterator for &'a HashMap<K, V, S, A> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> Iter<'a, K, V> {
        self.iter()
    }
}
// `for (k, v) in &mut map` — keys stay shared, values are mutable.
impl<'a, K, V, S, A: AllocRef + Clone> IntoIterator for &'a mut HashMap<K, V, S, A> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IterMut<'a, K, V> {
        self.iter_mut()
    }
}
impl<K, V, S, A: AllocRef + Clone> IntoIterator for HashMap<K, V, S, A> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V, A>;
    /// Creates a consuming iterator, that is, one that moves each key-value
    /// pair out of the map in arbitrary order. The map cannot be used after
    /// calling this.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // Not possible with .iter()
    /// let vec: Vec<(&str, i32)> = map.into_iter().collect();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IntoIter<K, V, A> {
        let inner = self.table.into_iter();
        IntoIter { inner }
    }
}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        // Hand-rolled instead of `Option::map` to keep the LLVM IR small.
        let bucket = self.inner.next()?;
        // SAFETY: buckets yielded by `RawIter` stay valid while the table is
        // alive, which the `'a` borrow recorded in `marker` guarantees.
        unsafe {
            let &(ref key, ref value) = bucket.as_ref();
            Some((key, value))
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
// The raw iterator knows exactly how many buckets remain.
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Once `next` returns `None`, it keeps returning `None`.
impl<K, V> FusedIterator for Iter<'_, K, V> {}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        // Hand-rolled instead of `Option::map` to keep the LLVM IR small.
        let bucket = self.inner.next()?;
        // SAFETY: buckets yielded by `RawIter` stay valid while the table is
        // alive, which the exclusive `'a` borrow in `marker` guarantees; each
        // bucket is yielded once, so the `&mut V` handed out is unique.
        unsafe {
            let entry = bucket.as_mut();
            Some((&entry.0, &mut entry.1))
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        // The raw-table iterator knows the exact number of remaining items.
        self.inner.len()
    }
}
// Once exhausted, the inner iterator keeps returning `None`, so `IterMut` is fused.
impl<K, V> FusedIterator for IterMut<'_, K, V> {}
impl<K, V> fmt::Debug for IterMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    // Debug-prints the remaining (key, value) pairs as a list. `self.iter()`
    // produces a fresh shared iterator, so this does not advance or consume
    // the mutable iterator itself.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<K, V, A: AllocRef + Clone> Iterator for IntoIter<K, V, A> {
    type Item = (K, V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        // The owning raw-table iterator already yields owned pairs; delegate.
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V, A: AllocRef + Clone> ExactSizeIterator for IntoIter<K, V, A> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        // The owning raw-table iterator tracks the exact remaining count.
        self.inner.len()
    }
}
// Once exhausted, the inner iterator keeps returning `None`.
impl<K, V, A: AllocRef + Clone> FusedIterator for IntoIter<K, V, A> {}
impl<K: Debug, V: Debug, A: AllocRef + Clone> fmt::Debug for IntoIter<K, V, A> {
    // Debug-prints the not-yet-yielded (key, value) pairs; `iter()` presumably
    // borrows the remaining part of the table without consuming it.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<'a, K, V> Iterator for Keys<'a, K, V> {
    type Item = &'a K;

    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a K> {
        // Explicit branch instead of `Option::map` to keep the LLVM IR small.
        if let Some((key, _)) = self.inner.next() {
            Some(key)
        } else {
            None
        }
    }

    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        // Delegates to the underlying pair iterator's exact count.
        self.inner.len()
    }
}
// Once exhausted, the inner iterator keeps returning `None`.
impl<K, V> FusedIterator for Keys<'_, K, V> {}
impl<'a, K, V> Iterator for Values<'a, K, V> {
    type Item = &'a V;

    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a V> {
        // Explicit branch instead of `Option::map` to keep the LLVM IR small.
        if let Some((_, value)) = self.inner.next() {
            Some(value)
        } else {
            None
        }
    }

    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        // Delegates to the underlying pair iterator's exact count.
        self.inner.len()
    }
}
// Once exhausted, the inner iterator keeps returning `None`.
impl<K, V> FusedIterator for Values<'_, K, V> {}
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
    type Item = &'a mut V;

    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a mut V> {
        // Explicit branch instead of `Option::map` to keep the LLVM IR small.
        if let Some((_, value)) = self.inner.next() {
            Some(value)
        } else {
            None
        }
    }

    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        // Delegates to the underlying pair iterator's exact count.
        self.inner.len()
    }
}
// Once exhausted, the inner iterator keeps returning `None`.
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    // Debug-prints the remaining entries of the wrapped pair iterator as a
    // list; borrows via `self.inner.iter()`, so the mutable iterator's own
    // position is not advanced.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.inner.iter()).finish()
    }
}
impl<'a, K, V, A: AllocRef + Clone> Iterator for Drain<'a, K, V, A> {
    type Item = (K, V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        // The raw drain iterator yields owned pairs removed from the table.
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V, A: AllocRef + Clone> ExactSizeIterator for Drain<'_, K, V, A> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        // Delegates to the raw drain iterator's exact remaining count.
        self.inner.len()
    }
}
// Once exhausted, the inner iterator keeps returning `None`.
impl<K, V, A: AllocRef + Clone> FusedIterator for Drain<'_, K, V, A> {}
impl<K, V, A> fmt::Debug for Drain<'_, K, V, A>
where
    K: fmt::Debug,
    V: fmt::Debug,
    A: AllocRef + Clone,
{
    // Debug-prints the not-yet-drained (key, value) pairs; `self.iter()`
    // borrows them without removing anything from the table.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
// Methods shared by both variants of the `Entry` API. The `or_insert*` family
// is lazy: the default value/closure is only used when the entry is vacant.
impl<'a, K, V, S, A: AllocRef + Clone> Entry<'a, K, V, S, A> {
    /// Sets the value of the entry, and returns an OccupiedEntry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.entry("horseyland").insert(37);
    ///
    /// assert_eq!(entry.key(), &"horseyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S, A>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            // Overwrite in place (old value is dropped) and return the same
            // occupied entry.
            Entry::Occupied(mut entry) => {
                entry.insert(value);
                entry
            }
            // Insert the pair and promote the vacant entry to an occupied one.
            Entry::Vacant(entry) => entry.insert_entry(value),
        }
    }
    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland").or_insert(3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.entry("poneyland").or_insert(10) *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // `default` was already constructed by the caller; prefer
        // `or_insert_with` when building the default is expensive.
        match self {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => entry.insert(default),
        }
    }
    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    /// let s = "hoho".to_string();
    ///
    /// map.entry("poneyland").or_insert_with(|| s);
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Occupied(entry) => entry.into_mut(),
            // The closure only runs when the slot is actually empty.
            Entry::Vacant(entry) => entry.insert(default()),
        }
    }
    /// Ensures a value is in the entry by inserting, if empty, the result of the default function.
    /// This method allows for generating key-derived values for insertion by providing the default
    /// function a reference to the key that was moved during the `.entry(key)` method call.
    ///
    /// The reference to the moved key is provided so that cloning or copying the key is
    /// unnecessary, unlike with `.or_insert_with(|| ... )`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, usize> = HashMap::new();
    ///
    /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
    ///
    /// assert_eq!(map["poneyland"], 9);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => {
                // Compute the value from the stored key before it is moved
                // into the table by `insert`.
                let value = default(entry.key());
                entry.insert(value)
            }
        }
    }
    /// Returns a reference to this entry's key.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        // Both variants can produce the key; occupied entries read it from the
        // table, vacant ones from the key held for a pending insert.
        match *self {
            Entry::Occupied(ref entry) => entry.key(),
            Entry::Vacant(ref entry) => entry.key(),
        }
    }
    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(self, f: F) -> Self
    where
        F: FnOnce(&mut V),
    {
        match self {
            // Run the closure on the existing value, then rebuild the variant
            // so the entry can keep being chained.
            Entry::Occupied(mut entry) => {
                f(entry.get_mut());
                Entry::Occupied(entry)
            }
            // Nothing to modify; pass the vacant entry through untouched.
            Entry::Vacant(entry) => Entry::Vacant(entry),
        }
    }
    /// Provides shared access to the key and owned access to the value of
    /// an occupied entry and allows to replace or remove it based on the
    /// value of the returned option.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// let entry = map
    ///     .entry("poneyland")
    ///     .and_replace_entry_with(|_k, _v| panic!());
    ///
    /// match entry {
    ///     Entry::Vacant(e) => {
    ///         assert_eq!(e.key(), &"poneyland");
    ///     }
    ///     Entry::Occupied(_) => panic!(),
    /// }
    ///
    /// map.insert("poneyland", 42);
    ///
    /// let entry = map
    ///     .entry("poneyland")
    ///     .and_replace_entry_with(|k, v| {
    ///         assert_eq!(k, &"poneyland");
    ///         assert_eq!(v, 42);
    ///         Some(v + 1)
    ///     });
    ///
    /// match entry {
    ///     Entry::Occupied(e) => {
    ///         assert_eq!(e.key(), &"poneyland");
    ///         assert_eq!(e.get(), &43);
    ///     }
    ///     Entry::Vacant(_) => panic!(),
    /// }
    ///
    /// assert_eq!(map["poneyland"], 43);
    ///
    /// let entry = map
    ///     .entry("poneyland")
    ///     .and_replace_entry_with(|_k, _v| None);
    ///
    /// match entry {
    ///     Entry::Vacant(e) => assert_eq!(e.key(), &"poneyland"),
    ///     Entry::Occupied(_) => panic!(),
    /// }
    ///
    /// assert!(!map.contains_key("poneyland"));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_replace_entry_with<F>(self, f: F) -> Self
    where
        F: FnOnce(&K, V) -> Option<V>,
    {
        match self {
            Entry::Occupied(entry) => entry.replace_entry_with(f),
            // Vacant entries are returned as-is; the closure is never called.
            Entry::Vacant(_) => self,
        }
    }
}
impl<'a, K, V: Default, S, A: AllocRef + Clone> Entry<'a, K, V, S, A> {
    /// Ensures a value is in the entry by inserting the default value if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
    /// map.entry("poneyland").or_default();
    ///
    /// assert_eq!(map["poneyland"], None);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_default(self) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Occupied(occupied) => occupied.into_mut(),
            // `V::default()` is only evaluated when the slot is actually empty.
            Entry::Vacant(vacant) => vacant.insert(V::default()),
        }
    }
}
// An `OccupiedEntry` pins a live bucket (`self.elem`) in the table; the unsafe
// dereferences below all rely on that bucket staying valid for the entry's
// lifetime, which in turn holds because the entry mutably borrows the map.
impl<'a, K, V, S, A: AllocRef + Clone> OccupiedEntry<'a, K, V, S, A> {
    /// Gets a reference to the key in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        // SAFETY(review): `elem` points at a live bucket for as long as this
        // entry borrows the map — confirm against the Bucket invariants.
        unsafe { &self.elem.as_ref().0 }
    }
    /// Take the ownership of the key and value from the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     // We delete the entry from the map.
    ///     o.remove_entry();
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        // SAFETY(review): the bucket is live and belongs to this table, so the
        // raw table may unlink it and move the pair out.
        unsafe { self.table.table.remove(self.elem) }
    }
    /// Gets a reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.get(), &12);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        // SAFETY(review): same bucket-liveness argument as `key`.
        unsafe { &self.elem.as_ref().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    ///
    /// If you need a reference to the `OccupiedEntry` which may outlive the
    /// destruction of the `Entry` value, see [`into_mut`].
    ///
    /// [`into_mut`]: #method.into_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     *o.get_mut() += 10;
    ///     assert_eq!(*o.get(), 22);
    ///
    ///     // We can use the same Entry multiple times.
    ///     *o.get_mut() += 2;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 24);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        // SAFETY(review): the `&mut self` receiver guarantees this is the only
        // active borrow of the bucket's value.
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    ///
    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
    ///
    /// [`get_mut`]: #method.get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     *o.into_mut() += 10;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 22);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        // SAFETY(review): consuming the entry extends the borrow to the map's
        // lifetime `'a`, so the returned reference stays unique.
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     assert_eq!(o.insert(15), 12);
    /// }
    ///
    /// assert_eq!(map["poneyland"], 15);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, mut value: V) -> V {
        // Swap the new value into the bucket and return the displaced one.
        let old_value = self.get_mut();
        mem::swap(&mut value, old_value);
        value
    }
    /// Takes the value out of the entry, and returns it.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.remove(), 12);
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        // Remove the whole pair and keep only the value.
        self.remove_entry().1
    }
    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
    /// the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// map.insert(Rc::new("Stringthing".to_string()), 15);
    ///
    /// let my_key = Rc::new("Stringthing".to_string());
    ///
    /// if let Entry::Occupied(entry) = map.entry(my_key) {
    ///     // Also replace the key with a handle to our other key.
    ///     let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
    /// }
    ///
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry(self, value: V) -> (K, V) {
        // SAFETY(review): live-bucket argument as above.
        // NOTE(review): assumes entries reached via `HashMap::entry` always
        // carry `key: Some(..)`; the `unwrap` would panic otherwise — confirm
        // against the entry construction sites.
        let entry = unsafe { self.elem.as_mut() };
        let old_key = mem::replace(&mut entry.0, self.key.unwrap());
        let old_value = mem::replace(&mut entry.1, value);
        (old_key, old_value)
    }
    /// Replaces the key in the hash map with the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// let mut known_strings: Vec<Rc<String>> = Vec::new();
    ///
    /// // Initialise known strings, run program, etc.
    ///
    /// reclaim_memory(&mut map, &known_strings);
    ///
    /// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) {
    ///     for s in known_strings {
    ///         if let Entry::Occupied(entry) = map.entry(s.clone()) {
    ///             // Replaces the entry's key with our version of it in `known_strings`.
    ///             entry.replace_key();
    ///         }
    ///     }
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_key(self) -> K {
        // SAFETY(review): live-bucket argument as above; same `Some`-key
        // assumption as `replace_entry`.
        let entry = unsafe { self.elem.as_mut() };
        mem::replace(&mut entry.0, self.key.unwrap())
    }
    /// Provides shared access to the key and owned access to the value of
    /// the entry and allows to replace or remove it based on the
    /// value of the returned option.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.insert("poneyland", 42);
    ///
    /// let entry = match map.entry("poneyland") {
    ///     Entry::Occupied(e) => {
    ///         e.replace_entry_with(|k, v| {
    ///             assert_eq!(k, &"poneyland");
    ///             assert_eq!(v, 42);
    ///             Some(v + 1)
    ///         })
    ///     }
    ///     Entry::Vacant(_) => panic!(),
    /// };
    ///
    /// match entry {
    ///     Entry::Occupied(e) => {
    ///         assert_eq!(e.key(), &"poneyland");
    ///         assert_eq!(e.get(), &43);
    ///     }
    ///     Entry::Vacant(_) => panic!(),
    /// }
    ///
    /// assert_eq!(map["poneyland"], 43);
    ///
    /// let entry = match map.entry("poneyland") {
    ///     Entry::Occupied(e) => e.replace_entry_with(|_k, _v| None),
    ///     Entry::Vacant(_) => panic!(),
    /// };
    ///
    /// match entry {
    ///     Entry::Vacant(e) => {
    ///         assert_eq!(e.key(), &"poneyland");
    ///     }
    ///     Entry::Occupied(_) => panic!(),
    /// }
    ///
    /// assert!(!map.contains_key("poneyland"));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry_with<F>(self, f: F) -> Entry<'a, K, V, S, A>
    where
        F: FnOnce(&K, V) -> Option<V>,
    {
        // SAFETY(review): the bucket is live and owned by `self.table`; the
        // raw `replace_bucket_with` either refills or removes it in place.
        unsafe {
            // Captures the key when the closure asks for removal, so a
            // VacantEntry holding that key can be handed back.
            let mut spare_key = None;
            self.table
                .table
                .replace_bucket_with(self.elem.clone(), |(key, value)| {
                    if let Some(new_value) = f(&key, value) {
                        Some((key, new_value))
                    } else {
                        spare_key = Some(key);
                        None
                    }
                });
            if let Some(key) = spare_key {
                // The pair was removed: report the now-empty slot as vacant.
                Entry::Vacant(VacantEntry {
                    hash: self.hash,
                    key,
                    table: self.table,
                })
            } else {
                // The value was replaced in place: still occupied.
                Entry::Occupied(self)
            }
        }
    }
}
// A `VacantEntry` owns the key that was passed to `HashMap::entry` and keeps
// the hash that was already computed for it, so insertion needs no re-hash.
impl<'a, K, V, S, A: AllocRef + Clone> VacantEntry<'a, K, V, S, A> {
    /// Gets a reference to the key that would be used when inserting a value
    /// through the `VacantEntry`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        &self.key
    }
    /// Take ownership of the key.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// if let Entry::Vacant(v) = map.entry("poneyland") {
    ///     v.into_key();
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> K {
        self.key
    }
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// if let Entry::Vacant(o) = map.entry("poneyland") {
    ///     o.insert(37);
    /// }
    /// assert_eq!(map["poneyland"], 37);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        let table = &mut self.table.table;
        // Reuses the pre-computed hash; `make_hasher` is only needed if the
        // insertion triggers a resize and entries must be re-hashed.
        let entry = table.insert_entry(
            self.hash,
            (self.key, value),
            make_hasher(&self.table.hash_builder),
        );
        &mut entry.1
    }
    // Like `insert`, but returns an `OccupiedEntry` for the freshly inserted
    // pair instead of a value reference. Backs `Entry::insert`. `key: None`
    // because the key has just been moved into the table.
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S, A>
    where
        K: Hash,
        S: BuildHasher,
    {
        let elem = self.table.table.insert(
            self.hash,
            (self.key, value),
            make_hasher(&self.table.hash_builder),
        );
        OccupiedEntry {
            hash: self.hash,
            key: None,
            elem,
            table: self.table,
        }
    }
}
impl<K, V, S, A> FromIterator<(K, V)> for HashMap<K, V, S, A>
where
    K: Eq + Hash,
    S: BuildHasher + Default,
    A: Default + AllocRef + Clone,
{
    /// Builds a map from an iterator of key-value pairs; a later duplicate of
    /// a key overwrites the earlier value.
    #[cfg_attr(feature = "inline-more", inline)]
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let iter = iter.into_iter();
        // Pre-size from the iterator's lower bound to limit rehashing.
        let capacity = iter.size_hint().0;
        let mut map = Self::with_capacity_and_hasher_in(capacity, S::default(), A::default());
        for (key, value) in iter {
            map.insert(key, value);
        }
        map
    }
}
/// Inserts all new key-values from the iterator and replaces values with existing
/// keys with new values returned from the iterator.
impl<K, V, S, A> Extend<(K, V)> for HashMap<K, V, S, A>
where
    K: Eq + Hash,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        // Keys may already be present or repeat within the iterator. An empty
        // map reserves the full lower size hint; a non-empty map reserves half
        // of it (rounded up), which bounds the number of resizes at two in the
        // worst case.
        let iter = iter.into_iter();
        let hint = iter.size_hint().0;
        let reserve = if self.is_empty() { hint } else { (hint + 1) / 2 };
        self.reserve(reserve);
        for (key, value) in iter {
            self.insert(key, value);
        }
    }

    #[inline]
    #[cfg(feature = "nightly")]
    fn extend_one(&mut self, (k, v): (K, V)) {
        self.insert(k, v);
    }

    #[inline]
    #[cfg(feature = "nightly")]
    fn extend_reserve(&mut self, additional: usize) {
        // Same reservation policy as `extend` above.
        let reserve = if self.is_empty() {
            additional
        } else {
            (additional + 1) / 2
        };
        self.reserve(reserve);
    }
}
impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap<K, V, S, A>
where
    K: Eq + Hash + Copy,
    V: Copy,
    S: BuildHasher,
    A: AllocRef + Clone,
{
    /// Extends the map with copies of the referenced keys and values.
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
        // `K` and `V` are `Copy`, so dereferencing is a cheap bitwise copy;
        // delegate to the owned-pair `extend` for the reservation policy.
        self.extend(iter.into_iter().map(|(key, value)| (*key, *value)));
    }

    #[inline]
    #[cfg(feature = "nightly")]
    fn extend_one(&mut self, (k, v): (&'a K, &'a V)) {
        self.insert(*k, *v);
    }

    #[inline]
    #[cfg(feature = "nightly")]
    fn extend_reserve(&mut self, additional: usize) {
        Extend::<(K, V)>::extend_reserve(self, additional);
    }
}
// Compile-time-only check that the map and its iterator types remain covariant
// in their key/value lifetime parameters: each coercion below only compiles if
// `'static` can shrink to an arbitrary `'new`. Never called at runtime.
#[allow(dead_code)]
fn assert_covariance() {
    fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
        v
    }
    fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
        v
    }
    fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
        v
    }
    fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
        v
    }
    fn into_iter_key<'new, A: AllocRef + Clone>(
        v: IntoIter<&'static str, u8, A>,
    ) -> IntoIter<&'new str, u8, A> {
        v
    }
    fn into_iter_val<'new, A: AllocRef + Clone>(
        v: IntoIter<u8, &'static str, A>,
    ) -> IntoIter<u8, &'new str, A> {
        v
    }
    fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
        v
    }
    fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
        v
    }
    fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
        v
    }
    fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
        v
    }
    fn drain<'new>(
        d: Drain<'static, &'static str, &'static str>,
    ) -> Drain<'new, &'new str, &'new str> {
        d
    }
}
#[cfg(test)]
mod test_map {
use super::DefaultHashBuilder;
use super::Entry::{Occupied, Vacant};
use super::{HashMap, RawEntryMut};
use crate::TryReserveError::*;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use std::cell::RefCell;
use std::usize;
use std::vec::Vec;
#[test]
fn test_zero_capacities() {
    // Every construction path must start with zero allocated capacity, and
    // shrink_to_fit / reserve(0) on an emptied map must not keep an allocation.
    type HM = HashMap<i32, i32>;
    let m = HM::new();
    assert_eq!(m.capacity(), 0);
    let m = HM::default();
    assert_eq!(m.capacity(), 0);
    let m = HM::with_hasher(DefaultHashBuilder::default());
    assert_eq!(m.capacity(), 0);
    let m = HM::with_capacity(0);
    assert_eq!(m.capacity(), 0);
    let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
    assert_eq!(m.capacity(), 0);
    let mut m = HM::new();
    m.insert(1, 1);
    m.insert(2, 2);
    m.remove(&1);
    m.remove(&2);
    m.shrink_to_fit();
    assert_eq!(m.capacity(), 0);
    let mut m = HM::new();
    m.reserve(0);
    assert_eq!(m.capacity(), 0);
}
#[test]
fn test_create_capacity_zero() {
    // A zero-capacity map must still grow and answer lookups correctly.
    let mut m = HashMap::with_capacity(0);
    assert!(m.insert(1, 1).is_none());
    assert!(m.contains_key(&1));
    assert!(!m.contains_key(&0));
}
#[test]
fn test_insert() {
    // Fresh inserts return None and are retrievable afterwards.
    let mut m = HashMap::new();
    assert_eq!(m.len(), 0);
    assert!(m.insert(1, 2).is_none());
    assert_eq!(m.len(), 1);
    assert!(m.insert(2, 4).is_none());
    assert_eq!(m.len(), 2);
    assert_eq!(*m.get(&1).unwrap(), 2);
    assert_eq!(*m.get(&2).unwrap(), 4);
}
#[test]
fn test_clone() {
    // A clone carries over all entries and the length.
    let mut m = HashMap::new();
    assert_eq!(m.len(), 0);
    assert!(m.insert(1, 2).is_none());
    assert_eq!(m.len(), 1);
    assert!(m.insert(2, 4).is_none());
    assert_eq!(m.len(), 2);
    let m2 = m.clone();
    assert_eq!(*m2.get(&1).unwrap(), 2);
    assert_eq!(*m2.get(&2).unwrap(), 4);
    assert_eq!(m2.len(), 2);
}
#[test]
fn test_clone_from() {
    // clone_from must replicate the source map's contents into the target.
    let mut m = HashMap::new();
    let mut m2 = HashMap::new();
    assert_eq!(m.len(), 0);
    assert!(m.insert(1, 2).is_none());
    assert_eq!(m.len(), 1);
    assert!(m.insert(2, 4).is_none());
    assert_eq!(m.len(), 2);
    m2.clone_from(&m);
    assert_eq!(*m2.get(&1).unwrap(), 2);
    assert_eq!(*m2.get(&2).unwrap(), 4);
    assert_eq!(m2.len(), 2);
}
// Per-key live-instance counter used by the drop-tracking tests below:
// slot k holds the number of currently-alive `Droppable { k }` values.
thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
#[derive(Hash, PartialEq, Eq)]
struct Droppable {
    k: usize,
}
impl Droppable {
    // Constructing increments the live count for key `k`.
    fn new(k: usize) -> Droppable {
        DROP_VECTOR.with(|slot| {
            slot.borrow_mut()[k] += 1;
        });
        Droppable { k }
    }
}
impl Drop for Droppable {
    // Dropping decrements the live count, so leaks/double-drops show up as
    // non-zero or negative slots.
    fn drop(&mut self) {
        DROP_VECTOR.with(|slot| {
            slot.borrow_mut()[self.k] -= 1;
        });
    }
}
impl Clone for Droppable {
    // Cloning goes through `new` so the copy is counted as well.
    fn clone(&self) -> Self {
        Droppable::new(self.k)
    }
}
#[test]
fn test_drops() {
    // Verifies that inserts keep exactly one live instance per key/value and
    // that removing or dropping the map drops them exactly once.
    DROP_VECTOR.with(|slot| {
        *slot.borrow_mut() = vec![0; 200];
    });
    {
        let mut m = HashMap::new();
        DROP_VECTOR.with(|v| {
            for i in 0..200 {
                assert_eq!(v.borrow()[i], 0);
            }
        });
        // Keys use counters 0..100, values use 100..200.
        for i in 0..100 {
            let d1 = Droppable::new(i);
            let d2 = Droppable::new(i + 100);
            m.insert(d1, d2);
        }
        DROP_VECTOR.with(|v| {
            for i in 0..200 {
                assert_eq!(v.borrow()[i], 1);
            }
        });
        // Removing half the entries: the probe key `k` is dropped immediately,
        // the returned value `v` at the end of each iteration.
        for i in 0..50 {
            let k = Droppable::new(i);
            let v = m.remove(&k);
            assert!(v.is_some());
            DROP_VECTOR.with(|v| {
                assert_eq!(v.borrow()[i], 1);
                assert_eq!(v.borrow()[i + 100], 1);
            });
        }
        DROP_VECTOR.with(|v| {
            for i in 0..50 {
                assert_eq!(v.borrow()[i], 0);
                assert_eq!(v.borrow()[i + 100], 0);
            }
            for i in 50..100 {
                assert_eq!(v.borrow()[i], 1);
                assert_eq!(v.borrow()[i + 100], 1);
            }
        });
    }
    // Dropping the map must release the remaining 50 entries.
    DROP_VECTOR.with(|v| {
        for i in 0..200 {
            assert_eq!(v.borrow()[i], 0);
        }
    });
}
#[test]
fn test_into_iter_drops() {
    // A partially consumed IntoIter must drop the yielded half as it goes and
    // the un-yielded half when the iterator itself is dropped.
    DROP_VECTOR.with(|v| {
        *v.borrow_mut() = vec![0; 200];
    });
    let hm = {
        let mut hm = HashMap::new();
        DROP_VECTOR.with(|v| {
            for i in 0..200 {
                assert_eq!(v.borrow()[i], 0);
            }
        });
        for i in 0..100 {
            let d1 = Droppable::new(i);
            let d2 = Droppable::new(i + 100);
            hm.insert(d1, d2);
        }
        DROP_VECTOR.with(|v| {
            for i in 0..200 {
                assert_eq!(v.borrow()[i], 1);
            }
        });
        hm
    };
    // By the way, ensure that cloning doesn't screw up the dropping.
    drop(hm.clone());
    {
        let mut half = hm.into_iter().take(50);
        DROP_VECTOR.with(|v| {
            for i in 0..200 {
                assert_eq!(v.borrow()[i], 1);
            }
        });
        // Consume exactly 50 pairs; iteration order is arbitrary, so only the
        // surviving counts can be checked, not which keys survived.
        for _ in half.by_ref() {}
        DROP_VECTOR.with(|v| {
            let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
            let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
            assert_eq!(nk, 50);
            assert_eq!(nv, 50);
        });
    };
    // Dropping the iterator releases the remaining 50 pairs.
    DROP_VECTOR.with(|v| {
        for i in 0..200 {
            assert_eq!(v.borrow()[i], 0);
        }
    });
}
#[test]
fn test_empty_remove() {
    // Removing from an empty map is a no-op returning None.
    let mut m: HashMap<i32, bool> = HashMap::new();
    assert_eq!(m.remove(&0), None);
}
#[test]
fn test_empty_entry() {
    // entry() on a missing key yields Vacant; or_insert then populates it.
    let mut m: HashMap<i32, bool> = HashMap::new();
    match m.entry(0) {
        Occupied(_) => panic!(),
        Vacant(_) => {}
    }
    assert!(*m.entry(0).or_insert(true));
    assert_eq!(m.len(), 1);
}
#[test]
fn test_empty_iter() {
    // Every iterator flavour over an empty map terminates immediately.
    let mut m: HashMap<i32, bool> = HashMap::new();
    assert_eq!(m.drain().next(), None);
    assert_eq!(m.keys().next(), None);
    assert_eq!(m.values().next(), None);
    assert_eq!(m.values_mut().next(), None);
    assert_eq!(m.iter().next(), None);
    assert_eq!(m.iter_mut().next(), None);
    assert_eq!(m.len(), 0);
    assert!(m.is_empty());
    assert_eq!(m.into_iter().next(), None);
}
#[test]
#[cfg_attr(miri, ignore)] // FIXME: takes too long
fn test_lots_of_insertions() {
    // Stress test: after each single insert/remove, every key that should be
    // present is, and every key that should be absent is.
    let mut m = HashMap::new();
    // Try this a few times to make sure we never screw up the hashmap's
    // internal state.
    for _ in 0..10 {
        assert!(m.is_empty());
        for i in 1..1001 {
            assert!(m.insert(i, i).is_none());
            for j in 1..=i {
                let r = m.get(&j);
                assert_eq!(r, Some(&j));
            }
            for j in i + 1..1001 {
                let r = m.get(&j);
                assert_eq!(r, None);
            }
        }
        for i in 1001..2001 {
            assert!(!m.contains_key(&i));
        }
        // remove forwards
        for i in 1..1001 {
            assert!(m.remove(&i).is_some());
            for j in 1..=i {
                assert!(!m.contains_key(&j));
            }
            for j in i + 1..1001 {
                assert!(m.contains_key(&j));
            }
        }
        for i in 1..1001 {
            assert!(!m.contains_key(&i));
        }
        for i in 1..1001 {
            assert!(m.insert(i, i).is_none());
        }
        // remove backwards
        for i in (1..1001).rev() {
            assert!(m.remove(&i).is_some());
            for j in i..1001 {
                assert!(!m.contains_key(&j));
            }
            for j in 1..i {
                assert!(m.contains_key(&j));
            }
        }
    }
}
#[test]
fn test_find_mut() {
    // get_mut allows in-place mutation of an existing value.
    let mut m = HashMap::new();
    assert!(m.insert(1, 12).is_none());
    assert!(m.insert(2, 8).is_none());
    assert!(m.insert(5, 14).is_none());
    let new = 100;
    match m.get_mut(&5) {
        None => panic!(),
        Some(x) => *x = new,
    }
    assert_eq!(m.get(&5), Some(&new));
}
#[test]
fn test_insert_overwrite() {
    // Re-inserting an existing key returns the old value and stores the new.
    let mut m = HashMap::new();
    assert!(m.insert(1, 2).is_none());
    assert_eq!(*m.get(&1).unwrap(), 2);
    assert!(!m.insert(1, 3).is_none());
    assert_eq!(*m.get(&1).unwrap(), 3);
}
#[test]
fn test_insert_conflicts() {
    // Keys 1, 5, 9 are chosen to collide in a small table; all must coexist.
    let mut m = HashMap::with_capacity(4);
    assert!(m.insert(1, 2).is_none());
    assert!(m.insert(5, 3).is_none());
    assert!(m.insert(9, 4).is_none());
    assert_eq!(*m.get(&9).unwrap(), 4);
    assert_eq!(*m.get(&5).unwrap(), 3);
    assert_eq!(*m.get(&1).unwrap(), 2);
}
#[test]
fn test_conflict_remove() {
    // Removing one key from a colliding cluster must not lose the others.
    let mut m = HashMap::with_capacity(4);
    assert!(m.insert(1, 2).is_none());
    assert_eq!(*m.get(&1).unwrap(), 2);
    assert!(m.insert(5, 3).is_none());
    assert_eq!(*m.get(&1).unwrap(), 2);
    assert_eq!(*m.get(&5).unwrap(), 3);
    assert!(m.insert(9, 4).is_none());
    assert_eq!(*m.get(&1).unwrap(), 2);
    assert_eq!(*m.get(&5).unwrap(), 3);
    assert_eq!(*m.get(&9).unwrap(), 4);
    assert!(m.remove(&1).is_some());
    assert_eq!(*m.get(&9).unwrap(), 4);
    assert_eq!(*m.get(&5).unwrap(), 3);
}
#[test]
fn test_is_empty() {
    // is_empty must track insert/remove.
    let mut m = HashMap::with_capacity(4);
    assert!(m.insert(1, 2).is_none());
    assert!(!m.is_empty());
    assert!(m.remove(&1).is_some());
    assert!(m.is_empty());
}
#[test]
fn test_remove() {
    // remove returns the value once, then None.
    let mut m = HashMap::new();
    m.insert(1, 2);
    assert_eq!(m.remove(&1), Some(2));
    assert_eq!(m.remove(&1), None);
}
#[test]
fn test_remove_entry() {
    // remove_entry returns the full (key, value) pair.
    let mut m = HashMap::new();
    m.insert(1, 2);
    assert_eq!(m.remove_entry(&1), Some((1, 2)));
    assert_eq!(m.remove(&1), None);
}
#[test]
fn test_iterate() {
    // Iteration visits every entry exactly once; tracked via a 32-bit mask.
    let mut m = HashMap::with_capacity(4);
    for i in 0..32 {
        assert!(m.insert(i, i * 2).is_none());
    }
    assert_eq!(m.len(), 32);
    let mut observed: u32 = 0;
    for (k, v) in &m {
        assert_eq!(*v, *k * 2);
        observed |= 1 << *k;
    }
    assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_keys() {
    // keys() yields each key exactly once, in arbitrary order.
    let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
    let map: HashMap<_, _> = vec.into_iter().collect();
    let keys: Vec<_> = map.keys().cloned().collect();
    assert_eq!(keys.len(), 3);
    assert!(keys.contains(&1));
    assert!(keys.contains(&2));
    assert!(keys.contains(&3));
}
#[test]
fn test_values() {
    // values() yields each value exactly once, in arbitrary order.
    let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
    let map: HashMap<_, _> = vec.into_iter().collect();
    let values: Vec<_> = map.values().cloned().collect();
    assert_eq!(values.len(), 3);
    assert!(values.contains(&'a'));
    assert!(values.contains(&'b'));
    assert!(values.contains(&'c'));
}
#[test]
fn test_values_mut() {
    // values_mut() allows in-place mutation of every value.
    let vec = vec![(1, 1), (2, 2), (3, 3)];
    let mut map: HashMap<_, _> = vec.into_iter().collect();
    for value in map.values_mut() {
        *value = (*value) * 2
    }
    let values: Vec<_> = map.values().cloned().collect();
    assert_eq!(values.len(), 3);
    assert!(values.contains(&2));
    assert!(values.contains(&4));
    assert!(values.contains(&6));
}
#[test]
fn test_find() {
    // get() is None for a missing key and Some(&v) after insertion.
    let mut m = HashMap::new();
    assert!(m.get(&1).is_none());
    m.insert(1, 2);
    match m.get(&1) {
        None => panic!(),
        Some(v) => assert_eq!(*v, 2),
    }
}
#[test]
fn test_eq() {
let mut m1 = HashMap::new();
m1.insert(1, 2);
m1.insert(2, 3);
m1.insert(3, 4);
let mut m2 = HashMap::new();
m2.insert(1, 2);
m2.insert(2, 3);
assert!(m1 != m2);
m2.insert(3, 4);
assert_eq!(m1, m2);
}
#[test]
fn test_show() {
    // Debug output lists pairs in some order; an empty map prints "{}".
    let mut map = HashMap::new();
    map.insert(1, 2);
    map.insert(3, 4);
    let rendered = format!("{:?}", map);
    assert!(rendered == "{1: 2, 3: 4}" || rendered == "{3: 4, 1: 2}");
    let empty: HashMap<i32, i32> = HashMap::new();
    assert_eq!(format!("{:?}", empty), "{}");
}
#[test]
fn test_expand() {
    // Inserting until the bucket count changes must keep len() accurate.
    let mut map = HashMap::new();
    assert_eq!(map.len(), 0);
    assert!(map.is_empty());
    let initial_buckets = map.raw_capacity();
    let mut inserted = 0;
    while map.raw_capacity() == initial_buckets {
        map.insert(inserted, inserted);
        inserted += 1;
    }
    assert_eq!(map.len(), inserted);
    assert!(!map.is_empty());
}
#[test]
fn test_behavior_resize_policy() {
    // Pins hashbrown's exact growth/shrink policy: growth happens when the
    // load factor is exceeded, removals never shrink, and `shrink_to_fit`
    // drops to the smallest bucket count that can hold the current length.
    let mut m = HashMap::new();
    assert_eq!(m.len(), 0);
    // An empty map reports a raw capacity of 1 (no real storage yet).
    assert_eq!(m.raw_capacity(), 1);
    assert!(m.is_empty());
    m.insert(0, 0);
    m.remove(&0);
    assert!(m.is_empty());
    let initial_raw_cap = m.raw_capacity();
    m.reserve(initial_raw_cap);
    let raw_cap = m.raw_capacity();
    // Reserving `initial_raw_cap` extra slots doubles the bucket count.
    assert_eq!(raw_cap, initial_raw_cap * 2);
    let mut i = 0;
    for _ in 0..raw_cap * 3 / 4 {
        m.insert(i, i);
        i += 1;
    }
    // three quarters full
    assert_eq!(m.len(), i);
    assert_eq!(m.raw_capacity(), raw_cap);
    for _ in 0..raw_cap / 4 {
        m.insert(i, i);
        i += 1;
    }
    // half full
    let new_raw_cap = m.raw_capacity();
    // Passing the load factor must have doubled the bucket count.
    assert_eq!(new_raw_cap, raw_cap * 2);
    for _ in 0..raw_cap / 2 - 1 {
        i -= 1;
        m.remove(&i);
        // Plain removals never shrink the table.
        assert_eq!(m.raw_capacity(), new_raw_cap);
    }
    // A little more than one quarter full.
    m.shrink_to_fit();
    assert_eq!(m.raw_capacity(), raw_cap);
    // again, a little more than half full
    for _ in 0..raw_cap / 2 {
        i -= 1;
        m.remove(&i);
    }
    m.shrink_to_fit();
    assert_eq!(m.len(), i);
    assert!(!m.is_empty());
    assert_eq!(m.raw_capacity(), initial_raw_cap);
}
#[test]
fn test_reserve_shrink_to_fit() {
    // reserve() must make room for the requested extra elements without
    // reallocating, and shrink_to_fit() must drop unused capacity.
    let mut map = HashMap::new();
    map.insert(0, 0);
    map.remove(&0);
    assert!(map.capacity() >= map.len());
    for key in 0..128 {
        map.insert(key, key);
    }
    map.reserve(256);
    let reserved_cap = map.capacity();
    for key in 128..(128 + 256) {
        map.insert(key, key);
        // No reallocation may happen while within the reserved headroom.
        assert_eq!(map.capacity(), reserved_cap);
    }
    for key in 100..(128 + 256) {
        assert_eq!(map.remove(&key), Some(key));
    }
    map.shrink_to_fit();
    assert_eq!(map.len(), 100);
    assert!(!map.is_empty());
    assert!(map.capacity() >= map.len());
    for key in 0..100 {
        assert_eq!(map.remove(&key), Some(key));
    }
    map.shrink_to_fit();
    map.insert(0, 0);
    assert_eq!(map.len(), 1);
    assert!(map.capacity() >= map.len());
    assert_eq!(map.remove(&0), Some(0));
}
#[test]
fn test_from_iter() {
    // collect() deduplicates keys: the duplicate (2, 2) pair collapses.
    let pairs = [(1, 1), (2, 2), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
    let map: HashMap<_, _> = pairs.iter().cloned().collect();
    for &(key, value) in &pairs {
        assert_eq!(map.get(&key), Some(&value));
    }
    assert_eq!(map.iter().len(), pairs.len() - 1);
}
#[test]
fn test_size_hint() {
    // After consuming 3 of 6 entries the hint must be exactly (3, Some(3)).
    let pairs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
    let map: HashMap<_, _> = pairs.iter().cloned().collect();
    let mut entries = map.iter();
    entries.by_ref().take(3).for_each(drop);
    assert_eq!(entries.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_len() {
    // ExactSizeIterator: len() reflects the remaining (not total) entries.
    let pairs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
    let map: HashMap<_, _> = pairs.iter().cloned().collect();
    let mut entries = map.iter();
    entries.by_ref().take(3).for_each(drop);
    assert_eq!(entries.len(), 3);
}
#[test]
fn test_mut_size_hint() {
    // Same size_hint contract for the mutable iterator.
    let pairs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
    let mut map: HashMap<_, _> = pairs.iter().cloned().collect();
    let mut entries = map.iter_mut();
    entries.by_ref().take(3).for_each(drop);
    assert_eq!(entries.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_mut_len() {
    // Same ExactSizeIterator contract for the mutable iterator.
    let pairs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
    let mut map: HashMap<_, _> = pairs.iter().cloned().collect();
    let mut entries = map.iter_mut();
    entries.by_ref().take(3).for_each(drop);
    assert_eq!(entries.len(), 3);
}
#[test]
fn test_index() {
    // Index syntax returns the value stored under the key.
    let mut map = HashMap::new();
    for &(k, v) in &[(1, 2), (2, 1), (3, 4)] {
        map.insert(k, v);
    }
    assert_eq!(map[&2], 1);
}
#[test]
#[should_panic]
fn test_index_nonexistent() {
    // Indexing with an absent key must panic.
    let mut map = HashMap::new();
    for &(k, v) in &[(1, 2), (2, 1), (3, 4)] {
        map.insert(k, v);
    }
    let _ = map[&4];
}
#[test]
fn test_entry() {
    // Exercise the Entry API: overwrite, in-place update, removal, insertion.
    let pairs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
    let mut map: HashMap<_, _> = pairs.iter().cloned().collect();
    // Overwrite through an occupied entry.
    match map.entry(1) {
        Vacant(_) => unreachable!(),
        Occupied(mut entry) => {
            assert_eq!(entry.get(), &10);
            assert_eq!(entry.insert(100), 10);
        }
    }
    assert_eq!(map.get(&1), Some(&100));
    assert_eq!(map.len(), 6);
    // Update in place through get_mut.
    match map.entry(2) {
        Vacant(_) => unreachable!(),
        Occupied(mut entry) => {
            let slot = entry.get_mut();
            *slot *= 10;
        }
    }
    assert_eq!(map.get(&2), Some(&200));
    assert_eq!(map.len(), 6);
    // Remove through an occupied entry.
    match map.entry(3) {
        Vacant(_) => unreachable!(),
        Occupied(entry) => assert_eq!(entry.remove(), 30),
    }
    assert_eq!(map.get(&3), None);
    assert_eq!(map.len(), 5);
    // Insert through a vacant entry.
    match map.entry(10) {
        Occupied(_) => unreachable!(),
        Vacant(entry) => assert_eq!(*entry.insert(1000), 1000),
    }
    assert_eq!(map.get(&10), Some(&1000));
    assert_eq!(map.len(), 6);
}
#[test]
fn test_entry_take_doesnt_corrupt() {
    #![allow(deprecated)] //rand
    // Test for #19292
    // Invariant checked after every mutation: every key yielded by keys()
    // must still be reachable through contains_key().
    fn check(m: &HashMap<i32, ()>) {
        for k in m.keys() {
            assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
        }
    }
    let mut m = HashMap::new();
    // Fixed seed keeps the test deterministic across runs.
    let mut rng = {
        let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
        SmallRng::from_seed(seed)
    };
    // Populate the map with some items.
    for _ in 0..50 {
        let x = rng.gen_range(-10, 10);
        m.insert(x, ());
    }
    // Randomly remove entries through the Entry API and verify the table
    // stays internally consistent after each removal.
    for _ in 0..1000 {
        let x = rng.gen_range(-10, 10);
        match m.entry(x) {
            Vacant(_) => {}
            Occupied(e) => {
                e.remove();
            }
        }
        check(&m);
    }
}
#[test]
fn test_extend_ref() {
    // Extending from &HashMap copies the (Copy) pairs into the target map.
    let mut target = HashMap::new();
    target.insert(1, "one");
    let mut source = HashMap::new();
    source.insert(2, "two");
    source.insert(3, "three");
    target.extend(&source);
    assert_eq!(target.len(), 3);
    assert_eq!(target[&1], "one");
    assert_eq!(target[&2], "two");
    assert_eq!(target[&3], "three");
}
#[test]
fn test_capacity_not_less_than_len() {
    // Filling the map exactly to its reported capacity must not reallocate;
    // one more insertion must grow it so capacity stays >= len.
    let mut map = HashMap::new();
    let mut next_key = 0;
    for _ in 0..116 {
        map.insert(next_key, 0);
        next_key += 1;
    }
    assert!(map.capacity() > map.len());
    let headroom = map.capacity() - map.len();
    for _ in 0..headroom {
        map.insert(next_key, 0);
        next_key += 1;
    }
    assert_eq!(map.len(), map.capacity());
    // This insertion exceeds the capacity and must trigger an allocation.
    map.insert(next_key, 0);
    assert!(map.capacity() > map.len());
}
#[test]
fn test_occupied_entry_key() {
    // entry() on an existing key exposes that key via OccupiedEntry::key.
    let key = "hello there";
    let value = "value goes here";
    let mut map = HashMap::new();
    assert!(map.is_empty());
    map.insert(key, value);
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
    match map.entry(key) {
        Vacant(_) => panic!(),
        Occupied(entry) => assert_eq!(key, *entry.key()),
    }
    // Looking at the entry must not have disturbed the stored pair.
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
}
#[test]
fn test_vacant_entry_key() {
    // entry() on a missing key exposes that key via VacantEntry::key.
    let key = "hello there";
    let value = "value goes here";
    let mut map = HashMap::new();
    assert!(map.is_empty());
    match map.entry(key) {
        Occupied(_) => panic!(),
        Vacant(entry) => {
            assert_eq!(key, *entry.key());
            entry.insert(value);
        }
    }
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
}
#[test]
fn test_occupied_entry_replace_entry_with() {
    // `replace_entry_with` on an occupied entry: returning Some keeps the
    // entry (with the new value), returning None removes it.
    let mut a = HashMap::new();
    let key = "a key";
    let value = "an initial value";
    let new_value = "a new value";
    // Closure returns Some(..) => the entry stays occupied, holding new_value.
    let entry = a.entry(key).insert(value).replace_entry_with(|k, v| {
        assert_eq!(k, &key);
        assert_eq!(v, value);
        Some(new_value)
    });
    match entry {
        Occupied(e) => {
            assert_eq!(e.key(), &key);
            assert_eq!(e.get(), &new_value);
        }
        Vacant(_) => panic!(),
    }
    assert_eq!(a[key], new_value);
    assert_eq!(a.len(), 1);
    // Closure returns None => the entry is removed and reported as vacant.
    let entry = match a.entry(key) {
        Occupied(e) => e.replace_entry_with(|k, v| {
            assert_eq!(k, &key);
            assert_eq!(v, new_value);
            None
        }),
        Vacant(_) => panic!(),
    };
    match entry {
        Vacant(e) => assert_eq!(e.key(), &key),
        Occupied(_) => panic!(),
    }
    assert!(!a.contains_key(key));
    assert_eq!(a.len(), 0);
}
#[test]
fn test_entry_and_replace_entry_with() {
    // `and_replace_entry_with`: no-op on a vacant entry, otherwise behaves
    // like `replace_entry_with` (Some keeps the entry, None removes it).
    let mut a = HashMap::new();
    let key = "a key";
    let value = "an initial value";
    let new_value = "a new value";
    // Vacant entry: the closure must never run.
    let entry = a.entry(key).and_replace_entry_with(|_, _| panic!());
    match entry {
        Vacant(e) => assert_eq!(e.key(), &key),
        Occupied(_) => panic!(),
    }
    a.insert(key, value);
    // Occupied entry, closure returns Some: the value is replaced.
    let entry = a.entry(key).and_replace_entry_with(|k, v| {
        assert_eq!(k, &key);
        assert_eq!(v, value);
        Some(new_value)
    });
    match entry {
        Occupied(e) => {
            assert_eq!(e.key(), &key);
            assert_eq!(e.get(), &new_value);
        }
        Vacant(_) => panic!(),
    }
    assert_eq!(a[key], new_value);
    assert_eq!(a.len(), 1);
    // Occupied entry, closure returns None: the entry is removed.
    let entry = a.entry(key).and_replace_entry_with(|k, v| {
        assert_eq!(k, &key);
        assert_eq!(v, new_value);
        None
    });
    match entry {
        Vacant(e) => assert_eq!(e.key(), &key),
        Occupied(_) => panic!(),
    }
    assert!(!a.contains_key(key));
    assert_eq!(a.len(), 0);
}
#[test]
fn test_raw_occupied_entry_replace_entry_with() {
    // Raw-entry version of `replace_entry_with`: Some keeps the entry with
    // the new value, None removes it.
    let mut a = HashMap::new();
    let key = "a key";
    let value = "an initial value";
    let new_value = "a new value";
    let entry = a
        .raw_entry_mut()
        .from_key(&key)
        .insert(key, value)
        .replace_entry_with(|k, v| {
            assert_eq!(k, &key);
            assert_eq!(v, value);
            Some(new_value)
        });
    match entry {
        RawEntryMut::Occupied(e) => {
            assert_eq!(e.key(), &key);
            assert_eq!(e.get(), &new_value);
        }
        RawEntryMut::Vacant(_) => panic!(),
    }
    assert_eq!(a[key], new_value);
    assert_eq!(a.len(), 1);
    // Returning None from the closure must remove the entry entirely.
    let entry = match a.raw_entry_mut().from_key(&key) {
        RawEntryMut::Occupied(e) => e.replace_entry_with(|k, v| {
            assert_eq!(k, &key);
            assert_eq!(v, new_value);
            None
        }),
        RawEntryMut::Vacant(_) => panic!(),
    };
    match entry {
        RawEntryMut::Vacant(_) => {}
        RawEntryMut::Occupied(_) => panic!(),
    }
    assert!(!a.contains_key(key));
    assert_eq!(a.len(), 0);
}
#[test]
fn test_raw_entry_and_replace_entry_with() {
    // Raw-entry version of `and_replace_entry_with`: no-op on vacant,
    // Some keeps the entry with a new value, None removes it.
    let mut a = HashMap::new();
    let key = "a key";
    let value = "an initial value";
    let new_value = "a new value";
    // Vacant: the closure must never run.
    let entry = a
        .raw_entry_mut()
        .from_key(&key)
        .and_replace_entry_with(|_, _| panic!());
    match entry {
        RawEntryMut::Vacant(_) => {}
        RawEntryMut::Occupied(_) => panic!(),
    }
    a.insert(key, value);
    // Occupied, closure returns Some: value replaced in place.
    let entry = a
        .raw_entry_mut()
        .from_key(&key)
        .and_replace_entry_with(|k, v| {
            assert_eq!(k, &key);
            assert_eq!(v, value);
            Some(new_value)
        });
    match entry {
        RawEntryMut::Occupied(e) => {
            assert_eq!(e.key(), &key);
            assert_eq!(e.get(), &new_value);
        }
        RawEntryMut::Vacant(_) => panic!(),
    }
    assert_eq!(a[key], new_value);
    assert_eq!(a.len(), 1);
    // Occupied, closure returns None: the entry is removed.
    let entry = a
        .raw_entry_mut()
        .from_key(&key)
        .and_replace_entry_with(|k, v| {
            assert_eq!(k, &key);
            assert_eq!(v, new_value);
            None
        });
    match entry {
        RawEntryMut::Vacant(_) => {}
        RawEntryMut::Occupied(_) => panic!(),
    }
    assert!(!a.contains_key(key));
    assert_eq!(a.len(), 0);
}
#[test]
fn test_replace_entry_with_doesnt_corrupt() {
    #![allow(deprecated)] //rand
    // Test for #19292
    // Invariant: every key produced by keys() is still reachable via
    // contains_key() after each mutation.
    fn check(m: &HashMap<i32, ()>) {
        for k in m.keys() {
            assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
        }
    }
    let mut m = HashMap::new();
    // Fixed seed keeps the test deterministic.
    let mut rng = {
        let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
        SmallRng::from_seed(seed)
    };
    // Populate the map with some items.
    for _ in 0..50 {
        let x = rng.gen_range(-10, 10);
        m.insert(x, ());
    }
    // Randomly delete entries via `and_replace_entry_with(.. None)` and make
    // sure the table stays internally consistent after each pass.
    for _ in 0..1000 {
        let x = rng.gen_range(-10, 10);
        m.entry(x).and_replace_entry_with(|_, _| None);
        check(&m);
    }
}
#[test]
fn test_retain() {
    // retain() keeps exactly the entries whose predicate holds.
    let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
    map.retain(|&key, _| key % 2 == 0);
    assert_eq!(map.len(), 50);
    for key in [2, 4, 6].iter() {
        assert_eq!(map[key], key * 10);
    }
}
#[test]
fn test_drain_filter() {
    // Consuming the iterator yields the matching pairs and removes them.
    {
        let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
        let mut drained: Vec<_> = map.drain_filter(|&key, _| key % 2 == 0).collect();
        drained.sort_unstable();
        assert_eq!(drained, vec![(0, 0), (2, 20), (4, 40), (6, 60)]);
        assert_eq!(map.len(), 4);
    }
    // Dropping the iterator unconsumed must still remove the matching pairs.
    {
        let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
        drop(map.drain_filter(|&key, _| key % 2 == 0));
        assert_eq!(map.len(), 4);
    }
}
#[test]
#[cfg_attr(miri, ignore)] // FIXME: no OOM signalling (https://github.com/rust-lang/miri/issues/613)
fn test_try_reserve() {
    // try_reserve must fail gracefully: a request whose size computation
    // overflows yields CapacityOverflow, while a huge but representable
    // request yields AllocError instead of aborting the process.
    let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
    const MAX_USIZE: usize = usize::MAX;
    if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
    } else {
        panic!("usize::MAX should trigger an overflow!");
    }
    if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
    } else {
        // This may succeed if there is enough free memory. Attempt to
        // allocate a second hashmap to ensure the allocation will fail.
        let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
        if let Err(AllocError { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
        } else {
            panic!("usize::MAX / 8 should trigger an OOM!");
        }
    }
}
#[test]
fn test_raw_entry() {
    use super::RawEntryMut::{Occupied, Vacant};
    // Exercises the raw-entry API (caller-supplied hashes) and checks that
    // every lookup flavour (by key, by hash, by key + precomputed hash)
    // agrees with plain `get`.
    let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
    let mut map: HashMap<_, _> = xs.iter().cloned().collect();
    // Computes the hash of `k` exactly the way the map itself would.
    let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
        use core::hash::{BuildHasher, Hash, Hasher};
        let mut hasher = map.hasher().build_hasher();
        k.hash(&mut hasher);
        hasher.finish()
    };
    // Existing key (insert)
    match map.raw_entry_mut().from_key(&1) {
        Vacant(_) => unreachable!(),
        Occupied(mut view) => {
            assert_eq!(view.get(), &10);
            assert_eq!(view.insert(100), 10);
        }
    }
    let hash1 = compute_hash(&map, 1);
    assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
    assert_eq!(
        map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
        (&1, &100)
    );
    assert_eq!(
        map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
        (&1, &100)
    );
    assert_eq!(map.len(), 6);
    // Existing key (update)
    match map.raw_entry_mut().from_key(&2) {
        Vacant(_) => unreachable!(),
        Occupied(mut view) => {
            let v = view.get_mut();
            let new_v = (*v) * 10;
            *v = new_v;
        }
    }
    let hash2 = compute_hash(&map, 2);
    assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
    assert_eq!(
        map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
        (&2, &200)
    );
    assert_eq!(
        map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
        (&2, &200)
    );
    assert_eq!(map.len(), 6);
    // Existing key (take)
    let hash3 = compute_hash(&map, 3);
    match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
        Vacant(_) => unreachable!(),
        Occupied(view) => {
            assert_eq!(view.remove_entry(), (3, 30));
        }
    }
    assert_eq!(map.raw_entry().from_key(&3), None);
    assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
    assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
    assert_eq!(map.len(), 5);
    // Nonexistent key (insert)
    match map.raw_entry_mut().from_key(&10) {
        Occupied(_) => unreachable!(),
        Vacant(view) => {
            assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
        }
    }
    assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
    assert_eq!(map.len(), 6);
    // Ensure all lookup methods produce equivalent results.
    for k in 0..12 {
        let hash = compute_hash(&map, k);
        let v = map.get(&k).cloned();
        let kv = v.as_ref().map(|v| (&k, v));
        assert_eq!(map.raw_entry().from_key(&k), kv);
        assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
        match map.raw_entry_mut().from_key(&k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
        match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
        match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
    }
}
#[test]
fn test_key_without_hash_impl() {
    // The raw-entry API lets the map hold keys that implement neither `Hash`
    // nor `Eq`: the caller supplies the hash and the equality closure.
    #[derive(Debug)]
    struct IntWrapper(u64);
    // `S = ()` means the map has no hash builder at all.
    let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
    {
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
    }
    {
        let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
            RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
            RawEntryMut::Vacant(e) => e,
        };
        // The trailing closure supplies the per-key hash (presumably used
        // again when the table resizes — see insert_with_hasher docs).
        vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
    }
    {
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
    }
    {
        let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
            RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
            RawEntryMut::Vacant(e) => e,
        };
        vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
    }
    {
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
    }
    {
        let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
            RawEntryMut::Occupied(e) => e,
            RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
        };
        occupied_entry.remove();
    }
    // Only key 1 must remain after removing key 0.
    assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
    assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
    assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
#[test]
#[cfg(feature = "raw")]
fn test_into_iter_refresh() {
    // Stress-test RawIter::reflect_remove / reflect_insert: entries are
    // removed and re-inserted through the raw-table API while a raw iterator
    // is live, and the iterator must still visit only live entries.
    use core::hash::{BuildHasher, Hash, Hasher};
    #[cfg(miri)]
    const N: usize = 32;
    #[cfg(not(miri))]
    const N: usize = 128;
    let mut rng = rand::thread_rng();
    for n in 0..N {
        let mut m = HashMap::new();
        for i in 0..n {
            assert!(m.insert(i, 2 * i).is_none());
        }
        let hasher = m.hasher().clone();
        // SAFETY (as exercised here): `it` does not outlive `m`, and every
        // mutation below is reported to it via reflect_remove/reflect_insert.
        let mut it = unsafe { m.table.iter() };
        assert_eq!(it.len(), n);
        let mut i = 0;
        let mut left = n;
        let mut removed = Vec::new();
        loop {
            // occasionally remove some elements
            if i < n && rng.gen_bool(0.1) {
                let mut hsh = hasher.build_hasher();
                i.hash(&mut hsh);
                let hash = hsh.finish();
                unsafe {
                    let e = m.table.find(hash, |q| q.0.eq(&i));
                    if let Some(e) = e {
                        // Key `i` is still present: remove it and notify the
                        // iterator so it can skip the now-dead slot.
                        it.reflect_remove(&e);
                        let t = m.table.remove(e);
                        removed.push(t);
                        left -= 1;
                    } else {
                        // Key `i` was removed earlier: re-insert it and
                        // notify the iterator about the new bucket.
                        assert!(removed.contains(&(i, 2 * i)), "{} not in {:?}", i, removed);
                        let e = m
                            .table
                            .insert(hash, (i, 2 * i), super::make_hasher(&hasher));
                        it.reflect_insert(&e);
                        if let Some(p) = removed.iter().position(|e| e == &(i, 2 * i)) {
                            removed.swap_remove(p);
                        }
                        left += 1;
                    }
                }
            }
            let e = it.next();
            if e.is_none() {
                break;
            }
            // The iterator must only yield entries that are currently live.
            assert!(i < n);
            let t = unsafe { e.unwrap().as_ref() };
            assert!(!removed.contains(t));
            let (k, v) = t;
            assert_eq!(*v, 2 * k);
            i += 1;
        }
        assert!(i <= n);
        // just for safety:
        assert_eq!(m.table.len(), left);
    }
}
#[test]
fn test_const_with_hasher() {
    // Builds a map inside a `const` item, which requires `with_hasher` to be
    // callable in const context (with a zero-sized, non-default hash builder).
    use core::hash::BuildHasher;
    use std::borrow::ToOwned;
    use std::collections::hash_map::DefaultHasher;
    #[derive(Clone)]
    struct MyHasher;
    impl BuildHasher for MyHasher {
        type Hasher = DefaultHasher;
        fn build_hasher(&self) -> DefaultHasher {
            DefaultHasher::new()
        }
    }
    const EMPTY_MAP: HashMap<u32, std::string::String, MyHasher> =
        HashMap::with_hasher(MyHasher);
    // The const map is empty; clone it to get a usable runtime value.
    let mut map = EMPTY_MAP.clone();
    map.insert(17, "seventeen".to_owned());
    assert_eq!("seventeen", map[&17]);
}
}
|
use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
use crate::CollectionAllocErr;
use core::borrow::Borrow;
use core::fmt::{self, Debug};
use core::hash::{BuildHasher, Hash, Hasher};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ops::Index;
/// Default hasher for `HashMap`.
#[cfg(feature = "ahash")]
pub type DefaultHashBuilder = ahash::ABuildHasher;
/// Dummy default hasher for `HashMap`.
///
/// With the `ahash` feature disabled this is an uninhabited enum: it can never
/// be constructed, so callers must supply their own `BuildHasher` (e.g. via
/// `with_hasher`) to obtain a usable map.
#[cfg(not(feature = "ahash"))]
pub enum DefaultHashBuilder {}
/// A hash map implemented with quadratic probing and SIMD lookup.
///
/// The default hashing algorithm is currently [`AHash`], though this is
/// subject to change at any point in the future. This hash function is very
/// fast for all types of keys, but this algorithm will typically *not* protect
/// against attacks such as HashDoS.
///
/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
///
/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
/// If you implement these yourself, it is important that the following
/// property holds:
///
/// ```text
/// k1 == k2 -> hash(k1) == hash(k2)
/// ```
///
/// In other words, if two keys are equal, their hashes must be equal.
///
/// It is a logic error for a key to be modified in such a way that the key's
/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
/// the [`Eq`] trait, changes while it is in the map. This is normally only
/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
/// It is also a logic error for the [`Hash`] implementation of a key to panic.
/// This is generally only possible if the trait is implemented manually. If a
/// panic does occur then the contents of the `HashMap` may become corrupted and
/// some items may be dropped from the table.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `HashMap<String, String>` in this example).
/// let mut book_reviews = HashMap::new();
///
/// // Review some books.
/// book_reviews.insert(
/// "Adventures of Huckleberry Finn".to_string(),
/// "My favorite book.".to_string(),
/// );
/// book_reviews.insert(
/// "Grimms' Fairy Tales".to_string(),
/// "Masterpiece.".to_string(),
/// );
/// book_reviews.insert(
/// "Pride and Prejudice".to_string(),
/// "Very enjoyable.".to_string(),
/// );
/// book_reviews.insert(
/// "The Adventures of Sherlock Holmes".to_string(),
/// "Eye lyked it alot.".to_string(),
/// );
///
/// // Check for a specific one.
/// // When collections store owned values (String), they can still be
/// // queried using references (&str).
/// if !book_reviews.contains_key("Les Misérables") {
/// println!("We've got {} reviews, but Les Misérables ain't one.",
/// book_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// book_reviews.remove("The Adventures of Sherlock Holmes");
///
/// // Look up the values associated with some keys.
/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
/// for &book in &to_find {
/// match book_reviews.get(book) {
/// Some(review) => println!("{}: {}", book, review),
/// None => println!("{} is unreviewed.", book)
/// }
/// }
///
/// // Look up the value for a key (will panic if the key is not found).
/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
///
/// // Iterate over everything.
/// for (book, review) in &book_reviews {
/// println!("{}: \"{}\"", book, review);
/// }
/// ```
///
/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
/// for more complex methods of getting, setting, updating and removing keys and
/// their values:
///
/// ```
/// use hashbrown::HashMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `HashMap<&str, u8>` in this example).
/// let mut player_stats = HashMap::new();
///
/// fn random_stat_buff() -> u8 {
/// // could actually return some random value here - let's just return
/// // some fixed value for now
/// 42
/// }
///
/// // insert a key only if it doesn't already exist
/// player_stats.entry("health").or_insert(100);
///
/// // insert a key using a function that provides a new value only if it
/// // doesn't already exist
/// player_stats.entry("defence").or_insert_with(random_stat_buff);
///
/// // update a key, guarding against the key possibly not being set
/// let stat = player_stats.entry("attack").or_insert(100);
/// *stat += random_stat_buff();
/// ```
///
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
/// [`default`]: #method.default
/// [`with_hasher`]: #method.with_hasher
/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
/// [`AHash`]: https://crates.io/crates/ahash
///
/// ```
/// use hashbrown::HashMap;
///
/// #[derive(Hash, Eq, PartialEq, Debug)]
/// struct Viking {
/// name: String,
/// country: String,
/// }
///
/// impl Viking {
/// /// Creates a new Viking.
/// fn new(name: &str, country: &str) -> Viking {
/// Viking { name: name.to_string(), country: country.to_string() }
/// }
/// }
///
/// // Use a HashMap to store the vikings' health points.
/// let mut vikings = HashMap::new();
///
/// vikings.insert(Viking::new("Einar", "Norway"), 25);
/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
///
/// // Use derived implementation to print the status of the vikings.
/// for (viking, health) in &vikings {
/// println!("{:?} has {} hp", viking, health);
/// }
/// ```
///
/// A `HashMap` with fixed list of elements can be initialized from an array:
///
/// ```
/// use hashbrown::HashMap;
///
/// fn main() {
/// let timber_resources: HashMap<&str, i32> =
/// [("Norway", 100),
/// ("Denmark", 50),
/// ("Iceland", 10)]
/// .iter().cloned().collect();
/// // use the values stored in map
/// }
/// ```
#[derive(Clone)]
pub struct HashMap<K, V, S = DefaultHashBuilder> {
    /// Hash builder used to derive the `u64` hash of each key.
    pub(crate) hash_builder: S,
    /// Underlying raw table (quadratic probing + SIMD lookup) holding the
    /// `(key, value)` pairs.
    pub(crate) table: RawTable<(K, V)>,
}
/// Computes the `u64` hash of `val` using a fresh hasher obtained from
/// `hash_builder`.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hash<K: Hash + ?Sized>(hash_builder: &impl BuildHasher, val: &K) -> u64 {
    let mut hasher = hash_builder.build_hasher();
    Hash::hash(val, &mut hasher);
    Hasher::finish(&hasher)
}
#[cfg(feature = "ahash")]
impl<K, V> HashMap<K, V, DefaultHashBuilder> {
    /// Creates an empty `HashMap`.
    ///
    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
    /// is first inserted into.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new() -> Self {
        // Delegates to `Default`, which pairs an empty table with the
        // default (`ahash`) hash builder.
        Self::default()
    }
    /// Creates an empty `HashMap` with the specified capacity.
    ///
    /// The hash map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the hash map will not allocate.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity(capacity: usize) -> Self {
        Self::with_capacity_and_hasher(capacity, DefaultHashBuilder::default())
    }
}
impl<K, V, S> HashMap<K, V, S> {
/// Creates an empty `HashMap` which will use the given hash builder to hash
/// keys.
///
/// The created map has the default initial capacity.
///
/// Warning: `hash_builder` is normally randomly generated, and
/// is designed to allow HashMaps to be resistant to attacks that
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let s = DefaultHashBuilder::default();
/// let mut map = HashMap::with_hasher(s);
/// map.insert(1, 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn with_hasher(hash_builder: S) -> Self {
Self {
hash_builder,
table: RawTable::new(),
}
}
/// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
/// to hash the keys.
///
/// The hash map will be able to hold at least `capacity` elements without
/// reallocating. If `capacity` is 0, the hash map will not allocate.
///
/// Warning: `hash_builder` is normally randomly generated, and
/// is designed to allow HashMaps to be resistant to attacks that
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let s = DefaultHashBuilder::default();
/// let mut map = HashMap::with_capacity_and_hasher(10, s);
/// map.insert(1, 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
Self {
hash_builder,
table: RawTable::with_capacity(capacity),
}
}
/// Returns a reference to the map's [`BuildHasher`].
///
/// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let hasher = DefaultHashBuilder::default();
/// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
/// let hasher: &DefaultHashBuilder = map.hasher();
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn hasher(&self) -> &S {
&self.hash_builder
}
/// Returns the number of elements the map can hold without reallocating.
///
/// This number is a lower bound; the `HashMap<K, V>` might be able to hold
/// more, but is guaranteed to be able to hold at least this many.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// assert!(map.capacity() >= 100);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn capacity(&self) -> usize {
self.table.capacity()
}
/// An iterator visiting all keys in arbitrary order.
/// The iterator element type is `&'a K`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for key in map.keys() {
/// println!("{}", key);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn keys(&self) -> Keys<'_, K, V> {
Keys { inner: self.iter() }
}
/// An iterator visiting all values in arbitrary order.
/// The iterator element type is `&'a V`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for val in map.values() {
/// println!("{}", val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn values(&self) -> Values<'_, K, V> {
Values { inner: self.iter() }
}
/// An iterator visiting all values mutably in arbitrary order.
/// The iterator element type is `&'a mut V`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for val in map.values_mut() {
/// *val = *val + 10;
/// }
///
/// for val in map.values() {
/// println!("{}", val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
ValuesMut {
inner: self.iter_mut(),
}
}
/// An iterator visiting all key-value pairs in arbitrary order.
/// The iterator element type is `(&'a K, &'a V)`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for (key, val) in map.iter() {
/// println!("key: {} val: {}", key, val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn iter(&self) -> Iter<'_, K, V> {
// Here we tie the lifetime of self to the iter.
unsafe {
Iter {
inner: self.table.iter(),
marker: PhantomData,
}
}
}
/// An iterator visiting all key-value pairs in arbitrary order,
/// with mutable references to the values.
/// The iterator element type is `(&'a K, &'a mut V)`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// // Update all values
/// for (_, val) in map.iter_mut() {
/// *val *= 2;
/// }
///
/// for (key, val) in &map {
/// println!("key: {} val: {}", key, val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
// Here we tie the lifetime of self to the iter.
unsafe {
IterMut {
inner: self.table.iter(),
marker: PhantomData,
}
}
}
#[cfg(test)]
#[cfg_attr(feature = "inline-more", inline)]
// Test-only helper: the number of buckets the raw table has allocated
// (presumably the capacity before the load factor is applied — the usable
// capacity exposed to users is smaller).
fn raw_capacity(&self) -> usize {
self.table.buckets()
}
/// Returns the number of elements in the map.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// assert_eq!(a.len(), 0);
/// a.insert(1, "a");
/// assert_eq!(a.len(), 1);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn len(&self) -> usize {
    // The raw table tracks its own element count.
    self.table.len()
}
/// Returns `true` if the map contains no elements.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// assert!(a.is_empty());
/// a.insert(1, "a");
/// assert!(!a.is_empty());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn is_empty(&self) -> bool {
    // Equivalent to `self.len() == 0`.
    self.table.len() == 0
}
/// Clears the map, returning all key-value pairs as an iterator. Keeps the
/// allocated memory for reuse.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// a.insert(1, "a");
/// a.insert(2, "b");
///
/// for (k, v) in a.drain().take(1) {
///     assert!(k == 1 || k == 2);
///     assert!(v == "a" || v == "b");
/// }
///
/// assert!(a.is_empty());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn drain(&mut self) -> Drain<'_, K, V> {
    // The raw drain borrows the table; the `Drain` wrapper's lifetime keeps
    // the map mutably borrowed for as long as the iterator is alive.
    let inner = unsafe { self.table.drain() };
    Drain { inner }
}
/// Clears the map, removing all key-value pairs. Keeps the allocated memory
/// for reuse.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// a.insert(1, "a");
/// a.clear();
/// assert!(a.is_empty());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn clear(&mut self) {
    // Drops every entry in place but keeps the table's allocation.
    self.table.clear();
}
}
impl<K, V, S> HashMap<K, V, S>
where
K: Eq + Hash,
S: BuildHasher,
{
/// Reserves capacity for at least `additional` more elements to be inserted
/// in the `HashMap`. The collection may reserve more space to avoid
/// frequent reallocations.
///
/// # Panics
///
/// Panics if the new allocation size overflows [`usize`].
///
/// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let mut map: HashMap<&str, i32> = HashMap::new();
/// map.reserve(10);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn reserve(&mut self, additional: usize) {
// Growing may rehash every element, so the raw table is given a closure
// that recomputes the hash of a stored `(key, value)` pair from its key.
let hash_builder = &self.hash_builder;
self.table
.reserve(additional, |x| make_hash(hash_builder, &x.0));
}
/// Tries to reserve capacity for at least `additional` more elements to be inserted
/// in the given `HashMap<K,V>`. The collection may reserve more space to avoid
/// frequent reallocations.
///
/// # Errors
///
/// If the capacity overflows, or the allocator reports a failure, then an error
/// is returned.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let mut map: HashMap<&str, isize> = HashMap::new();
/// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
// Fallible variant of `reserve`: allocation failure surfaces as an error
// instead of aborting/panicking.
let hash_builder = &self.hash_builder;
self.table
.try_reserve(additional, |x| make_hash(hash_builder, &x.0))
}
/// Shrinks the capacity of the map as much as possible. It will drop
/// down as much as possible while maintaining the internal rules
/// and possibly leaving some space in accordance with the resize policy.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// map.shrink_to_fit();
/// assert!(map.capacity() >= 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn shrink_to_fit(&mut self) {
// `shrink_to(0, ...)` = shrink with no lower bound; elements are rehashed
// into the smaller table.
let hash_builder = &self.hash_builder;
self.table.shrink_to(0, |x| make_hash(hash_builder, &x.0));
}
/// Shrinks the capacity of the map with a lower limit. It will drop
/// down no lower than the supplied limit while maintaining the internal rules
/// and possibly leaving some space in accordance with the resize policy.
///
/// This function does nothing if the current capacity is smaller than the
/// supplied minimum capacity.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// map.shrink_to(10);
/// assert!(map.capacity() >= 10);
/// map.shrink_to(0);
/// assert!(map.capacity() >= 2);
/// map.shrink_to(10);
/// assert!(map.capacity() >= 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn shrink_to(&mut self, min_capacity: usize) {
let hash_builder = &self.hash_builder;
self.table
.shrink_to(min_capacity, |x| make_hash(hash_builder, &x.0));
}
/// Gets the given key's corresponding entry in the map for in-place manipulation.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut letters = HashMap::new();
///
/// for ch in "a short treatise on fungi".chars() {
///     let counter = letters.entry(ch).or_insert(0);
///     *counter += 1;
/// }
///
/// assert_eq!(letters[&'s'], 2);
/// assert_eq!(letters[&'t'], 3);
/// assert_eq!(letters[&'u'], 1);
/// assert_eq!(letters.get(&'y'), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> {
// A single probe decides Occupied vs Vacant; the vacant arm keeps the
// precomputed hash so the eventual insert does not hash the key again.
let hash = make_hash(&self.hash_builder, &key);
if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) {
Entry::Occupied(OccupiedEntry {
key: Some(key),
elem,
table: self,
})
} else {
Entry::Vacant(VacantEntry {
hash,
key,
table: self,
})
}
}
/// Returns a reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
/// ```
#[inline]
pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
// Delegates to `get_key_value` and discards the key reference.
self.get_key_value(k).map(|(_, v)| v)
}
/// Returns the key-value pair corresponding to the supplied key.
///
/// The supplied key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
/// assert_eq!(map.get_key_value(&2), None);
/// ```
#[inline]
pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, k);
// The bucket returned by `find` stays valid for the duration of the
// shared borrow of `self`, so handing out `&K`/`&V` is sound.
self.table
.find(hash, |x| k.eq(x.0.borrow()))
.map(|item| unsafe {
let &(ref key, ref value) = item.as_ref();
(key, value)
})
}
/// Returns `true` if the map contains a value for the specified key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
where
K: Borrow<Q>,
Q: Hash + Eq,
{
self.get(k).is_some()
}
/// Returns a mutable reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// if let Some(x) = map.get_mut(&1) {
///     *x = "b";
/// }
/// assert_eq!(map[&1], "b");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
let hash = make_hash(&self.hash_builder, k);
// Only the value half is handed out mutably; the key must stay
// untouched so its hash remains consistent with its bucket.
self.table
.find(hash, |x| k.eq(x.0.borrow()))
.map(|item| unsafe { &mut item.as_mut().1 })
}
/// Inserts a key-value pair into the map.
///
/// If the map did not have this key present, [`None`] is returned.
///
/// If the map did have this key present, the value is updated, and the old
/// value is returned. The key is not updated, though; this matters for
/// types that can be `==` without being identical. See the [module-level
/// documentation] for more.
///
/// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
/// [module-level documentation]: index.html#insert-and-complex-keys
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// assert_eq!(map.insert(37, "a"), None);
/// assert_eq!(map.is_empty(), false);
///
/// map.insert(37, "b");
/// assert_eq!(map.insert(37, "c"), Some("b"));
/// assert_eq!(map[&37], "c");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(&mut self, k: K, v: V) -> Option<V> {
unsafe {
let hash = make_hash(&self.hash_builder, &k);
// Two-phase: if the key already exists, swap the value in place and
// return the old one; otherwise insert a fresh pair (which may grow
// the table, hence the rehashing closure).
if let Some(item) = self.table.find(hash, |x| k.eq(&x.0)) {
Some(mem::replace(&mut item.as_mut().1, v))
} else {
let hash_builder = &self.hash_builder;
self.table
.insert(hash, (k, v), |x| make_hash(hash_builder, &x.0));
None
}
}
}
/// Removes a key from the map, returning the value at the key if the key
/// was previously in the map.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
self.remove_entry(k).map(|(_, v)| v)
}
/// Removes a key from the map, returning the stored key and value if the
/// key was previously in the map.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// # fn main() {
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.remove_entry(&1), Some((1, "a")));
/// assert_eq!(map.remove(&1), None);
/// # }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
where
K: Borrow<Q>,
Q: Hash + Eq,
{
unsafe {
let hash = make_hash(&self.hash_builder, &k);
if let Some(item) = self.table.find(hash, |x| k.eq(x.0.borrow())) {
// Unlink the bucket without dropping its contents, then move the
// pair out by value — ownership transfers to the caller.
self.table.erase_no_drop(&item);
Some(item.read())
} else {
None
}
}
}
/// Retains only the elements specified by the predicate.
///
/// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
/// map.retain(|&k, _| k % 2 == 0);
/// assert_eq!(map.len(), 4);
/// ```
pub fn retain<F>(&mut self, mut f: F)
where
F: FnMut(&K, &mut V) -> bool,
{
// Here we only use `iter` as a temporary, preventing use-after-free
unsafe {
for item in self.table.iter() {
let &mut (ref key, ref mut value) = item.as_mut();
if !f(key, value) {
// Erase the element from the table first since drop might panic.
self.table.erase_no_drop(&item);
item.drop();
}
}
}
}
/// Retains only the elements specified by the predicate, and returns an iterator over the
/// removed items.
///
/// In other words, move all pairs `(k, v)` such that `f(&k,&mut v)` returns `false` out
/// into another iterator.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
/// let drained = map.drain_filter(|&k, _| k % 2 == 0);
/// assert_eq!(drained.count(), 4);
/// assert_eq!(map.len(), 4);
/// ```
pub fn drain_filter<'a, F>(&'a mut self, mut f: F) -> impl Iterator<Item=(K,V)> + '_
where
F: 'a + FnMut(&K, &mut V) -> bool,
{
// NOTE(review): removal happens lazily as the returned iterator is
// advanced — if the iterator is dropped before exhaustion, the remaining
// matching pairs stay in the map. Confirm this is the intended contract
// (std's equivalent removes the rest on drop).
// Here we only use `iter` as a temporary, preventing use-after-free
unsafe {
self.table.iter().filter_map(move |item| {
let &mut (ref key, ref mut value) = item.as_mut();
if !f(key, value) {
// Erase the element from the table first since drop might panic.
self.table.erase_no_drop(&item);
Some(item.read())
} else {
None
}
})
}
}
}
impl<K, V, S> HashMap<K, V, S> {
/// Creates a raw entry builder for the HashMap.
///
/// Raw entries provide the lowest level of control for searching and
/// manipulating a map. They must be manually initialized with a hash and
/// then manually searched. After this, insertions into a vacant entry
/// still require an owned key to be provided.
///
/// Raw entries are useful for such exotic situations as:
///
/// * Hash memoization
/// * Deferring the creation of an owned key until it is known to be required
/// * Using a search key that doesn't work with the Borrow trait
/// * Using custom comparison logic without newtype wrappers
///
/// Because raw entries provide much more low-level control, it's much easier
/// to put the HashMap into an inconsistent state which, while memory-safe,
/// will cause the map to produce seemingly random results. Higher-level and
/// more foolproof APIs like `entry` should be preferred when possible.
///
/// In particular, the hash used to initialized the raw entry must still be
/// consistent with the hash of the key that is ultimately stored in the entry.
/// This is because implementations of HashMap may need to recompute hashes
/// when resizing, at which point only the keys are available.
///
/// Raw entries give mutable access to the keys. This must not be used
/// to modify how the key would compare or hash, as the map will not re-evaluate
/// where the key should go, meaning the keys may become "lost" if their
/// location does not reflect their state. For instance, if you change a key
/// so that the map now contains keys which compare equal, search may start
/// acting erratically, with two keys randomly masking each other. Implementations
/// are free to assume this doesn't happen (within the limits of memory-safety).
#[cfg_attr(feature = "inline-more", inline)]
pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> {
// The builder simply captures a unique borrow of the whole map; all the
// real work happens in the builder's `from_*` methods.
RawEntryBuilderMut { map: self }
}
/// Creates a raw immutable entry builder for the HashMap.
///
/// Raw entries provide the lowest level of control for searching and
/// manipulating a map. They must be manually initialized with a hash and
/// then manually searched.
///
/// This is useful for
/// * Hash memoization
/// * Using a search key that doesn't work with the Borrow trait
/// * Using custom comparison logic without newtype wrappers
///
/// Unless you are in such a situation, higher-level and more foolproof APIs like
/// `get` should be preferred.
///
/// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
#[cfg_attr(feature = "inline-more", inline)]
pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> {
// Shared-borrow counterpart of `raw_entry_mut`.
RawEntryBuilder { map: self }
}
}
impl<K, V, S> PartialEq for HashMap<K, V, S>
where
    K: Eq + Hash,
    V: PartialEq,
    S: BuildHasher,
{
    /// Two maps are equal when they have the same length and every key of
    /// `self` maps to an equal value in `other`.
    fn eq(&self, other: &Self) -> bool {
        self.len() == other.len()
            && self
                .iter()
                .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
    }
}
// Marker impl: equality is a full equivalence relation whenever the values
// are themselves `Eq` (keys already require `Eq`).
impl<K, V, S> Eq for HashMap<K, V, S>
where
K: Eq + Hash,
V: Eq,
S: BuildHasher,
{
}
impl<K, V, S> Debug for HashMap<K, V, S>
where
    K: Debug,
    V: Debug,
{
    /// Formats the map as `{key: value, ...}` using the entries' `Debug` impls.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_map();
        builder.entries(self.iter());
        builder.finish()
    }
}
impl<K, V, S> Default for HashMap<K, V, S>
where
    S: Default,
{
    /// Creates an empty `HashMap<K, V, S>`, with the `Default` value for the hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    fn default() -> Self {
        let hasher = S::default();
        Self::with_hasher(hasher)
    }
}
impl<K, Q: ?Sized, V, S> Index<&Q> for HashMap<K, V, S>
where
    K: Eq + Hash + Borrow<Q>,
    Q: Eq + Hash,
    S: BuildHasher,
{
    type Output = V;

    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `HashMap`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn index(&self, key: &Q) -> &V {
        // Same panic message as `Option::expect` produced previously.
        match self.get(key) {
            Some(value) => value,
            None => panic!("no entry found for key"),
        }
    }
}
/// An iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter`]: struct.HashMap.html#method.iter
/// [`HashMap`]: struct.HashMap.html
pub struct Iter<'a, K, V> {
// Cursor over the raw table's buckets; yields pointers to `(K, V)` pairs.
inner: RawIter<(K, V)>,
// Ties this iterator to the `&'a` shared borrow of the map.
marker: PhantomData<(&'a K, &'a V)>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Iter<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
fn clone(&self) -> Self {
Iter {
inner: self.inner.clone(),
marker: PhantomData,
}
}
}
impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
    /// Lists the remaining entries without consuming the iterator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let remaining = self.clone();
        f.debug_list().entries(remaining).finish()
    }
}
/// A mutable iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
/// [`HashMap`]: struct.HashMap.html
pub struct IterMut<'a, K, V> {
// Cursor over the raw table's buckets; yields pointers to `(K, V)` pairs.
inner: RawIter<(K, V)>,
// To ensure invariance with respect to V
marker: PhantomData<(&'a K, &'a mut V)>,
}
// We override the default Send impl which has K: Sync instead of K: Send. Both
// are correct, but this one is more general since it allows keys which
// implement Send but not Sync.
// SAFETY(review): the iterator exposes `&K` and `&mut V`; requiring
// `K: Send + V: Send` matches what moving those borrows between threads needs.
unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
impl<K, V> IterMut<'_, K, V> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // A shared view starting at the same cursor position.
        let inner = self.inner.clone();
        Iter {
            inner,
            marker: PhantomData,
        }
    }
}
/// An owning iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`into_iter`] method on [`HashMap`][`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.HashMap.html#method.into_iter
/// [`HashMap`]: struct.HashMap.html
pub struct IntoIter<K, V> {
// Owns the table's allocation; yields pairs by value and frees on drop.
inner: RawIntoIter<(K, V)>,
}
impl<K, V> IntoIter<K, V> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // Borrowing view over the not-yet-yielded entries.
        let inner = self.inner.iter();
        Iter {
            inner,
            marker: PhantomData,
        }
    }
}
/// An iterator over the keys of a `HashMap`.
///
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`keys`]: struct.HashMap.html#method.keys
/// [`HashMap`]: struct.HashMap.html
pub struct Keys<'a, K, V> {
// Full pair iterator; `Keys` projects out only the key half.
inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Keys<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
fn clone(&self) -> Self {
Keys {
inner: self.inner.clone(),
}
}
}
impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
    /// Lists the remaining keys without consuming the iterator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let remaining = self.clone();
        f.debug_list().entries(remaining).finish()
    }
}
/// An iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values`]: struct.HashMap.html#method.values
/// [`HashMap`]: struct.HashMap.html
pub struct Values<'a, K, V> {
// Full pair iterator; `Values` projects out only the value half.
inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Values<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
fn clone(&self) -> Self {
Values {
inner: self.inner.clone(),
}
}
}
impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
    /// Lists the remaining values without consuming the iterator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let remaining = self.clone();
        f.debug_list().entries(remaining).finish()
    }
}
/// A draining iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain`]: struct.HashMap.html#method.drain
/// [`HashMap`]: struct.HashMap.html
pub struct Drain<'a, K, V> {
// Borrows the table mutably for `'a`; yields pairs by value.
inner: RawDrain<'a, (K, V)>,
}
impl<K, V> Drain<'_, K, V> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // Borrowing view over the not-yet-drained entries.
        let inner = self.inner.iter();
        Iter {
            inner,
            marker: PhantomData,
        }
    }
}
/// A mutable iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: struct.HashMap.html#method.values_mut
/// [`HashMap`]: struct.HashMap.html
pub struct ValuesMut<'a, K, V> {
// Full mutable pair iterator; `ValuesMut` projects out only the value half.
inner: IterMut<'a, K, V>,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
pub struct RawEntryBuilderMut<'a, K, V, S> {
// Unique borrow of the whole map; split into table/hasher borrows on search.
map: &'a mut HashMap<K, V, S>,
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This is a lower-level version of [`Entry`].
///
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
pub enum RawEntryMut<'a, K, V, S> {
/// An occupied entry.
Occupied(RawOccupiedEntryMut<'a, K, V>),
/// A vacant entry.
Vacant(RawVacantEntryMut<'a, K, V, S>),
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawOccupiedEntryMut<'a, K, V> {
// The bucket holding the found `(K, V)` pair.
elem: Bucket<(K, V)>,
// Unique borrow of the table, needed to erase the bucket on removal.
table: &'a mut RawTable<(K, V)>,
}
// SAFETY(review): the entry only exposes the `(K, V)` pair it points at, so
// sending the view between threads requires `K: Send` and `V: Send`.
unsafe impl<K, V> Send for RawOccupiedEntryMut<'_, K, V>
where
K: Send,
V: Send,
{
}
// SAFETY(review): sharing the view requires the pointed-at `K` and `V` to be
// shareable themselves, hence `K: Sync` and `V: Sync`.
unsafe impl<K, V> Sync for RawOccupiedEntryMut<'_, K, V>
where
K: Sync,
V: Sync,
{
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawVacantEntryMut<'a, K, V, S> {
// Unique borrow of the table for the eventual insertion.
table: &'a mut RawTable<(K, V)>,
// Needed so an insertion can hash the new key (and rehash on growth).
hash_builder: &'a S,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
pub struct RawEntryBuilder<'a, K, V, S> {
// Shared borrow of the whole map; lookups hand out `'a` references.
map: &'a HashMap<K, V, S>,
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
    /// Creates a `RawEntryMut` from the given key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Hash the lookup key with the map's hasher, then defer to the
        // hash-plus-key entry point.
        let mut state = self.map.hash_builder.build_hasher();
        k.hash(&mut state);
        let hash = state.finish();
        self.from_key_hashed_nocheck(hash, k)
    }

    /// Creates a `RawEntryMut` from the given key and its hash.
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
    where
        K: Borrow<Q>,
        Q: Eq,
    {
        // Compare stored keys against `k` through their `Borrow` view.
        self.from_hash(hash, |stored| stored.borrow().eq(k))
    }
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
    /// Creates a `RawEntryMut` from the given hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        self.search(hash, is_match)
    }

    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        // Probe the table once: a hit becomes an occupied entry holding the
        // found bucket; a miss becomes a vacant entry that keeps the hash
        // builder around for the eventual insertion.
        if let Some(elem) = self.map.table.find(hash, |(k, _)| is_match(k)) {
            RawEntryMut::Occupied(RawOccupiedEntryMut {
                elem,
                table: &mut self.map.table,
            })
        } else {
            RawEntryMut::Vacant(RawVacantEntryMut {
                table: &mut self.map.table,
                hash_builder: &self.map.hash_builder,
            })
        }
    }
}
impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> {
    /// Access an entry by key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Compute the hash up front, then reuse the hash-aware lookup.
        let mut state = self.map.hash_builder.build_hasher();
        k.hash(&mut state);
        self.from_key_hashed_nocheck(state.finish(), k)
    }

    /// Access an entry by a key and its hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Compare stored keys against `k` through their `Borrow` view.
        self.from_hash(hash, |stored| stored.borrow().eq(k))
    }

    /// Access an entry by hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        self.search(hash, is_match)
    }

    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        let found = self.map.table.find(hash, |(k, _)| is_match(k))?;
        // The bucket stays valid for the `'a` shared borrow of the map, so
        // handing out `'a` references to its key and value is sound.
        unsafe {
            let &(ref key, ref value) = found.as_ref();
            Some((key, value))
        }
    }
}
impl<'a, K, V, S> RawEntryMut<'a, K, V, S> {
/// Sets the value of the entry, and returns a RawOccupiedEntryMut.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
///
/// assert_eq!(entry.remove_entry(), ("horseyland", 37));
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
where
K: Hash,
S: BuildHasher,
{
match self {
// Occupied: overwrite the value in place; the provided `key` is
// discarded and the stored key is kept.
RawEntryMut::Occupied(mut entry) => {
entry.insert(value);
entry
}
RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
}
}
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// mutable references to the key and value in the entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
/// assert_eq!(map["poneyland"], 3);
///
/// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
/// assert_eq!(map["poneyland"], 6);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
where
K: Hash,
S: BuildHasher,
{
match self {
// Occupied: the defaults are dropped unused.
RawEntryMut::Occupied(entry) => entry.into_key_value(),
RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
}
}
/// Ensures a value is in the entry by inserting the result of the default function if empty,
/// and returns mutable references to the key and value in the entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, String> = HashMap::new();
///
/// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
///     ("poneyland", "hoho".to_string())
/// });
///
/// assert_eq!(map["poneyland"], "hoho".to_string());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
where
F: FnOnce() -> (K, V),
K: Hash,
S: BuildHasher,
{
match self {
RawEntryMut::Occupied(entry) => entry.into_key_value(),
// Vacant: `default` is only invoked on this path (lazy default).
RawEntryMut::Vacant(entry) => {
let (k, v) = default();
entry.insert(k, v)
}
}
}
/// Provides in-place mutable access to an occupied entry before any
/// potential inserts into the map.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// map.raw_entry_mut()
///    .from_key("poneyland")
///    .and_modify(|_k, v| { *v += 1 })
///    .or_insert("poneyland", 42);
/// assert_eq!(map["poneyland"], 42);
///
/// map.raw_entry_mut()
///    .from_key("poneyland")
///    .and_modify(|_k, v| { *v += 1 })
///    .or_insert("poneyland", 0);
/// assert_eq!(map["poneyland"], 43);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut K, &mut V),
{
match self {
RawEntryMut::Occupied(mut entry) => {
// Scope the key/value borrows to the call of `f`, then re-wrap
// the (unconsumed) entry.
{
let (k, v) = entry.get_key_value_mut();
f(k, v);
}
RawEntryMut::Occupied(entry)
}
RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
}
}
}
// SAFETY(review): every accessor below dereferences `self.elem`, the bucket
// found at entry-construction time. It stays valid because the entry holds a
// unique borrow of the table for `'a`, so no rehash/removal can invalidate it.
impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
/// Gets a reference to the key in the entry.
#[cfg_attr(feature = "inline-more", inline)]
pub fn key(&self) -> &K {
unsafe { &self.elem.as_ref().0 }
}
/// Gets a mutable reference to the key in the entry.
///
/// Mutating the key must not change its hash/equality (see the
/// `raw_entry_mut` docs), or the entry becomes unfindable.
#[cfg_attr(feature = "inline-more", inline)]
pub fn key_mut(&mut self) -> &mut K {
unsafe { &mut self.elem.as_mut().0 }
}
/// Converts the entry into a mutable reference to the key in the entry
/// with a lifetime bound to the map itself.
#[cfg_attr(feature = "inline-more", inline)]
pub fn into_key(self) -> &'a mut K {
unsafe { &mut self.elem.as_mut().0 }
}
/// Gets a reference to the value in the entry.
#[cfg_attr(feature = "inline-more", inline)]
pub fn get(&self) -> &V {
unsafe { &self.elem.as_ref().1 }
}
/// Converts the OccupiedEntry into a mutable reference to the value in the entry
/// with a lifetime bound to the map itself.
#[cfg_attr(feature = "inline-more", inline)]
pub fn into_mut(self) -> &'a mut V {
unsafe { &mut self.elem.as_mut().1 }
}
/// Gets a mutable reference to the value in the entry.
#[cfg_attr(feature = "inline-more", inline)]
pub fn get_mut(&mut self) -> &mut V {
unsafe { &mut self.elem.as_mut().1 }
}
/// Gets a reference to the key and value in the entry.
#[cfg_attr(feature = "inline-more", inline)]
pub fn get_key_value(&mut self) -> (&K, &V) {
unsafe {
let &(ref key, ref value) = self.elem.as_ref();
(key, value)
}
}
/// Gets a mutable reference to the key and value in the entry.
#[cfg_attr(feature = "inline-more", inline)]
pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
unsafe {
let &mut (ref mut key, ref mut value) = self.elem.as_mut();
(key, value)
}
}
/// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
/// with a lifetime bound to the map itself.
#[cfg_attr(feature = "inline-more", inline)]
pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
unsafe {
let &mut (ref mut key, ref mut value) = self.elem.as_mut();
(key, value)
}
}
/// Sets the value of the entry, and returns the entry's old value.
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(&mut self, value: V) -> V {
mem::replace(self.get_mut(), value)
}
/// Sets the value of the entry, and returns the entry's old value.
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert_key(&mut self, key: K) -> K {
mem::replace(self.key_mut(), key)
}
/// Takes the value out of the entry, and returns it.
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove(self) -> V {
self.remove_entry().1
}
/// Take the ownership of the key and value from the map.
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry(self) -> (K, V) {
unsafe {
// Unlink the bucket without dropping its contents, then move the
// pair out by value — ownership transfers to the caller.
self.table.erase_no_drop(&self.elem);
self.elem.read()
}
}
}
impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it.
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
where
K: Hash,
S: BuildHasher,
{
// Hash the key with the map's own hasher, then defer to the nocheck path.
let mut hasher = self.hash_builder.build_hasher();
key.hash(&mut hasher);
self.insert_hashed_nocheck(hasher.finish(), key, value)
}
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it.
///
/// NOTE(review): `hash` is trusted to be the key's hash under this map's
/// hasher ("nocheck"); a mismatched hash breaks later lookups.
#[cfg_attr(feature = "inline-more", inline)]
#[allow(clippy::shadow_unrelated)]
pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
where
K: Hash,
S: BuildHasher,
{
// Copy the hasher handle out first: the closure must not borrow `self`,
// which is consumed by `insert_with_hasher`.
let hash_builder = self.hash_builder;
self.insert_with_hasher(hash, key, value, |k| make_hash(hash_builder, k))
}
/// Set the value of an entry with a custom hasher function.
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert_with_hasher<H>(
self,
hash: u64,
key: K,
value: V,
hasher: H,
) -> (&'a mut K, &'a mut V)
where
H: Fn(&K) -> u64,
{
// SAFETY: `insert` returns a bucket that stays valid for the table
// borrow's lifetime 'a; key and value are disjoint fields of the pair.
unsafe {
let elem = self.table.insert(hash, (key, value), |x| hasher(&x.0));
let &mut (ref mut k, ref mut v) = elem.as_mut();
(k, v)
}
}
// Internal helper: insert the pair and return an occupied view of it.
#[cfg_attr(feature = "inline-more", inline)]
fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
where
K: Hash,
S: BuildHasher,
{
let hash_builder = self.hash_builder;
let mut hasher = self.hash_builder.build_hasher();
key.hash(&mut hasher);
let elem = self.table.insert(hasher.finish(), (key, value), |k| {
make_hash(hash_builder, &k.0)
});
RawOccupiedEntryMut {
elem,
table: self.table,
}
}
}
impl<K, V, S> Debug for RawEntryBuilderMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // NOTE(review): prints "RawEntryBuilder" (same as the shared builder
        // below) — confirm this name is intentional.
        f.debug_struct("RawEntryBuilder").finish()
    }
}
impl<K: Debug, V: Debug, S> Debug for RawEntryMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Both variants print as a one-field "RawEntry" tuple.
        let mut tuple = f.debug_tuple("RawEntry");
        match self {
            RawEntryMut::Vacant(vacant) => tuple.field(vacant),
            RawEntryMut::Occupied(occupied) => tuple.field(occupied),
        };
        tuple.finish()
    }
}
impl<K: Debug, V: Debug> Debug for RawOccupiedEntryMut<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut out = f.debug_struct("RawOccupiedEntryMut");
        out.field("key", self.key());
        out.field("value", self.get());
        out.finish()
    }
}
impl<K, V, S> Debug for RawVacantEntryMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // No useful payload to show for a vacant raw entry.
        f.debug_struct("RawVacantEntryMut").finish()
    }
}
impl<K, V, S> Debug for RawEntryBuilder<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawEntryBuilder").finish()
    }
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`entry`]: struct.HashMap.html#method.entry
pub enum Entry<'a, K, V, S> {
/// An occupied entry.
Occupied(OccupiedEntry<'a, K, V, S>),
/// A vacant entry.
Vacant(VacantEntry<'a, K, V, S>),
}
impl<K: Debug, V: Debug, S> Debug for Entry<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Both variants render as a one-field "Entry" tuple.
match *self {
Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
}
}
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct OccupiedEntry<'a, K, V, S> {
// Owned copy of the lookup key, if the creator stashed one; unwrapped by
// `replace_entry`/`replace_key`. `VacantEntry::insert_entry` stores `None`.
key: Option<K>,
// Location of the (K, V) pair inside `table`.
elem: Bucket<(K, V)>,
// Exclusive borrow of the map; keeps `elem` valid for 'a.
table: &'a mut HashMap<K, V, S>,
}
// SAFETY: an OccupiedEntry holds a bucket pointer into the map plus the
// exclusive map borrow itself; moving it across threads is sound whenever the
// key, value, and hasher types are Send.
unsafe impl<K, V, S> Send for OccupiedEntry<'_, K, V, S>
where
K: Send,
V: Send,
S: Send,
{
}
// SAFETY: shared access through an OccupiedEntry only reads K/V/S, so it is
// Sync exactly when they are.
unsafe impl<K, V, S> Sync for OccupiedEntry<'_, K, V, S>
where
K: Sync,
V: Sync,
S: Sync,
{
}
impl<K: Debug, V: Debug, S> Debug for OccupiedEntry<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedEntry")
.field("key", self.key())
.field("value", self.get())
.finish()
}
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct VacantEntry<'a, K, V, S> {
// Hash of `key`; reused by `insert` so the key is not hashed twice.
hash: u64,
// The key that will be inserted if the caller supplies a value.
key: K,
table: &'a mut HashMap<K, V, S>,
}
impl<K: Debug, V, S> Debug for VacantEntry<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("VacantEntry").field(self.key()).finish()
}
}
impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> {
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
#[cfg_attr(feature = "inline-more", inline)]
fn into_iter(self) -> Iter<'a, K, V> {
self.iter()
}
}
impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> {
type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
#[cfg_attr(feature = "inline-more", inline)]
fn into_iter(self) -> IterMut<'a, K, V> {
self.iter_mut()
}
}
impl<K, V, S> IntoIterator for HashMap<K, V, S> {
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
/// Creates a consuming iterator, that is, one that moves each key-value
/// pair out of the map in arbitrary order. The map cannot be used after
/// calling this.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// // Not possible with .iter()
/// let vec: Vec<(&str, i32)> = map.into_iter().collect();
/// ```
#[cfg_attr(feature = "inline-more", inline)]
fn into_iter(self) -> IntoIter<K, V> {
IntoIter {
inner: self.table.into_iter(),
}
}
}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        let bucket = self.inner.next()?;
        // SAFETY: buckets yielded by the raw iterator are live for the
        // iterator's borrow of the map.
        let &(ref key, ref value) = unsafe { bucket.as_ref() };
        Some((key, value))
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Iter<'_, K, V> {}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        let bucket = self.inner.next()?;
        // SAFETY: each bucket is yielded at most once, so the unique value
        // reference handed out here cannot alias a previous one.
        let &mut (ref key, ref mut value) = unsafe { bucket.as_mut() };
        Some((key, value))
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for IterMut<'_, K, V> {}
impl<K, V> fmt::Debug for IterMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Print the remaining pairs without consuming the iterator.
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<K, V> Iterator for IntoIter<K, V> {
type Item = (K, V);
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<(K, V)> {
// The raw owning iterator already yields owned pairs.
self.inner.next()
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
#[cfg_attr(feature = "inline-more", inline)]
fn len(&self) -> usize {
self.inner.len()
}
}
impl<K, V> FusedIterator for IntoIter<K, V> {}
impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Print the remaining pairs without consuming the iterator.
f.debug_list().entries(self.iter()).finish()
}
}
impl<'a, K, V> Iterator for Keys<'a, K, V> {
    type Item = &'a K;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a K> {
        // Project the key out of the underlying (&K, &V) pair.
        let (key, _) = self.inner.next()?;
        Some(key)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Keys<'_, K, V> {}
impl<'a, K, V> Iterator for Values<'a, K, V> {
    type Item = &'a V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a V> {
        // Project the value out of the underlying (&K, &V) pair.
        let (_, value) = self.inner.next()?;
        Some(value)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Values<'_, K, V> {}
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
    type Item = &'a mut V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a mut V> {
        // Project the value out of the underlying (&K, &mut V) pair.
        let (_, value) = self.inner.next()?;
        Some(value)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.inner.iter()).finish()
    }
}
impl<'a, K, V> Iterator for Drain<'a, K, V> {
type Item = (K, V);
#[cfg_attr(feature = "inline-more", inline)]
fn next(&mut self) -> Option<(K, V)> {
// Yields the removed pairs, in arbitrary order, as owned values.
self.inner.next()
}
#[cfg_attr(feature = "inline-more", inline)]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
}
impl<K, V> ExactSizeIterator for Drain<'_, K, V> {
#[cfg_attr(feature = "inline-more", inline)]
fn len(&self) -> usize {
self.inner.len()
}
}
impl<K, V> FusedIterator for Drain<'_, K, V> {}
impl<K, V> fmt::Debug for Drain<'_, K, V>
where
K: fmt::Debug,
V: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Print the not-yet-drained pairs without consuming the iterator.
f.debug_list().entries(self.iter()).finish()
}
}
impl<'a, K, V, S> Entry<'a, K, V, S> {
    /// Sets the value of the entry, and returns an OccupiedEntry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.entry("horseyland").insert(37);
    ///
    /// assert_eq!(entry.key(), &"horseyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Vacant(vacant) => vacant.insert_entry(value),
            Entry::Occupied(mut occupied) => {
                occupied.insert(value);
                occupied
            }
        }
    }
    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland").or_insert(3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.entry("poneyland").or_insert(10) *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // `default` was already evaluated by the caller, so delegating to the
        // closure-based variant is observationally identical.
        self.or_insert_with(move || default)
    }
    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    /// let s = "hoho".to_string();
    ///
    /// map.entry("poneyland").or_insert_with(|| s);
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            // The default is only evaluated when the slot is vacant.
            Entry::Vacant(vacant) => vacant.insert(default()),
            Entry::Occupied(occupied) => occupied.into_mut(),
        }
    }
    /// Returns a reference to this entry's key.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        match self {
            Entry::Occupied(occupied) => occupied.key(),
            Entry::Vacant(vacant) => vacant.key(),
        }
    }
    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(mut self, f: F) -> Self
    where
        F: FnOnce(&mut V),
    {
        // Run the closure only when the entry already holds a value; the
        // entry itself is passed through unchanged either way.
        if let Entry::Occupied(ref mut occupied) = self {
            f(occupied.get_mut());
        }
        self
    }
}
impl<'a, K, V: Default, S> Entry<'a, K, V, S> {
/// Ensures a value is in the entry by inserting the default value if empty,
/// and returns a mutable reference to the value in the entry.
///
/// # Examples
///
/// ```
/// # fn main() {
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
/// map.entry("poneyland").or_default();
///
/// assert_eq!(map["poneyland"], None);
/// # }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn or_default(self) -> &'a mut V
where
K: Hash,
S: BuildHasher,
{
match self {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => entry.insert(Default::default()),
}
}
}
impl<'a, K, V, S> OccupiedEntry<'a, K, V, S> {
/// Gets a reference to the key in the entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// map.entry("poneyland").or_insert(12);
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn key(&self) -> &K {
// SAFETY: `elem` is an occupied bucket kept alive by the `&mut` map borrow.
unsafe { &self.elem.as_ref().0 }
}
/// Take the ownership of the key and value from the map.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// // We delete the entry from the map.
/// o.remove_entry();
/// }
///
/// assert_eq!(map.contains_key("poneyland"), false);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry(self) -> (K, V) {
// SAFETY: `erase_no_drop` unlinks the bucket without dropping the pair,
// then `read` moves it out — the payload is dropped exactly once.
unsafe {
self.table.table.erase_no_drop(&self.elem);
self.elem.read()
}
}
/// Gets a reference to the value in the entry.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// assert_eq!(o.get(), &12);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn get(&self) -> &V {
// SAFETY: occupied bucket; a shared borrow suffices for a read.
unsafe { &self.elem.as_ref().1 }
}
/// Gets a mutable reference to the value in the entry.
///
/// If you need a reference to the `OccupiedEntry` which may outlive the
/// destruction of the `Entry` value, see [`into_mut`].
///
/// [`into_mut`]: #method.into_mut
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// assert_eq!(map["poneyland"], 12);
/// if let Entry::Occupied(mut o) = map.entry("poneyland") {
/// *o.get_mut() += 10;
/// assert_eq!(*o.get(), 22);
///
/// // We can use the same Entry multiple times.
/// *o.get_mut() += 2;
/// }
///
/// assert_eq!(map["poneyland"], 24);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn get_mut(&mut self) -> &mut V {
// SAFETY: `&mut self` guarantees exclusive access to the bucket.
unsafe { &mut self.elem.as_mut().1 }
}
/// Converts the OccupiedEntry into a mutable reference to the value in the entry
/// with a lifetime bound to the map itself.
///
/// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
///
/// [`get_mut`]: #method.get_mut
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// assert_eq!(map["poneyland"], 12);
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// *o.into_mut() += 10;
/// }
///
/// assert_eq!(map["poneyland"], 22);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn into_mut(self) -> &'a mut V {
// SAFETY: consuming `self` transfers the exclusive map borrow ('a) to the
// returned reference.
unsafe { &mut self.elem.as_mut().1 }
}
/// Sets the value of the entry, and returns the entry's old value.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(mut o) = map.entry("poneyland") {
/// assert_eq!(o.insert(15), 12);
/// }
///
/// assert_eq!(map["poneyland"], 15);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(&mut self, mut value: V) -> V {
// Swap the new value into the bucket; the old one ends up in `value`.
let old_value = self.get_mut();
mem::swap(&mut value, old_value);
value
}
/// Takes the value out of the entry, and returns it.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// map.entry("poneyland").or_insert(12);
///
/// if let Entry::Occupied(o) = map.entry("poneyland") {
/// assert_eq!(o.remove(), 12);
/// }
///
/// assert_eq!(map.contains_key("poneyland"), false);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove(self) -> V {
self.remove_entry().1
}
/// Replaces the entry, returning the old key and value. The new key in the hash map will be
/// the key used to create this entry.
///
/// # Panics
///
/// Panics if this entry was created without an owned key (the internal
/// `key` field is `None`, as for entries returned by `Entry::insert`).
///
/// # Examples
///
/// ```
/// use hashbrown::hash_map::{Entry, HashMap};
/// use std::rc::Rc;
///
/// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
/// map.insert(Rc::new("Stringthing".to_string()), 15);
///
/// let my_key = Rc::new("Stringthing".to_string());
///
/// if let Entry::Occupied(entry) = map.entry(my_key) {
/// // Also replace the key with a handle to our other key.
/// let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
/// }
///
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn replace_entry(self, value: V) -> (K, V) {
// SAFETY: exclusive access to the bucket via the `&mut` map borrow.
let entry = unsafe { self.elem.as_mut() };
// The stashed key is assumed to hash/compare equal to the stored one
// (entry API contract), so swapping it preserves table invariants.
let old_key = mem::replace(&mut entry.0, self.key.unwrap());
let old_value = mem::replace(&mut entry.1, value);
(old_key, old_value)
}
/// Replaces the key in the hash map with the key used to create this entry.
///
/// # Panics
///
/// Panics if this entry was created without an owned key (the internal
/// `key` field is `None`).
///
/// # Examples
///
/// ```
/// use hashbrown::hash_map::{Entry, HashMap};
/// use std::rc::Rc;
///
/// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
/// let mut known_strings: Vec<Rc<String>> = Vec::new();
///
/// // Initialise known strings, run program, etc.
///
/// reclaim_memory(&mut map, &known_strings);
///
/// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) {
/// for s in known_strings {
/// if let Entry::Occupied(entry) = map.entry(s.clone()) {
/// // Replaces the entry's key with our version of it in `known_strings`.
/// entry.replace_key();
/// }
/// }
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn replace_key(self) -> K {
// SAFETY: exclusive access to the bucket via the `&mut` map borrow.
let entry = unsafe { self.elem.as_mut() };
mem::replace(&mut entry.0, self.key.unwrap())
}
}
impl<'a, K, V, S> VacantEntry<'a, K, V, S> {
/// Gets a reference to the key that would be used when inserting a value
/// through the `VacantEntry`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
/// assert_eq!(map.entry("poneyland").key(), &"poneyland");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn key(&self) -> &K {
&self.key
}
/// Take ownership of the key.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// if let Entry::Vacant(v) = map.entry("poneyland") {
/// v.into_key();
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn into_key(self) -> K {
self.key
}
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::Entry;
///
/// let mut map: HashMap<&str, u32> = HashMap::new();
///
/// if let Entry::Vacant(o) = map.entry("poneyland") {
/// o.insert(37);
/// }
/// assert_eq!(map["poneyland"], 37);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(self, value: V) -> &'a mut V
where
K: Hash,
S: BuildHasher,
{
// Reuses `self.hash` (computed at lookup time) instead of rehashing.
// The shared borrow of `hash_builder` and the mutable use of `table`
// are disjoint fields of the same map, so both borrows coexist.
let hash_builder = &self.table.hash_builder;
let bucket = self.table.table.insert(self.hash, (self.key, value), |x| {
make_hash(hash_builder, &x.0)
});
// SAFETY: the freshly inserted bucket is valid for the map borrow 'a.
unsafe { &mut bucket.as_mut().1 }
}
// Internal helper: insert the pair and return an occupied view of it.
#[cfg_attr(feature = "inline-more", inline)]
fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S>
where
K: Hash,
S: BuildHasher,
{
let hash_builder = &self.table.hash_builder;
let elem = self.table.table.insert(self.hash, (self.key, value), |x| {
make_hash(hash_builder, &x.0)
});
OccupiedEntry {
// NOTE(review): `key` is `None` here, so calling `replace_entry` or
// `replace_key` on the returned entry would panic.
key: None,
elem,
table: self.table,
}
}
}
impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher + Default,
{
    /// Builds a map from an iterator of pairs; a later pair with an equal key
    /// overwrites an earlier one. Pre-sizes from the lower size hint.
    #[cfg_attr(feature = "inline-more", inline)]
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let pairs = iter.into_iter();
        let mut map = Self::with_capacity_and_hasher(pairs.size_hint().0, S::default());
        for (key, value) in pairs {
            map.insert(key, value);
        }
        map
    }
}
impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        let iter = iter.into_iter();
        // Keys may already be present or occur multiple times in the
        // iterator, so reserving the full hint can overshoot. Reserve the
        // entire lower bound only when the map is empty; otherwise half of
        // it (rounded up), which bounds the worst case at two resizes.
        let (lower, _) = iter.size_hint();
        let reserve = if self.is_empty() {
            lower
        } else {
            (lower + 1) / 2
        };
        self.reserve(reserve);
        for (key, value) in iter {
            self.insert(key, value);
        }
    }
}
impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
where
    K: Eq + Hash + Copy,
    V: Copy,
    S: BuildHasher,
{
    /// Copies each referenced pair and forwards to the owning `extend`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
        self.extend(iter.into_iter().map(|(key, value)| (*key, *value)));
    }
}
// Compile-time covariance checks: each function below typechecks only if the
// named type is covariant in the given lifetime parameter. Never called.
#[allow(dead_code)]
fn assert_covariance() {
fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
v
}
fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
v
}
fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
v
}
fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
v
}
fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> {
v
}
fn into_iter_val<'new>(v: IntoIter<u8, &'static str>) -> IntoIter<u8, &'new str> {
v
}
fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
v
}
fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
v
}
fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
v
}
fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
v
}
fn drain<'new>(
d: Drain<'static, &'static str, &'static str>,
) -> Drain<'new, &'new str, &'new str> {
d
}
}
#[cfg(test)]
mod test_map {
use super::DefaultHashBuilder;
use super::Entry::{Occupied, Vacant};
use super::{HashMap, RawEntryMut};
use crate::CollectionAllocErr::*;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use std::cell::RefCell;
use std::usize;
use std::vec::Vec;
#[test]
fn test_zero_capacities() {
// Every zero-sized construction/shrink path must leave capacity at 0.
type HM = HashMap<i32, i32>;
let m = HM::new();
assert_eq!(m.capacity(), 0);
let m = HM::default();
assert_eq!(m.capacity(), 0);
let m = HM::with_hasher(DefaultHashBuilder::default());
assert_eq!(m.capacity(), 0);
let m = HM::with_capacity(0);
assert_eq!(m.capacity(), 0);
let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
assert_eq!(m.capacity(), 0);
let mut m = HM::new();
m.insert(1, 1);
m.insert(2, 2);
m.remove(&1);
m.remove(&2);
m.shrink_to_fit();
assert_eq!(m.capacity(), 0);
let mut m = HM::new();
m.reserve(0);
assert_eq!(m.capacity(), 0);
}
#[test]
fn test_create_capacity_zero() {
// A capacity-0 map must still accept inserts (allocation is lazy).
let mut m = HashMap::with_capacity(0);
assert!(m.insert(1, 1).is_none());
assert!(m.contains_key(&1));
assert!(!m.contains_key(&0));
}
#[test]
fn test_insert() {
// Basic insert/len/get consistency.
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&2).unwrap(), 4);
}
#[test]
fn test_clone() {
// Cloning must preserve contents and length.
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
let m2 = m.clone();
assert_eq!(*m2.get(&1).unwrap(), 2);
assert_eq!(*m2.get(&2).unwrap(), 4);
assert_eq!(m2.len(), 2);
}
// DROP_VECTOR[k] counts currently-live `Droppable` instances with id `k`.
thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
#[derive(Hash, PartialEq, Eq)]
struct Droppable {
k: usize,
}
impl Droppable {
// Construction increments the live-count for this id.
fn new(k: usize) -> Droppable {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[k] += 1;
});
Droppable { k }
}
}
impl Drop for Droppable {
// Dropping decrements it; a leak or double-drop shows up as a nonzero count.
fn drop(&mut self) {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[self.k] -= 1;
});
}
}
impl Clone for Droppable {
fn clone(&self) -> Self {
Droppable::new(self.k)
}
}
#[test]
fn test_drops() {
// Every key and value must be dropped exactly once across insert/remove/drop.
DROP_VECTOR.with(|slot| {
*slot.borrow_mut() = vec![0; 200];
});
{
let mut m = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0..100 {
let d1 = Droppable::new(i);
let d2 = Droppable::new(i + 100);
m.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for i in 0..50 {
let k = Droppable::new(i);
let v = m.remove(&k);
assert!(v.is_some());
DROP_VECTOR.with(|v| {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i + 100], 1);
});
}
DROP_VECTOR.with(|v| {
for i in 0..50 {
assert_eq!(v.borrow()[i], 0);
assert_eq!(v.borrow()[i + 100], 0);
}
for i in 50..100 {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i + 100], 1);
}
});
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
}
#[test]
fn test_into_iter_drops() {
// Partially consuming an IntoIter must still drop the remaining pairs.
DROP_VECTOR.with(|v| {
*v.borrow_mut() = vec![0; 200];
});
let hm = {
let mut hm = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0..100 {
let d1 = Droppable::new(i);
let d2 = Droppable::new(i + 100);
hm.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
hm
};
// By the way, ensure that cloning doesn't screw up the dropping.
drop(hm.clone());
{
let mut half = hm.into_iter().take(50);
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for _ in half.by_ref() {}
DROP_VECTOR.with(|v| {
let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
assert_eq!(nk, 50);
assert_eq!(nv, 50);
});
};
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
}
#[test]
fn test_empty_remove() {
// Removing from an empty map is a no-op returning None.
let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.remove(&0), None);
}
#[test]
fn test_empty_entry() {
// entry() on a missing key yields Vacant; or_insert then populates it.
let mut m: HashMap<i32, bool> = HashMap::new();
match m.entry(0) {
Occupied(_) => panic!(),
Vacant(_) => {}
}
assert!(*m.entry(0).or_insert(true));
assert_eq!(m.len(), 1);
}
#[test]
fn test_empty_iter() {
// Every iterator flavor over an empty map is immediately exhausted.
let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.drain().next(), None);
assert_eq!(m.keys().next(), None);
assert_eq!(m.values().next(), None);
assert_eq!(m.values_mut().next(), None);
assert_eq!(m.iter().next(), None);
assert_eq!(m.iter_mut().next(), None);
assert_eq!(m.len(), 0);
assert!(m.is_empty());
assert_eq!(m.into_iter().next(), None);
}
#[test]
#[cfg_attr(miri, ignore)] // FIXME: takes too long
fn test_lots_of_insertions() {
// Stress get/insert/remove consistency through repeated fill/drain cycles.
let mut m = HashMap::new();
// Try this a few times to make sure we never screw up the hashmap's
// internal state.
for _ in 0..10 {
assert!(m.is_empty());
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
for j in 1..=i {
let r = m.get(&j);
assert_eq!(r, Some(&j));
}
for j in i + 1..1001 {
let r = m.get(&j);
assert_eq!(r, None);
}
}
for i in 1001..2001 {
assert!(!m.contains_key(&i));
}
// remove forwards
for i in 1..1001 {
assert!(m.remove(&i).is_some());
for j in 1..=i {
assert!(!m.contains_key(&j));
}
for j in i + 1..1001 {
assert!(m.contains_key(&j));
}
}
for i in 1..1001 {
assert!(!m.contains_key(&i));
}
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
}
// remove backwards
for i in (1..1001).rev() {
assert!(m.remove(&i).is_some());
for j in i..1001 {
assert!(!m.contains_key(&j));
}
for j in 1..i {
assert!(m.contains_key(&j));
}
}
}
}
#[test]
fn test_find_mut() {
// get_mut must allow in-place modification of a stored value.
let mut m = HashMap::new();
assert!(m.insert(1, 12).is_none());
assert!(m.insert(2, 8).is_none());
assert!(m.insert(5, 14).is_none());
let new = 100;
match m.get_mut(&5) {
None => panic!(),
Some(x) => *x = new,
}
assert_eq!(m.get(&5), Some(&new));
}
#[test]
fn test_insert_overwrite() {
// Re-inserting an existing key overwrites and returns the old value.
let mut m = HashMap::new();
assert!(m.insert(1, 2).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert!(!m.insert(1, 3).is_none());
assert_eq!(*m.get(&1).unwrap(), 3);
}
#[test]
fn test_insert_conflicts() {
// Keys chosen to collide in a small table must all remain retrievable.
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert!(m.insert(5, 3).is_none());
assert!(m.insert(9, 4).is_none());
assert_eq!(*m.get(&9).unwrap(), 4);
assert_eq!(*m.get(&5).unwrap(), 3);
assert_eq!(*m.get(&1).unwrap(), 2);
}
#[test]
fn test_conflict_remove() {
// Removing one colliding key must not disturb the others.
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert!(m.insert(5, 3).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&5).unwrap(), 3);
assert!(m.insert(9, 4).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&5).unwrap(), 3);
assert_eq!(*m.get(&9).unwrap(), 4);
assert!(m.remove(&1).is_some());
assert_eq!(*m.get(&9).unwrap(), 4);
assert_eq!(*m.get(&5).unwrap(), 3);
}
#[test]
fn test_is_empty() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert!(!m.is_empty());
assert!(m.remove(&1).is_some());
assert!(m.is_empty());
}
#[test]
fn test_remove() {
// remove returns the value once, then None.
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.remove(&1), Some(2));
assert_eq!(m.remove(&1), None);
}
#[test]
fn test_remove_entry() {
// remove_entry returns both key and value.
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.remove_entry(&1), Some((1, 2)));
assert_eq!(m.remove(&1), None);
}
#[test]
fn test_iterate() {
// Iteration must visit every pair exactly once (tracked via a bitmask).
let mut m = HashMap::with_capacity(4);
for i in 0..32 {
assert!(m.insert(i, i * 2).is_none());
}
assert_eq!(m.len(), 32);
let mut observed: u32 = 0;
for (k, v) in &m {
assert_eq!(*v, *k * 2);
observed |= 1 << *k;
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_keys() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: HashMap<_, _> = vec.into_iter().collect();
let keys: Vec<_> = map.keys().cloned().collect();
assert_eq!(keys.len(), 3);
assert!(keys.contains(&1));
assert!(keys.contains(&2));
assert!(keys.contains(&3));
}
#[test]
fn test_values() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: HashMap<_, _> = vec.into_iter().collect();
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&'a'));
assert!(values.contains(&'b'));
assert!(values.contains(&'c'));
}
#[test]
fn test_values_mut() {
    let mut map: HashMap<_, _> = [(1, 1), (2, 2), (3, 3)].iter().cloned().collect();
    // Double every value in place through the mutable value iterator.
    map.values_mut().for_each(|value| *value *= 2);
    let doubled: Vec<_> = map.values().cloned().collect();
    assert_eq!(doubled.len(), 3);
    for expected in &[2, 4, 6] {
        assert!(doubled.contains(expected));
    }
}
#[test]
fn test_find() {
    let mut map = HashMap::new();
    // Missing key: no value.
    assert!(map.get(&1).is_none());
    map.insert(1, 2);
    // Present key: exactly the stored value.
    assert_eq!(map.get(&1), Some(&2));
}
#[test]
fn test_eq() {
    let full: HashMap<_, _> = [(1, 2), (2, 3), (3, 4)].iter().cloned().collect();
    let mut partial = HashMap::new();
    partial.insert(1, 2);
    partial.insert(2, 3);
    // A proper subset must not compare equal...
    assert!(full != partial);
    // ...but adding the missing pair makes the maps equal.
    partial.insert(3, 4);
    assert_eq!(full, partial);
}
#[test]
fn test_show() {
    let mut map = HashMap::new();
    map.insert(1, 2);
    map.insert(3, 4);
    // Iteration order is unspecified, so accept both possible renderings.
    let rendered = format!("{:?}", map);
    assert!(rendered == "{1: 2, 3: 4}" || rendered == "{3: 4, 1: 2}");
    let empty: HashMap<i32, i32> = HashMap::new();
    assert_eq!(format!("{:?}", empty), "{}");
}
#[test]
fn test_expand() {
    let mut map = HashMap::new();
    assert_eq!(map.len(), 0);
    assert!(map.is_empty());
    // Keep inserting until the raw table is forced to grow; the element
    // count must still match the number of inserts afterwards.
    let starting_cap = map.raw_capacity();
    let mut inserted = 0;
    while map.raw_capacity() == starting_cap {
        map.insert(inserted, inserted);
        inserted += 1;
    }
    assert_eq!(map.len(), inserted);
    assert!(!map.is_empty());
}
#[test]
fn test_behavior_resize_policy() {
    // Exercises the exact grow/shrink policy of the raw table. The
    // assertions are deliberately tied to physical bucket counts
    // (`raw_capacity`), not to the user-visible `capacity`.
    let mut m = HashMap::new();
    assert_eq!(m.len(), 0);
    // An unallocated map reports a raw capacity of one dummy bucket.
    assert_eq!(m.raw_capacity(), 1);
    assert!(m.is_empty());
    m.insert(0, 0);
    m.remove(&0);
    assert!(m.is_empty());
    let initial_raw_cap = m.raw_capacity();
    // Reserving as many extra slots as there are buckets must double the table.
    m.reserve(initial_raw_cap);
    let raw_cap = m.raw_capacity();
    assert_eq!(raw_cap, initial_raw_cap * 2);
    let mut i = 0;
    for _ in 0..raw_cap * 3 / 4 {
        m.insert(i, i);
        i += 1;
    }
    // three quarters full — still no resize expected
    assert_eq!(m.len(), i);
    assert_eq!(m.raw_capacity(), raw_cap);
    for _ in 0..raw_cap / 4 {
        m.insert(i, i);
        i += 1;
    }
    // half full (of the now-doubled table)
    let new_raw_cap = m.raw_capacity();
    assert_eq!(new_raw_cap, raw_cap * 2);
    for _ in 0..raw_cap / 2 - 1 {
        i -= 1;
        m.remove(&i);
        // Removals alone never shrink the table.
        assert_eq!(m.raw_capacity(), new_raw_cap);
    }
    // A little more than one quarter full.
    m.shrink_to_fit();
    assert_eq!(m.raw_capacity(), raw_cap);
    // again, a little more than half full
    for _ in 0..raw_cap / 2 {
        i -= 1;
        m.remove(&i);
    }
    m.shrink_to_fit();
    assert_eq!(m.len(), i);
    assert!(!m.is_empty());
    assert_eq!(m.raw_capacity(), initial_raw_cap);
}
#[test]
fn test_reserve_shrink_to_fit() {
    let mut map = HashMap::new();
    map.insert(0, 0);
    map.remove(&0);
    assert!(map.capacity() >= map.len());
    for key in 0..128 {
        map.insert(key, key);
    }
    map.reserve(256);
    // Everything promised by `reserve` must fit without reallocating.
    let reserved_cap = map.capacity();
    for key in 128..(128 + 256) {
        map.insert(key, key);
        assert_eq!(map.capacity(), reserved_cap);
    }
    for key in 100..(128 + 256) {
        assert_eq!(map.remove(&key), Some(key));
    }
    // Shrinking keeps the remaining 100 entries intact.
    map.shrink_to_fit();
    assert_eq!(map.len(), 100);
    assert!(!map.is_empty());
    assert!(map.capacity() >= map.len());
    for key in 0..100 {
        assert_eq!(map.remove(&key), Some(key));
    }
    // Shrinking an empty map must still leave it usable.
    map.shrink_to_fit();
    map.insert(0, 0);
    assert_eq!(map.len(), 1);
    assert!(map.capacity() >= map.len());
    assert_eq!(map.remove(&0), Some(0));
}
#[test]
fn test_from_iter() {
    let pairs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
    let map: HashMap<_, _> = pairs.iter().cloned().collect();
    // Every source pair must be retrievable from the collected map.
    for &(key, value) in &pairs {
        assert_eq!(map.get(&key), Some(&value));
    }
}
#[test]
fn test_size_hint() {
    let map: HashMap<_, _> = (1..=6).map(|i| (i, i)).collect();
    let mut iter = map.iter();
    // Consume half of the six entries...
    for _ in iter.by_ref().take(3) {}
    // ...and the hint must report exactly the three that remain.
    assert_eq!(iter.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_len() {
    let map: HashMap<_, _> = (1..=6).map(|i| (i, i)).collect();
    let mut iter = map.iter();
    // After consuming three of six entries, `len` must report three left.
    for _ in iter.by_ref().take(3) {}
    assert_eq!(iter.len(), 3);
}
#[test]
fn test_mut_size_hint() {
    let mut map: HashMap<_, _> = (1..=6).map(|i| (i, i)).collect();
    let mut iter = map.iter_mut();
    // The mutable iterator's hint must stay exact as entries are consumed.
    for _ in iter.by_ref().take(3) {}
    assert_eq!(iter.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_mut_len() {
    let mut map: HashMap<_, _> = (1..=6).map(|i| (i, i)).collect();
    let mut iter = map.iter_mut();
    // After consuming three of six entries, `len` must report three left.
    for _ in iter.by_ref().take(3) {}
    assert_eq!(iter.len(), 3);
}
#[test]
fn test_index() {
    // Indexing with a present key yields the stored value.
    let map: HashMap<_, _> = [(1, 2), (2, 1), (3, 4)].iter().cloned().collect();
    assert_eq!(map[&2], 1);
}
#[test]
#[should_panic]
fn test_index_nonexistent() {
    let map: HashMap<_, _> = [(1, 2), (2, 1), (3, 4)].iter().cloned().collect();
    // Indexing with an absent key must panic.
    map[&4];
}
#[test]
fn test_entry() {
    let pairs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
    let mut map: HashMap<_, _> = pairs.iter().cloned().collect();
    // Existing key: overwrite through the occupied entry.
    match map.entry(1) {
        Vacant(_) => unreachable!(),
        Occupied(mut entry) => {
            assert_eq!(entry.get(), &10);
            assert_eq!(entry.insert(100), 10);
        }
    }
    assert_eq!(map.get(&1), Some(&100));
    assert_eq!(map.len(), 6);
    // Existing key: update in place through `get_mut`.
    match map.entry(2) {
        Vacant(_) => unreachable!(),
        Occupied(mut entry) => {
            *entry.get_mut() *= 10;
        }
    }
    assert_eq!(map.get(&2), Some(&200));
    assert_eq!(map.len(), 6);
    // Existing key: take the value out, shrinking the map.
    match map.entry(3) {
        Vacant(_) => unreachable!(),
        Occupied(entry) => {
            assert_eq!(entry.remove(), 30);
        }
    }
    assert_eq!(map.get(&3), None);
    assert_eq!(map.len(), 5);
    // Missing key: insert through the vacant entry.
    match map.entry(10) {
        Occupied(_) => unreachable!(),
        Vacant(entry) => {
            assert_eq!(*entry.insert(1000), 1000);
        }
    }
    assert_eq!(map.get(&10), Some(&1000));
    assert_eq!(map.len(), 6);
}
#[test]
fn test_entry_take_doesnt_corrupt() {
    #![allow(deprecated)] //rand
    // Regression test for #19292: removing through an occupied entry must
    // leave the table in a state where every key is still findable.
    fn check(m: &HashMap<i32, ()>) {
        for k in m.keys() {
            assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
        }
    }
    let mut m = HashMap::new();
    // Fixed seed so the random insert/remove sequence is reproducible.
    let mut rng = {
        let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
        SmallRng::from_seed(seed)
    };
    // Populate the map with some items.
    for _ in 0..50 {
        let x = rng.gen_range(-10, 10);
        m.insert(x, ());
    }
    // Randomly remove entries via the entry API, re-validating the whole
    // map after every step.
    for _ in 0..1000 {
        let x = rng.gen_range(-10, 10);
        match m.entry(x) {
            Vacant(_) => {}
            Occupied(e) => {
                e.remove();
            }
        }
        check(&m);
    }
}
#[test]
fn test_extend_ref() {
    let mut target = HashMap::new();
    target.insert(1, "one");
    let mut source = HashMap::new();
    source.insert(2, "two");
    source.insert(3, "three");
    // Extending from a borrowed map copies the (Copy) keys and values
    // without consuming `source`.
    target.extend(&source);
    assert_eq!(target.len(), 3);
    assert_eq!(target[&1], "one");
    assert_eq!(target[&2], "two");
    assert_eq!(target[&3], "three");
}
#[test]
fn test_capacity_not_less_than_len() {
    let mut map = HashMap::new();
    let mut item = 0;
    for _ in 0..116 {
        map.insert(item, 0);
        item += 1;
    }
    // There is always some slack before the next resize...
    assert!(map.capacity() > map.len());
    // ...and filling exactly that slack must not trigger one.
    let free = map.capacity() - map.len();
    for _ in 0..free {
        map.insert(item, 0);
        item += 1;
    }
    assert_eq!(map.len(), map.capacity());
    // Insert at capacity should cause allocation.
    map.insert(item, 0);
    assert!(map.capacity() > map.len());
}
#[test]
fn test_occupied_entry_key() {
    let mut map = HashMap::new();
    let key = "hello there";
    let value = "value goes here";
    assert!(map.is_empty());
    map.insert(key, value);
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
    // The occupied entry must expose the key it was looked up with.
    match map.entry(key) {
        Vacant(_) => panic!(),
        Occupied(entry) => assert_eq!(key, *entry.key()),
    }
    // Inspecting the entry must not disturb the stored pair.
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
}
#[test]
fn test_vacant_entry_key() {
    let mut map = HashMap::new();
    let key = "hello there";
    let value = "value goes here";
    assert!(map.is_empty());
    // A vacant entry must expose the key it was created with, and
    // inserting through it must store the pair.
    match map.entry(key) {
        Occupied(_) => panic!(),
        Vacant(entry) => {
            assert_eq!(key, *entry.key());
            entry.insert(value);
        }
    }
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
}
#[test]
fn test_retain() {
    let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
    // Keep only even keys; exactly half of the 100 entries survive.
    map.retain(|&k, _| k % 2 == 0);
    assert_eq!(map.len(), 50);
    for key in &[2, 4, 6] {
        assert_eq!(map[key], key * 10);
    }
}
#[test]
fn test_drain_filter() {
    let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
    // Draining the even keys removes exactly half of the entries.
    let removed = map.drain_filter(|&k, _| k % 2 == 0);
    assert_eq!(removed.count(), 4);
    assert_eq!(map.len(), 4);
}
#[test]
#[cfg_attr(miri, ignore)] // FIXME: no OOM signalling (https://github.com/rust-lang/miri/issues/613)
fn test_try_reserve() {
    let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
    const MAX_USIZE: usize = usize::MAX;
    // Asking for usize::MAX entries must fail the capacity calculation
    // before any allocation is even attempted.
    if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
    } else {
        panic!("usize::MAX should trigger an overflow!");
    }
    // usize::MAX / 8 passes the overflow check but should be refused by
    // the allocator.
    if let Err(AllocErr { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
    } else {
        // This may succeed if there is enough free memory. Attempt to
        // allocate a second hashmap to ensure the allocation will fail.
        let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
        if let Err(AllocErr { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
        } else {
            panic!("usize::MAX / 8 should trigger an OOM!");
        }
    }
}
#[test]
fn test_raw_entry() {
    use super::RawEntryMut::{Occupied, Vacant};
    let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
    let mut map: HashMap<_, _> = xs.iter().cloned().collect();
    // Computes a key's hash exactly as the map itself would, so the
    // `from_hash`/`from_key_hashed_nocheck` lookups below agree with
    // `from_key`.
    let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
        use core::hash::{BuildHasher, Hash, Hasher};
        let mut hasher = map.hasher().build_hasher();
        k.hash(&mut hasher);
        hasher.finish()
    };
    // Existing key (insert)
    match map.raw_entry_mut().from_key(&1) {
        Vacant(_) => unreachable!(),
        Occupied(mut view) => {
            assert_eq!(view.get(), &10);
            assert_eq!(view.insert(100), 10);
        }
    }
    // All three immutable lookup flavours must observe the new value.
    let hash1 = compute_hash(&map, 1);
    assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
    assert_eq!(
        map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
        (&1, &100)
    );
    assert_eq!(
        map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
        (&1, &100)
    );
    assert_eq!(map.len(), 6);
    // Existing key (update)
    match map.raw_entry_mut().from_key(&2) {
        Vacant(_) => unreachable!(),
        Occupied(mut view) => {
            let v = view.get_mut();
            let new_v = (*v) * 10;
            *v = new_v;
        }
    }
    let hash2 = compute_hash(&map, 2);
    assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
    assert_eq!(
        map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
        (&2, &200)
    );
    assert_eq!(
        map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
        (&2, &200)
    );
    assert_eq!(map.len(), 6);
    // Existing key (take)
    let hash3 = compute_hash(&map, 3);
    match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
        Vacant(_) => unreachable!(),
        Occupied(view) => {
            assert_eq!(view.remove_entry(), (3, 30));
        }
    }
    // After removal, every lookup flavour must report the key as gone.
    assert_eq!(map.raw_entry().from_key(&3), None);
    assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
    assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
    assert_eq!(map.len(), 5);
    // Nonexistent key (insert)
    match map.raw_entry_mut().from_key(&10) {
        Occupied(_) => unreachable!(),
        Vacant(view) => {
            assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
        }
    }
    assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
    assert_eq!(map.len(), 6);
    // Ensure all lookup methods produce equivalent results.
    for k in 0..12 {
        let hash = compute_hash(&map, k);
        let v = map.get(&k).cloned();
        let kv = v.as_ref().map(|v| (&k, v));
        assert_eq!(map.raw_entry().from_key(&k), kv);
        assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
        match map.raw_entry_mut().from_key(&k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
        match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
        match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
    }
}
#[test]
fn test_key_without_hash_impl() {
    // The raw-entry API lets callers supply hashes directly, so the key
    // type needs no `Hash` impl and the map uses a `()` hash builder.
    #[derive(Debug)]
    struct IntWrapper(u64);
    let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
    {
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
    }
    {
        // Insert key 0, providing its hash (the wrapped value) by hand.
        let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
            RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
            RawEntryMut::Vacant(e) => e,
        };
        vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
    }
    {
        // Only key 0 is present so far.
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
    }
    {
        let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
            RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
            RawEntryMut::Vacant(e) => e,
        };
        vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
    }
    {
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
    }
    {
        // Removal through the raw occupied entry must not disturb key 1.
        let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
            RawEntryMut::Occupied(e) => e,
            RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
        };
        occupied_entry.remove();
    }
    assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
    assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
    assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
}
Updated to fix a clippy lint.
Fixed the clippy lint (added a tab and spaced out `=(K,V)` => `= (K, V)`).
Also made the test more explicit.
Edit:
I've finally installed clippy locally rather than relying on the Travis check.
use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
use crate::CollectionAllocErr;
use core::borrow::Borrow;
use core::fmt::{self, Debug};
use core::hash::{BuildHasher, Hash, Hasher};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ops::Index;
/// Default hasher for `HashMap`.
#[cfg(feature = "ahash")]
pub type DefaultHashBuilder = ahash::ABuildHasher;
/// Dummy default hasher for `HashMap`.
///
/// With the `ahash` feature disabled there is no default hashing algorithm,
/// so this is an uninhabited placeholder type; maps using it can only be
/// created through the `with_hasher` constructors with a different `S`.
#[cfg(not(feature = "ahash"))]
pub enum DefaultHashBuilder {}
/// A hash map implemented with quadratic probing and SIMD lookup.
///
/// The default hashing algorithm is currently [`AHash`], though this is
/// subject to change at any point in the future. This hash function is very
/// fast for all types of keys, but this algorithm will typically *not* protect
/// against attacks such as HashDoS.
///
/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
///
/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
/// If you implement these yourself, it is important that the following
/// property holds:
///
/// ```text
/// k1 == k2 -> hash(k1) == hash(k2)
/// ```
///
/// In other words, if two keys are equal, their hashes must be equal.
///
/// It is a logic error for a key to be modified in such a way that the key's
/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
/// the [`Eq`] trait, changes while it is in the map. This is normally only
/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
/// It is also a logic error for the [`Hash`] implementation of a key to panic.
/// This is generally only possible if the trait is implemented manually. If a
/// panic does occur then the contents of the `HashMap` may become corrupted and
/// some items may be dropped from the table.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `HashMap<String, String>` in this example).
/// let mut book_reviews = HashMap::new();
///
/// // Review some books.
/// book_reviews.insert(
/// "Adventures of Huckleberry Finn".to_string(),
/// "My favorite book.".to_string(),
/// );
/// book_reviews.insert(
/// "Grimms' Fairy Tales".to_string(),
/// "Masterpiece.".to_string(),
/// );
/// book_reviews.insert(
/// "Pride and Prejudice".to_string(),
/// "Very enjoyable.".to_string(),
/// );
/// book_reviews.insert(
/// "The Adventures of Sherlock Holmes".to_string(),
/// "Eye lyked it alot.".to_string(),
/// );
///
/// // Check for a specific one.
/// // When collections store owned values (String), they can still be
/// // queried using references (&str).
/// if !book_reviews.contains_key("Les Misérables") {
/// println!("We've got {} reviews, but Les Misérables ain't one.",
/// book_reviews.len());
/// }
///
/// // oops, this review has a lot of spelling mistakes, let's delete it.
/// book_reviews.remove("The Adventures of Sherlock Holmes");
///
/// // Look up the values associated with some keys.
/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
/// for &book in &to_find {
/// match book_reviews.get(book) {
/// Some(review) => println!("{}: {}", book, review),
/// None => println!("{} is unreviewed.", book)
/// }
/// }
///
/// // Look up the value for a key (will panic if the key is not found).
/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
///
/// // Iterate over everything.
/// for (book, review) in &book_reviews {
/// println!("{}: \"{}\"", book, review);
/// }
/// ```
///
/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
/// for more complex methods of getting, setting, updating and removing keys and
/// their values:
///
/// ```
/// use hashbrown::HashMap;
///
/// // type inference lets us omit an explicit type signature (which
/// // would be `HashMap<&str, u8>` in this example).
/// let mut player_stats = HashMap::new();
///
/// fn random_stat_buff() -> u8 {
/// // could actually return some random value here - let's just return
/// // some fixed value for now
/// 42
/// }
///
/// // insert a key only if it doesn't already exist
/// player_stats.entry("health").or_insert(100);
///
/// // insert a key using a function that provides a new value only if it
/// // doesn't already exist
/// player_stats.entry("defence").or_insert_with(random_stat_buff);
///
/// // update a key, guarding against the key possibly not being set
/// let stat = player_stats.entry("attack").or_insert(100);
/// *stat += random_stat_buff();
/// ```
///
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
/// [`default`]: #method.default
/// [`with_hasher`]: #method.with_hasher
/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
/// [`AHash`]: https://crates.io/crates/ahash
///
/// ```
/// use hashbrown::HashMap;
///
/// #[derive(Hash, Eq, PartialEq, Debug)]
/// struct Viking {
/// name: String,
/// country: String,
/// }
///
/// impl Viking {
/// /// Creates a new Viking.
/// fn new(name: &str, country: &str) -> Viking {
/// Viking { name: name.to_string(), country: country.to_string() }
/// }
/// }
///
/// // Use a HashMap to store the vikings' health points.
/// let mut vikings = HashMap::new();
///
/// vikings.insert(Viking::new("Einar", "Norway"), 25);
/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
///
/// // Use derived implementation to print the status of the vikings.
/// for (viking, health) in &vikings {
/// println!("{:?} has {} hp", viking, health);
/// }
/// ```
///
/// A `HashMap` with fixed list of elements can be initialized from an array:
///
/// ```
/// use hashbrown::HashMap;
///
/// fn main() {
/// let timber_resources: HashMap<&str, i32> =
/// [("Norway", 100),
/// ("Denmark", 50),
/// ("Iceland", 10)]
/// .iter().cloned().collect();
/// // use the values stored in map
/// }
/// ```
#[derive(Clone)]
pub struct HashMap<K, V, S = DefaultHashBuilder> {
    /// Builder used to produce a fresh `Hasher` for every key hashed
    /// (see `make_hash`).
    pub(crate) hash_builder: S,
    /// Backing storage; entries are stored as `(key, value)` tuples.
    pub(crate) table: RawTable<(K, V)>,
}
/// Hashes `val` with a fresh hasher obtained from `hash_builder`.
///
/// All map operations funnel key hashing through this helper so the whole
/// table agrees on a single 64-bit hash per key.
#[cfg_attr(feature = "inline-more", inline)]
pub(crate) fn make_hash<K: Hash + ?Sized>(hash_builder: &impl BuildHasher, val: &K) -> u64 {
    let mut hasher = hash_builder.build_hasher();
    val.hash(&mut hasher);
    hasher.finish()
}
#[cfg(feature = "ahash")]
impl<K, V> HashMap<K, V, DefaultHashBuilder> {
    /// Creates an empty `HashMap`.
    ///
    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
    /// is first inserted into.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::new();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn new() -> Self {
        // Delegates to `Default`, which builds an empty, unallocated map.
        Self::default()
    }
    /// Creates an empty `HashMap` with the specified capacity.
    ///
    /// The hash map will be able to hold at least `capacity` elements without
    /// reallocating. If `capacity` is 0, the hash map will not allocate.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn with_capacity(capacity: usize) -> Self {
        // Same as `with_capacity_and_hasher` with the default hash builder.
        Self::with_capacity_and_hasher(capacity, DefaultHashBuilder::default())
    }
}
impl<K, V, S> HashMap<K, V, S> {
/// Creates an empty `HashMap` that uses the given hash builder to hash keys.
///
/// The created map has the default initial capacity.
///
/// Warning: `hash_builder` is normally randomly generated, and is designed
/// to make HashMaps resistant to attacks that cause many collisions and
/// very poor performance. Supplying one manually through this function can
/// expose a DoS attack vector.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let s = DefaultHashBuilder::default();
/// let mut map = HashMap::with_hasher(s);
/// map.insert(1, 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn with_hasher(hash_builder: S) -> Self {
    // Start from an unallocated table; buckets are acquired lazily.
    Self {
        table: RawTable::new(),
        hash_builder,
    }
}
/// Creates an empty `HashMap` with the specified capacity, using
/// `hash_builder` to hash the keys.
///
/// The map will hold at least `capacity` elements without reallocating;
/// a capacity of 0 performs no allocation.
///
/// Warning: `hash_builder` is normally randomly generated, and is designed
/// to make HashMaps resistant to attacks that cause many collisions and
/// very poor performance. Supplying one manually through this function can
/// expose a DoS attack vector.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let s = DefaultHashBuilder::default();
/// let mut map = HashMap::with_capacity_and_hasher(10, s);
/// map.insert(1, 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
    // The raw table is sized up front so `capacity` inserts need no resize.
    Self {
        table: RawTable::with_capacity(capacity),
        hash_builder,
    }
}
/// Returns a reference to the map's [`BuildHasher`].
///
/// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// use hashbrown::hash_map::DefaultHashBuilder;
///
/// let hasher = DefaultHashBuilder::default();
/// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
/// let hasher: &DefaultHashBuilder = map.hasher();
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn hasher(&self) -> &S {
    // Hands back the builder supplied at construction time.
    &self.hash_builder
}
/// Returns the number of elements the map can hold without reallocating.
///
/// This number is a lower bound; the `HashMap<K, V>` might be able to hold
/// more, but is guaranteed to be able to hold at least this many.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// assert!(map.capacity() >= 100);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn capacity(&self) -> usize {
    // Delegates to the raw table's notion of usable capacity.
    self.table.capacity()
}
/// An iterator visiting all keys in arbitrary order.
/// The iterator element type is `&'a K`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for key in map.keys() {
///     println!("{}", key);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn keys(&self) -> Keys<'_, K, V> {
    // Thin adapter over `iter()` that yields only the key of each pair.
    Keys { inner: self.iter() }
}
/// An iterator visiting all values in arbitrary order.
/// The iterator element type is `&'a V`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for val in map.values() {
///     println!("{}", val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn values(&self) -> Values<'_, K, V> {
    // Thin adapter over `iter()` that yields only the value of each pair.
    Values { inner: self.iter() }
}
/// An iterator visiting all values mutably in arbitrary order.
/// The iterator element type is `&'a mut V`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for val in map.values_mut() {
///     *val = *val + 10;
/// }
///
/// for val in map.values() {
///     println!("{}", val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
    // Thin adapter over `iter_mut()` that yields only each pair's value.
    ValuesMut {
        inner: self.iter_mut(),
    }
}
/// An iterator visiting all key-value pairs in arbitrary order.
/// The iterator element type is `(&'a K, &'a V)`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for (key, val) in map.iter() {
///     println!("key: {} val: {}", key, val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn iter(&self) -> Iter<'_, K, V> {
    // Here we tie the lifetime of self to the iter.
    // SAFETY: the raw iterator does not itself borrow the table; wrapping
    // it in `Iter` with its `PhantomData` marker attaches the `&self`
    // lifetime, so the map cannot be mutated while the iterator lives.
    unsafe {
        Iter {
            inner: self.table.iter(),
            marker: PhantomData,
        }
    }
}
/// An iterator visiting all key-value pairs in arbitrary order,
/// with mutable references to the values.
/// The iterator element type is `(&'a K, &'a mut V)`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// // Update all values
/// for (_, val) in map.iter_mut() {
///     *val *= 2;
/// }
///
/// for (key, val) in &map {
///     println!("key: {} val: {}", key, val);
/// }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
    // Here we tie the lifetime of self to the iter.
    // SAFETY: the raw iterator does not itself borrow the table; wrapping
    // it in `IterMut` with its `PhantomData` marker attaches the
    // `&mut self` lifetime, giving exclusive access while it lives.
    unsafe {
        IterMut {
            inner: self.table.iter(),
            marker: PhantomData,
        }
    }
}
/// Test-only helper: the raw bucket count of the backing table, which
/// differs from the user-visible `capacity`.
#[cfg(test)]
#[cfg_attr(feature = "inline-more", inline)]
fn raw_capacity(&self) -> usize {
    self.table.buckets()
}
/// Returns the number of elements in the map.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// assert_eq!(a.len(), 0);
/// a.insert(1, "a");
/// assert_eq!(a.len(), 1);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn len(&self) -> usize {
    // The raw table tracks the element count; simply forward it.
    self.table.len()
}
/// Returns `true` if the map contains no elements.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// assert!(a.is_empty());
/// a.insert(1, "a");
/// assert!(!a.is_empty());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn is_empty(&self) -> bool {
    // Defined in terms of `len` so the two can never disagree.
    self.len() == 0
}
/// Clears the map, returning all key-value pairs as an iterator. Keeps the
/// allocated memory for reuse.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// a.insert(1, "a");
/// a.insert(2, "b");
///
/// for (k, v) in a.drain().take(1) {
///     assert!(k == 1 || k == 2);
///     assert!(v == "a" || v == "b");
/// }
///
/// assert!(a.is_empty());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn drain(&mut self) -> Drain<'_, K, V> {
    // Here we tie the lifetime of self to the iter.
    // SAFETY: the raw drain iterator does not itself borrow the table;
    // wrapping it in `Drain` attaches the `&mut self` lifetime so the map
    // cannot be used while entries are being drained.
    unsafe {
        Drain {
            inner: self.table.drain(),
        }
    }
}
/// Clears the map, removing all key-value pairs. Keeps the allocated memory
/// for reuse.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut a = HashMap::new();
/// a.insert(1, "a");
/// a.clear();
/// assert!(a.is_empty());
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn clear(&mut self) {
    // Drops every entry but retains the table's buckets for reuse.
    self.table.clear();
}
}
impl<K, V, S> HashMap<K, V, S>
where
K: Eq + Hash,
S: BuildHasher,
{
/// Reserves capacity for at least `additional` more elements to be inserted
/// in the `HashMap`. The collection may reserve more space to avoid
/// frequent reallocations.
///
/// # Panics
///
/// Panics if the new allocation size overflows [`usize`].
///
/// [`usize`]: https://doc.rust-lang.org/std/primitive.usize.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let mut map: HashMap<&str, i32> = HashMap::new();
/// map.reserve(10);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn reserve(&mut self, additional: usize) {
    // Entries may move during growth, so the table re-derives each entry's
    // hash from its key through this closure. Borrow the builder up front
    // so the closure does not capture `self`.
    let hasher = &self.hash_builder;
    self.table
        .reserve(additional, |entry| make_hash(hasher, &entry.0));
}
/// Tries to reserve capacity for at least `additional` more elements to be
/// inserted in the given `HashMap<K,V>`. The collection may reserve more
/// space to avoid frequent reallocations.
///
/// # Errors
///
/// If the capacity overflows, or the allocator reports a failure, then an
/// error is returned.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
/// let mut map: HashMap<&str, isize> = HashMap::new();
/// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
    // Same rehash-on-growth closure as `reserve`, but allocation failures
    // are surfaced to the caller instead of panicking.
    let hasher = &self.hash_builder;
    self.table
        .try_reserve(additional, |entry| make_hash(hasher, &entry.0))
}
/// Shrinks the capacity of the map as much as possible. It will drop down
/// as much as possible while maintaining the internal rules and possibly
/// leaving some space in accordance with the resize policy.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// map.shrink_to_fit();
/// assert!(map.capacity() >= 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn shrink_to_fit(&mut self) {
    // A minimum capacity of zero means "as small as the contents allow".
    // Entries move during the shrink, so their hashes are re-derived.
    let hasher = &self.hash_builder;
    self.table.shrink_to(0, |entry| make_hash(hasher, &entry.0));
}
/// Shrinks the capacity of the map with a lower limit. It will drop down no
/// lower than the supplied limit while maintaining the internal rules and
/// possibly leaving some space in accordance with the resize policy.
///
/// This function does nothing if the current capacity is smaller than the
/// supplied minimum capacity.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// map.shrink_to(10);
/// assert!(map.capacity() >= 10);
/// map.shrink_to(0);
/// assert!(map.capacity() >= 2);
/// map.shrink_to(10);
/// assert!(map.capacity() >= 2);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn shrink_to(&mut self, min_capacity: usize) {
    // Entries move during the shrink, so their hashes are re-derived from
    // the keys via this closure.
    let hasher = &self.hash_builder;
    self.table
        .shrink_to(min_capacity, |entry| make_hash(hasher, &entry.0));
}
/// Gets the given key's corresponding entry in the map for in-place
/// manipulation.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut letters = HashMap::new();
///
/// for ch in "a short treatise on fungi".chars() {
///     let counter = letters.entry(ch).or_insert(0);
///     *counter += 1;
/// }
///
/// assert_eq!(letters[&'s'], 2);
/// assert_eq!(letters[&'t'], 3);
/// assert_eq!(letters[&'u'], 1);
/// assert_eq!(letters.get(&'y'), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> {
    // Hash once up front; a vacant entry keeps the hash so the later
    // insert does not have to recompute it.
    let hash = make_hash(&self.hash_builder, &key);
    match self.table.find(hash, |q| q.0.eq(&key)) {
        Some(elem) => Entry::Occupied(OccupiedEntry {
            key: Some(key),
            elem,
            table: self,
        }),
        None => Entry::Vacant(VacantEntry {
            hash,
            key,
            table: self,
        }),
    }
}
/// Returns a reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
/// ```
#[inline]
pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
where
    K: Borrow<Q>,
    Q: Hash + Eq,
{
    // Thin wrapper: perform the full key/value lookup and discard the key.
    let (_, value) = self.get_key_value(k)?;
    Some(value)
}
/// Returns the key-value pair corresponding to the supplied key.
///
/// The supplied key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
/// assert_eq!(map.get_key_value(&2), None);
/// ```
#[inline]
pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
where
    K: Borrow<Q>,
    Q: Hash + Eq,
{
    let hash = make_hash(&self.hash_builder, k);
    self.table
        .find(hash, |x| k.eq(x.0.borrow()))
        // The bucket found above belongs to `self.table`; the shared
        // references produced here are tied to the `&self` borrow, so the
        // map cannot be mutated while they are alive.
        .map(|item| unsafe {
            let &(ref key, ref value) = item.as_ref();
            (key, value)
        })
}
/// Returns `true` if the map contains a value for the specified key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
where
    K: Borrow<Q>,
    Q: Hash + Eq,
{
    // Presence is just a successful lookup; delegate to `get`.
    self.get(k).is_some()
}
/// Returns a mutable reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// if let Some(x) = map.get_mut(&1) {
///     *x = "b";
/// }
/// assert_eq!(map[&1], "b");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
where
    K: Borrow<Q>,
    Q: Hash + Eq,
{
    let hash = make_hash(&self.hash_builder, k);
    self.table
        .find(hash, |x| k.eq(x.0.borrow()))
        // The exclusive reference returned is tied to the `&mut self`
        // borrow, so no other access to the bucket can coexist with it.
        .map(|item| unsafe { &mut item.as_mut().1 })
}
/// Inserts a key-value pair into the map.
///
/// If the map did not have this key present, [`None`] is returned.
///
/// If the map did have this key present, the value is updated, and the old
/// value is returned. The key is not updated, though; this matters for
/// types that can be `==` without being identical. See the [module-level
/// documentation] for more.
///
/// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
/// [module-level documentation]: index.html#insert-and-complex-keys
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// assert_eq!(map.insert(37, "a"), None);
/// assert_eq!(map.is_empty(), false);
///
/// map.insert(37, "b");
/// assert_eq!(map.insert(37, "c"), Some("b"));
/// assert_eq!(map[&37], "c");
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn insert(&mut self, k: K, v: V) -> Option<V> {
    // Hash once; the same hash serves both the lookup and a fresh insert.
    let hash = make_hash(&self.hash_builder, &k);
    unsafe {
        match self.table.find(hash, |x| k.eq(&x.0)) {
            // Key already present: swap in the new value, keep the old key.
            Some(bucket) => Some(mem::replace(&mut bucket.as_mut().1, v)),
            // New key: insert, providing a rehash closure for any resize.
            None => {
                let hash_builder = &self.hash_builder;
                self.table
                    .insert(hash, (k, v), |x| make_hash(hash_builder, &x.0));
                None
            }
        }
    }
}
/// Removes a key from the map, returning the value at the key if the key
/// was previously in the map.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
where
    K: Borrow<Q>,
    Q: Hash + Eq,
{
    // Remove the full entry and keep only the value half.
    let (_, value) = self.remove_entry(k)?;
    Some(value)
}
/// Removes a key from the map, returning the stored key and value if the
/// key was previously in the map.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// # fn main() {
/// let mut map = HashMap::new();
/// map.insert(1, "a");
/// assert_eq!(map.remove_entry(&1), Some((1, "a")));
/// assert_eq!(map.remove(&1), None);
/// # }
/// ```
#[cfg_attr(feature = "inline-more", inline)]
pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
where
    K: Borrow<Q>,
    Q: Hash + Eq,
{
    unsafe {
        // Hash `k` directly rather than `&k` (a `&&Q`), for consistency with
        // `get_key_value`/`get_mut`. The hash value is identical either way
        // (`Hash` for references delegates to the referent), but the extra
        // level of indirection was needless.
        let hash = make_hash(&self.hash_builder, k);
        if let Some(item) = self.table.find(hash, |x| k.eq(x.0.borrow())) {
            // Unlink the bucket first; `erase_no_drop` leaves the payload
            // intact, so moving it out with `read` afterwards is sound and
            // the element is dropped exactly once (by the caller).
            self.table.erase_no_drop(&item);
            Some(item.read())
        } else {
            None
        }
    }
}
/// Retains only the elements specified by the predicate.
///
/// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
/// map.retain(|&k, _| k % 2 == 0);
/// assert_eq!(map.len(), 4);
/// ```
pub fn retain<F>(&mut self, mut f: F)
where
    F: FnMut(&K, &mut V) -> bool,
{
    // Here we only use `iter` as a temporary, preventing use-after-free
    // NOTE(review): this relies on the raw iterator staying valid while
    // already-yielded buckets are erased — presumably part of `RawIter`'s
    // contract; confirm against the raw-table module.
    unsafe {
        for item in self.table.iter() {
            let &mut (ref key, ref mut value) = item.as_mut();
            if !f(key, value) {
                // Erase the element from the table first since drop might panic.
                self.table.erase_no_drop(&item);
                item.drop();
            }
        }
    }
}
/// Drains elements which are false under the given predicate,
/// and returns an iterator over the removed items.
///
/// In other words, move all pairs `(k, v)` such that `f(&k,&mut v)` returns `false` out
/// into another iterator.
///
/// # Examples
///
/// ```
/// use hashbrown::HashMap;
///
/// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
/// let drained = map.drain_filter(|&k, _| k % 2 == 0);
/// assert_eq!(drained.count(), 4);
/// assert_eq!(map.len(), 4);
/// ```
pub fn drain_filter<'a, F>(&'a mut self, mut f: F) -> impl Iterator<Item = (K, V)> + '_
where
    F: 'a + FnMut(&K, &mut V) -> bool,
{
    // Here we only use `iter` as a temporary, preventing use-after-free
    // NOTE(review): removal is lazy — it happens only as the returned
    // iterator is advanced. If the caller drops the iterator early, the
    // remaining "false" elements stay in the map. Confirm this is the
    // intended contract, as the doc text does not mention it.
    unsafe {
        self.table.iter().filter_map(move |item| {
            let &mut (ref key, ref mut value) = item.as_mut();
            if f(key, value) {
                None
            } else {
                // Unlink without dropping, then move the pair out.
                self.table.erase_no_drop(&item);
                Some(item.read())
            }
        })
    }
}
}
impl<K, V, S> HashMap<K, V, S> {
    /// Creates a raw entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched. After this, insertions into a vacant entry
    /// still require an owned key to be provided.
    ///
    /// Raw entries are useful for such exotic situations as:
    ///
    /// * Hash memoization
    /// * Deferring the creation of an owned key until it is known to be required
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Because raw entries provide much more low-level control, it's much easier
    /// to put the HashMap into an inconsistent state which, while memory-safe,
    /// will cause the map to produce seemingly random results. Higher-level and
    /// more foolproof APIs like `entry` should be preferred when possible.
    ///
    /// In particular, the hash used to initialized the raw entry must still be
    /// consistent with the hash of the key that is ultimately stored in the entry.
    /// This is because implementations of HashMap may need to recompute hashes
    /// when resizing, at which point only the keys are available.
    ///
    /// Raw entries give mutable access to the keys. This must not be used
    /// to modify how the key would compare or hash, as the map will not re-evaluate
    /// where the key should go, meaning the keys may become "lost" if their
    /// location does not reflect their state. For instance, if you change a key
    /// so that the map now contains keys which compare equal, search may start
    /// acting erratically, with two keys randomly masking each other. Implementations
    /// are free to assume this doesn't happen (within the limits of memory-safety).
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> {
        // The builder only borrows the map; all searching happens through
        // its `from_*` methods.
        RawEntryBuilderMut { map: self }
    }
    /// Creates a raw immutable entry builder for the HashMap.
    ///
    /// Raw entries provide the lowest level of control for searching and
    /// manipulating a map. They must be manually initialized with a hash and
    /// then manually searched.
    ///
    /// This is useful for
    /// * Hash memoization
    /// * Using a search key that doesn't work with the Borrow trait
    /// * Using custom comparison logic without newtype wrappers
    ///
    /// Unless you are in such a situation, higher-level and more foolproof APIs like
    /// `get` should be preferred.
    ///
    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> {
        // Shared-borrow counterpart of `raw_entry_mut`.
        RawEntryBuilder { map: self }
    }
}
impl<K, V, S> PartialEq for HashMap<K, V, S>
where
    K: Eq + Hash,
    V: PartialEq,
    S: BuildHasher,
{
    /// Two maps are equal when they have the same length and every key of
    /// `self` maps to an equal value in `other`.
    fn eq(&self, other: &Self) -> bool {
        // The length check short-circuits; `Option` equality then compares
        // a present value (or detects absence) in one expression.
        self.len() == other.len()
            && self
                .iter()
                .all(|(key, value)| other.get(key) == Some(value))
    }
}
// `Eq` is a marker refinement of `PartialEq`; no methods to implement.
impl<K, V, S> Eq for HashMap<K, V, S>
where
    K: Eq + Hash,
    V: Eq,
    S: BuildHasher,
{
}
impl<K, V, S> Debug for HashMap<K, V, S>
where
    K: Debug,
    V: Debug,
{
    // Renders as a standard `{key: value, ...}` map via `debug_map`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
impl<K, V, S> Default for HashMap<K, V, S>
where
    S: Default,
{
    /// Creates an empty `HashMap<K, V, S>`, with the `Default` value for the hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    fn default() -> Self {
        Self::with_hasher(Default::default())
    }
}
impl<K, Q: ?Sized, V, S> Index<&Q> for HashMap<K, V, S>
where
    K: Eq + Hash + Borrow<Q>,
    Q: Eq + Hash,
    S: BuildHasher,
{
    type Output = V;
    /// Returns a reference to the value corresponding to the supplied key.
    ///
    /// # Panics
    ///
    /// Panics if the key is not present in the `HashMap`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn index(&self, key: &Q) -> &V {
        // `expect` supplies the documented panic on a missing key.
        self.get(key).expect("no entry found for key")
    }
}
/// An iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter`]: struct.HashMap.html#method.iter
/// [`HashMap`]: struct.HashMap.html
pub struct Iter<'a, K, V> {
    // Raw bucket iterator; the `PhantomData` below ties it to the map borrow.
    inner: RawIter<(K, V)>,
    // Marks this type as logically holding `&'a K` / `&'a V`.
    marker: PhantomData<(&'a K, &'a V)>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Iter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
    // Debug-prints the remaining entries without consuming the iterator
    // (a clone is iterated instead).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A mutable iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
/// [`HashMap`]: struct.HashMap.html
pub struct IterMut<'a, K, V> {
    inner: RawIter<(K, V)>,
    // To ensure invariance with respect to V
    marker: PhantomData<(&'a K, &'a mut V)>,
}
// We override the default Send impl which has K: Sync instead of K: Send. Both
// are correct, but this one is more general since it allows keys which
// implement Send but not Sync.
unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
impl<K, V> IterMut<'_, K, V> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // A shared-reference view over the same remaining buckets; used by
        // the `Debug` impl so printing does not consume the iterator.
        Iter {
            inner: self.inner.clone(),
            marker: PhantomData,
        }
    }
}
/// An owning iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`into_iter`] method on [`HashMap`][`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// [`into_iter`]: struct.HashMap.html#method.into_iter
/// [`HashMap`]: struct.HashMap.html
pub struct IntoIter<K, V> {
    // Owns the table's storage; yields `(K, V)` pairs by value.
    inner: RawIntoIter<(K, V)>,
}
impl<K, V> IntoIter<K, V> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // Borrowing view over what has not been yielded yet.
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// An iterator over the keys of a `HashMap`.
///
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`keys`]: struct.HashMap.html#method.keys
/// [`HashMap`]: struct.HashMap.html
pub struct Keys<'a, K, V> {
    // Wraps the full-entry iterator; the key half is projected in `next`.
    inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Keys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Keys {
            inner: self.inner.clone(),
        }
    }
}
impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
    // Prints the remaining keys from a clone, leaving `self` untouched.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// An iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values`]: struct.HashMap.html#method.values
/// [`HashMap`]: struct.HashMap.html
pub struct Values<'a, K, V> {
    // Wraps the full-entry iterator; the value half is projected in `next`.
    inner: Iter<'a, K, V>,
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
impl<K, V> Clone for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn clone(&self) -> Self {
        Values {
            inner: self.inner.clone(),
        }
    }
}
impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
    // Prints the remaining values from a clone, leaving `self` untouched.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.clone()).finish()
    }
}
/// A draining iterator over the entries of a `HashMap`.
///
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`drain`]: struct.HashMap.html#method.drain
/// [`HashMap`]: struct.HashMap.html
pub struct Drain<'a, K, V> {
    // Raw drain borrows the table mutably for `'a` and empties it.
    inner: RawDrain<'a, (K, V)>,
}
impl<K, V> Drain<'_, K, V> {
    /// Returns a iterator of references over the remaining items.
    #[cfg_attr(feature = "inline-more", inline)]
    pub(super) fn iter(&self) -> Iter<'_, K, V> {
        // Borrowing view over the not-yet-drained entries (used by `Debug`).
        Iter {
            inner: self.inner.iter(),
            marker: PhantomData,
        }
    }
}
/// A mutable iterator over the values of a `HashMap`.
///
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
/// [`values_mut`]: struct.HashMap.html#method.values_mut
/// [`HashMap`]: struct.HashMap.html
pub struct ValuesMut<'a, K, V> {
    // Wraps the mutable entry iterator; only the value half is exposed.
    inner: IterMut<'a, K, V>,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
pub struct RawEntryBuilderMut<'a, K, V, S> {
    // Exclusive borrow of the map; consumed by the `from_*` search methods.
    map: &'a mut HashMap<K, V, S>,
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This is a lower-level version of [`Entry`].
///
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
pub enum RawEntryMut<'a, K, V, S> {
    /// An occupied entry.
    Occupied(RawOccupiedEntryMut<'a, K, V>),
    /// A vacant entry.
    Vacant(RawVacantEntryMut<'a, K, V, S>),
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawOccupiedEntryMut<'a, K, V> {
    // The found bucket plus an exclusive borrow of its table, so the entry
    // can be read, replaced, or removed in place.
    elem: Bucket<(K, V)>,
    table: &'a mut RawTable<(K, V)>,
}
// SAFETY-style rationale: the entry exclusively borrows the table, so
// sending it across threads only requires the element type itself to be
// sendable. NOTE(review): presumed sound on that basis — mirrors the
// Send/Sync bounds on the owning map's other views.
unsafe impl<K, V> Send for RawOccupiedEntryMut<'_, K, V>
where
    K: Send,
    V: Send,
{
}
unsafe impl<K, V> Sync for RawOccupiedEntryMut<'_, K, V>
where
    K: Sync,
    V: Sync,
{
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
///
/// [`RawEntryMut`]: enum.RawEntryMut.html
pub struct RawVacantEntryMut<'a, K, V, S> {
    // Table to insert into, plus the hasher needed to rehash on resize.
    table: &'a mut RawTable<(K, V)>,
    hash_builder: &'a S,
}
/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
pub struct RawEntryBuilder<'a, K, V, S> {
    // Shared borrow: this builder can only look entries up, never insert.
    map: &'a HashMap<K, V, S>,
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
    /// Creates a `RawEntryMut` from the given key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Hash with the map's own hasher, then defer to the hash-supplied
        // variant so the lookup logic lives in one place.
        let mut hasher = self.map.hash_builder.build_hasher();
        k.hash(&mut hasher);
        self.from_key_hashed_nocheck(hasher.finish(), k)
    }
    /// Creates a `RawEntryMut` from the given key and its hash.
    ///
    /// "nocheck": the caller is trusted to supply a hash consistent with
    /// the map's hasher; nothing verifies it here.
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
    where
        K: Borrow<Q>,
        Q: Eq,
    {
        self.from_hash(hash, |q| q.borrow().eq(k))
    }
}
impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> {
    /// Creates a `RawEntryMut` from the given hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        self.search(hash, is_match)
    }
    // Shared lookup: probes the table with `hash`, testing candidate keys
    // with `is_match`, and wraps the outcome in the matching entry variant.
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S>
    where
        for<'b> F: FnMut(&'b K) -> bool,
    {
        match self.map.table.find(hash, |(k, _)| is_match(k)) {
            Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut {
                elem,
                table: &mut self.map.table,
            }),
            None => RawEntryMut::Vacant(RawVacantEntryMut {
                table: &mut self.map.table,
                hash_builder: &self.map.hash_builder,
            }),
        }
    }
}
impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> {
    /// Access an entry by key.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
    where
        S: BuildHasher,
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        // Hash with the map's hasher, then defer to the hash-supplied variant.
        let mut hasher = self.map.hash_builder.build_hasher();
        k.hash(&mut hasher);
        self.from_key_hashed_nocheck(hasher.finish(), k)
    }
    /// Access an entry by a key and its hash.
    ///
    /// The supplied hash is trusted to be consistent with the map's hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
    where
        K: Borrow<Q>,
        Q: Hash + Eq,
    {
        self.from_hash(hash, |q| q.borrow().eq(k))
    }
    // Shared read-only lookup used by all `from_*` entry points.
    #[cfg_attr(feature = "inline-more", inline)]
    fn search<F>(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        self.map
            .table
            .find(hash, |(k, _)| is_match(k))
            // The references are tied to the builder's `'a` shared borrow of
            // the map, so the bucket cannot be mutated while they live.
            .map(|item| unsafe {
                let &(ref key, ref value) = item.as_ref();
                (key, value)
            })
    }
    /// Access an entry by hash.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::wrong_self_convention)]
    pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
    where
        F: FnMut(&K) -> bool,
    {
        self.search(hash, is_match)
    }
}
impl<'a, K, V, S> RawEntryMut<'a, K, V, S> {
    /// Sets the value of the entry, and returns a RawOccupiedEntryMut.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
    ///
    /// assert_eq!(entry.remove_entry(), ("horseyland", 37));
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            // Occupied: overwrite the value, keep the existing key.
            RawEntryMut::Occupied(mut entry) => {
                entry.insert(value);
                entry
            }
            // Vacant: insert the pair and promote to an occupied view.
            RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
        }
    }
    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// mutable references to the key and value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
            RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
        }
    }
    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns mutable references to the key and value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    ///
    /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
    ///     ("poneyland", "hoho".to_string())
    /// });
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
    where
        F: FnOnce() -> (K, V),
        K: Hash,
        S: BuildHasher,
    {
        match self {
            RawEntryMut::Occupied(entry) => entry.into_key_value(),
            // `default` is only invoked when the entry is vacant (lazy).
            RawEntryMut::Vacant(entry) => {
                let (k, v) = default();
                entry.insert(k, v)
            }
        }
    }
    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.raw_entry_mut()
    ///    .from_key("poneyland")
    ///    .and_modify(|_k, v| { *v += 1 })
    ///    .or_insert("poneyland", 42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.raw_entry_mut()
    ///    .from_key("poneyland")
    ///    .and_modify(|_k, v| { *v += 1 })
    ///    .or_insert("poneyland", 0);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(self, f: F) -> Self
    where
        F: FnOnce(&mut K, &mut V),
    {
        match self {
            RawEntryMut::Occupied(mut entry) => {
                // Inner scope ends the mutable borrow before `entry` is
                // moved back into the returned variant.
                {
                    let (k, v) = entry.get_key_value_mut();
                    f(k, v);
                }
                RawEntryMut::Occupied(entry)
            }
            // Vacant entries pass through untouched.
            RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
        }
    }
}
impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> {
    // All accessors below dereference `self.elem`, a bucket found while this
    // entry held an exclusive borrow of the table; the borrow is still held,
    // so the bucket is valid and unaliased for the duration of each call.
    /// Gets a reference to the key in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        unsafe { &self.elem.as_ref().0 }
    }
    /// Gets a mutable reference to the key in the entry.
    ///
    /// Mutating the key must not change its hash or equality (see the
    /// `raw_entry_mut` docs), or lookups may misbehave.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key_mut(&mut self) -> &mut K {
        unsafe { &mut self.elem.as_mut().0 }
    }
    /// Converts the entry into a mutable reference to the key in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> &'a mut K {
        unsafe { &mut self.elem.as_mut().0 }
    }
    /// Gets a reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        unsafe { &self.elem.as_ref().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Gets a reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value(&mut self) -> (&K, &V) {
        unsafe {
            let &(ref key, ref value) = self.elem.as_ref();
            (key, value)
        }
    }
    /// Gets a mutable reference to the key and value in the entry.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
        unsafe {
            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
            (key, value)
        }
    }
    /// Converts the OccupiedEntry into a mutable reference to the key and value in the entry
    /// with a lifetime bound to the map itself.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
        unsafe {
            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
            (key, value)
        }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, value: V) -> V {
        mem::replace(self.get_mut(), value)
    }
    /// Sets the value of the entry, and returns the entry's old value.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_key(&mut self, key: K) -> K {
        mem::replace(self.key_mut(), key)
    }
    /// Takes the value out of the entry, and returns it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        self.remove_entry().1
    }
    /// Take the ownership of the key and value from the map.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        unsafe {
            // Unlink first (without dropping the payload), then move the
            // pair out; ownership transfers to the caller exactly once.
            self.table.erase_no_drop(&self.elem);
            self.elem.read()
        }
    }
}
impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        // Hash the key with the stored hasher, then defer to the
        // hash-supplied variant.
        let mut hasher = self.hash_builder.build_hasher();
        key.hash(&mut hasher);
        self.insert_hashed_nocheck(hasher.finish(), key, value)
    }
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    ///
    /// The supplied hash is trusted to match `key` under the map's hasher.
    #[cfg_attr(feature = "inline-more", inline)]
    #[allow(clippy::shadow_unrelated)]
    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash_builder = self.hash_builder;
        self.insert_with_hasher(hash, key, value, |k| make_hash(hash_builder, k))
    }
    /// Set the value of an entry with a custom hasher function.
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert_with_hasher<H>(
        self,
        hash: u64,
        key: K,
        value: V,
        hasher: H,
    ) -> (&'a mut K, &'a mut V)
    where
        H: Fn(&K) -> u64,
    {
        unsafe {
            // `hasher` is used to rehash existing entries if the insert
            // triggers a resize; the references returned are tied to the
            // entry's exclusive `'a` borrow of the table.
            let elem = self.table.insert(hash, (key, value), |x| hasher(&x.0));
            let &mut (ref mut k, ref mut v) = elem.as_mut();
            (k, v)
        }
    }
    // Insert and return an occupied view of the new bucket; used by
    // `RawEntryMut::insert`.
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V>
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash_builder = self.hash_builder;
        let mut hasher = self.hash_builder.build_hasher();
        key.hash(&mut hasher);
        let elem = self.table.insert(hasher.finish(), (key, value), |k| {
            make_hash(hash_builder, &k.0)
        });
        RawOccupiedEntryMut {
            elem,
            table: self.table,
        }
    }
}
// Debug impls for the raw-entry API. The mutable and immutable builders
// deliberately share the "RawEntryBuilder" label, and both `RawEntryMut`
// variants print as "RawEntry" — NOTE(review): this mirrors the unstable
// std raw-entry API's output; confirm before renaming.
impl<K, V, S> Debug for RawEntryBuilderMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawEntryBuilder").finish()
    }
}
impl<K: Debug, V: Debug, S> Debug for RawEntryMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(),
            RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(),
        }
    }
}
impl<K: Debug, V: Debug> Debug for RawOccupiedEntryMut<'_, K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawOccupiedEntryMut")
            .field("key", self.key())
            .field("value", self.get())
            .finish()
    }
}
impl<K, V, S> Debug for RawVacantEntryMut<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawVacantEntryMut").finish()
    }
}
impl<K, V, S> Debug for RawEntryBuilder<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RawEntryBuilder").finish()
    }
}
/// A view into a single entry in a map, which may either be vacant or occupied.
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
/// [`HashMap`]: struct.HashMap.html
/// [`entry`]: struct.HashMap.html#method.entry
pub enum Entry<'a, K, V, S> {
    /// An occupied entry.
    Occupied(OccupiedEntry<'a, K, V, S>),
    /// A vacant entry.
    Vacant(VacantEntry<'a, K, V, S>),
}
impl<K: Debug, V: Debug, S> Debug for Entry<'_, K, V, S> {
    // Both variants print under the common "Entry" tuple name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
            Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
        }
    }
}
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct OccupiedEntry<'a, K, V, S> {
    // The caller-supplied key, kept so `replace_*`-style operations can
    // hand it back; `None` once consumed.
    key: Option<K>,
    // Bucket of the found element plus an exclusive borrow of the map.
    elem: Bucket<(K, V)>,
    table: &'a mut HashMap<K, V, S>,
}
// SAFETY-style rationale: the entry exclusively borrows the map, so thread
// transfer only requires the contained types to be transferable.
// NOTE(review): presumed sound on that basis, matching the bounds used for
// `RawOccupiedEntryMut` above.
unsafe impl<K, V, S> Send for OccupiedEntry<'_, K, V, S>
where
    K: Send,
    V: Send,
    S: Send,
{
}
unsafe impl<K, V, S> Sync for OccupiedEntry<'_, K, V, S>
where
    K: Sync,
    V: Sync,
    S: Sync,
{
}
impl<K: Debug, V: Debug, S> Debug for OccupiedEntry<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("OccupiedEntry")
            .field("key", self.key())
            .field("value", self.get())
            .finish()
    }
}
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`Entry`] enum.
///
/// [`Entry`]: enum.Entry.html
pub struct VacantEntry<'a, K, V, S> {
    // Hash computed during the failed lookup, reused on insert.
    hash: u64,
    // The key that was searched for; consumed by an eventual insert.
    key: K,
    table: &'a mut HashMap<K, V, S>,
}
impl<K: Debug, V, S> Debug for VacantEntry<'_, K, V, S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("VacantEntry").field(self.key()).finish()
    }
}
// `for (k, v) in &map` — shared iteration, delegates to `iter`.
impl<'a, K, V, S> IntoIterator for &'a HashMap<K, V, S> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> Iter<'a, K, V> {
        self.iter()
    }
}
// `for (k, v) in &mut map` — keys stay shared, values are mutable.
impl<'a, K, V, S> IntoIterator for &'a mut HashMap<K, V, S> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = IterMut<'a, K, V>;
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IterMut<'a, K, V> {
        self.iter_mut()
    }
}
impl<K, V, S> IntoIterator for HashMap<K, V, S> {
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;
    /// Creates a consuming iterator, that is, one that moves each key-value
    /// pair out of the map in arbitrary order. The map cannot be used after
    /// calling this.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map = HashMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // Not possible with .iter()
    /// let vec: Vec<(&str, i32)> = map.into_iter().collect();
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    fn into_iter(self) -> IntoIter<K, V> {
        // The table is consumed; its raw into-iterator owns the storage.
        IntoIter {
            inner: self.table.into_iter(),
        }
    }
}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a V)> {
        // SAFETY: buckets yielded by the raw iterator point at live entries,
        // and the shared `'a` borrow of the map keeps them valid while the
        // returned references are alive.
        self.inner.next().map(|x| unsafe {
            let r = x.as_ref();
            (&r.0, &r.1)
        })
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Delegated to the raw iterator, which knows the exact count.
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        // The raw table iterator tracks exactly how many items remain.
        self.inner.len()
    }
}
// Marker: after `next` returns `None`, it keeps returning `None`.
impl<K, V> FusedIterator for Iter<'_, K, V> {}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
    type Item = (&'a K, &'a mut V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
        // SAFETY: the raw iterator yields each live bucket at most once, and
        // the exclusive `'a` borrow of the map prevents aliasing while the
        // mutable value reference is alive. The key is handed out only
        // immutably, so its hash cannot be invalidated.
        self.inner.next().map(|x| unsafe {
            let r = x.as_mut();
            (&r.0, &mut r.1)
        })
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Marker: after `next` returns `None`, it keeps returning `None`.
impl<K, V> FusedIterator for IterMut<'_, K, V> {}
impl<K, V> fmt::Debug for IterMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    /// Formats the remaining entries as a debug list without consuming the
    /// iterator (a fresh borrowing `iter()` is used instead).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut list = f.debug_list();
        for pair in self.iter() {
            list.entry(&pair);
        }
        list.finish()
    }
}
impl<K, V> Iterator for IntoIter<K, V> {
    type Item = (K, V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        // Owned pairs come straight from the consuming raw-table iterator.
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Marker: after `next` returns `None`, it keeps returning `None`.
impl<K, V> FusedIterator for IntoIter<K, V> {}
impl<K: Debug, V: Debug> fmt::Debug for IntoIter<K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Borrowing `iter()` prints the remaining items without consuming.
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<'a, K, V> Iterator for Keys<'a, K, V> {
    type Item = &'a K;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a K> {
        // Pull the next (key, value) pair and keep only the key reference.
        match self.inner.next() {
            Some((key, _)) => Some(key),
            None => None,
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // One key per underlying entry, so the hint carries over unchanged.
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Keys<'_, K, V> {}
impl<'a, K, V> Iterator for Values<'a, K, V> {
    type Item = &'a V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a V> {
        // Pull the next (key, value) pair and keep only the value reference.
        match self.inner.next() {
            Some((_, value)) => Some(value),
            None => None,
        }
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // One value per underlying entry, so the hint carries over unchanged.
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Values<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
impl<K, V> FusedIterator for Values<'_, K, V> {}
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
    type Item = &'a mut V;
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<&'a mut V> {
        // Keep only the mutable value half of each pair; keys stay immutable.
        self.inner.next().map(|(_, v)| v)
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Marker: after `next` returns `None`, it keeps returning `None`.
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
impl<K, V> fmt::Debug for ValuesMut<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Reborrow the inner iterator immutably to print remaining items.
        f.debug_list().entries(self.inner.iter()).finish()
    }
}
impl<'a, K, V> Iterator for Drain<'a, K, V> {
    type Item = (K, V);
    #[cfg_attr(feature = "inline-more", inline)]
    fn next(&mut self) -> Option<(K, V)> {
        // The raw drain iterator yields owned pairs removed from the table.
        self.inner.next()
    }
    #[cfg_attr(feature = "inline-more", inline)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
impl<K, V> ExactSizeIterator for Drain<'_, K, V> {
    #[cfg_attr(feature = "inline-more", inline)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
// Marker: after `next` returns `None`, it keeps returning `None`.
impl<K, V> FusedIterator for Drain<'_, K, V> {}
impl<K, V> fmt::Debug for Drain<'_, K, V>
where
    K: fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Borrowing `iter()` prints remaining items without draining them.
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<'a, K, V, S> Entry<'a, K, V, S> {
    /// Sets the value of the entry, and returns an OccupiedEntry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// let entry = map.entry("horseyland").insert(37);
    ///
    /// assert_eq!(entry.key(), &"horseyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S>
    where
        K: Hash,
        S: BuildHasher,
    {
        match self {
            Entry::Occupied(mut entry) => {
                // Overwrite in place; the previous value returned by
                // `insert` is discarded (dropped) here.
                entry.insert(value);
                entry
            }
            // Vacant: insert the pair and convert into an occupied view.
            Entry::Vacant(entry) => entry.insert_entry(value),
        }
    }
    /// Ensures a value is in the entry by inserting the default if empty, and returns
    /// a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland").or_insert(3);
    /// assert_eq!(map["poneyland"], 3);
    ///
    /// *map.entry("poneyland").or_insert(10) *= 2;
    /// assert_eq!(map["poneyland"], 6);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert(self, default: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // Occupied: keep the existing value; Vacant: insert `default`.
        // Note `default` is always constructed by the caller, even if unused.
        match self {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => entry.insert(default),
        }
    }
    /// Ensures a value is in the entry by inserting the result of the default function if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, String> = HashMap::new();
    /// let s = "hoho".to_string();
    ///
    /// map.entry("poneyland").or_insert_with(|| s);
    ///
    /// assert_eq!(map["poneyland"], "hoho".to_string());
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // `default` is only invoked when the entry is vacant.
        match self {
            Entry::Occupied(entry) => entry.into_mut(),
            Entry::Vacant(entry) => entry.insert(default()),
        }
    }
    /// Returns a reference to this entry's key.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        // Both variants can report their key without consuming the entry.
        match *self {
            Entry::Occupied(ref entry) => entry.key(),
            Entry::Vacant(ref entry) => entry.key(),
        }
    }
    /// Provides in-place mutable access to an occupied entry before any
    /// potential inserts into the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 42);
    ///
    /// map.entry("poneyland")
    ///    .and_modify(|e| { *e += 1 })
    ///    .or_insert(42);
    /// assert_eq!(map["poneyland"], 43);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn and_modify<F>(self, f: F) -> Self
    where
        F: FnOnce(&mut V),
    {
        // Apply `f` only when a value is already present; a vacant entry is
        // passed through untouched so a later `or_insert` can fill it.
        match self {
            Entry::Occupied(mut entry) => {
                f(entry.get_mut());
                Entry::Occupied(entry)
            }
            Entry::Vacant(entry) => Entry::Vacant(entry),
        }
    }
}
impl<'a, K, V: Default, S> Entry<'a, K, V, S> {
    /// Ensures a value is in the entry by inserting the default value if empty,
    /// and returns a mutable reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// # fn main() {
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
    /// map.entry("poneyland").or_default();
    ///
    /// assert_eq!(map["poneyland"], None);
    /// # }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn or_default(self) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        // Fill a vacant slot with the type's default value; an occupied slot
        // is simply converted into a mutable reference.
        match self {
            Entry::Vacant(entry) => entry.insert(V::default()),
            Entry::Occupied(entry) => entry.into_mut(),
        }
    }
}
impl<'a, K, V, S> OccupiedEntry<'a, K, V, S> {
    /// Gets a reference to the key in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        // SAFETY: `elem` points at a live occupied bucket, kept valid by the
        // exclusive borrow of the map held in `table`.
        unsafe { &self.elem.as_ref().0 }
    }
    /// Take the ownership of the key and value from the map.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     // We delete the entry from the map.
    ///     o.remove_entry();
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove_entry(self) -> (K, V) {
        // SAFETY: `elem` is a live occupied bucket. `erase_no_drop` unlinks
        // the slot without dropping its payload, which we then move out
        // exactly once with `read`, so ownership is transferred cleanly.
        unsafe {
            self.table.table.erase_no_drop(&self.elem);
            self.elem.read()
        }
    }
    /// Gets a reference to the value in the entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.get(), &12);
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get(&self) -> &V {
        // SAFETY: same invariant as `key` — the bucket stays valid for the
        // lifetime of the map borrow.
        unsafe { &self.elem.as_ref().1 }
    }
    /// Gets a mutable reference to the value in the entry.
    ///
    /// If you need a reference to the `OccupiedEntry` which may outlive the
    /// destruction of the `Entry` value, see [`into_mut`].
    ///
    /// [`into_mut`]: #method.into_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     *o.get_mut() += 10;
    ///     assert_eq!(*o.get(), 22);
    ///
    ///     // We can use the same Entry multiple times.
    ///     *o.get_mut() += 2;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 24);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn get_mut(&mut self) -> &mut V {
        // SAFETY: exclusive access to `self` (and through it the map) means
        // no other reference to this slot can exist.
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
    /// with a lifetime bound to the map itself.
    ///
    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
    ///
    /// [`get_mut`]: #method.get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// assert_eq!(map["poneyland"], 12);
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     *o.into_mut() += 10;
    /// }
    ///
    /// assert_eq!(map["poneyland"], 22);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_mut(self) -> &'a mut V {
        // SAFETY: consuming the entry extends the exclusive borrow of the
        // slot to the full map lifetime `'a`.
        unsafe { &mut self.elem.as_mut().1 }
    }
    /// Sets the value of the entry, and returns the entry's old value.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
    ///     assert_eq!(o.insert(15), 12);
    /// }
    ///
    /// assert_eq!(map["poneyland"], 15);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(&mut self, mut value: V) -> V {
        // Swap rather than overwrite so the previous value can be returned
        // to the caller instead of being dropped.
        let old_value = self.get_mut();
        mem::swap(&mut value, old_value);
        value
    }
    /// Takes the value out of the entry, and returns it.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// map.entry("poneyland").or_insert(12);
    ///
    /// if let Entry::Occupied(o) = map.entry("poneyland") {
    ///     assert_eq!(o.remove(), 12);
    /// }
    ///
    /// assert_eq!(map.contains_key("poneyland"), false);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn remove(self) -> V {
        // Remove the whole pair but surface only the value; the key drops.
        self.remove_entry().1
    }
    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
    /// the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// map.insert(Rc::new("Stringthing".to_string()), 15);
    ///
    /// let my_key = Rc::new("Stringthing".to_string());
    ///
    /// if let Entry::Occupied(entry) = map.entry(my_key) {
    ///     // Also replace the key with a handle to our other key.
    ///     let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
    /// }
    ///
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_entry(self, value: V) -> (K, V) {
        // SAFETY: `elem` is a live occupied bucket owned by this entry.
        let entry = unsafe { self.elem.as_mut() };
        // NOTE(review): relies on `self.key` being `Some`; entries created
        // via `VacantEntry::insert_entry` store `None` and would panic here,
        // so this must only be reached through paths that stash the key.
        let old_key = mem::replace(&mut entry.0, self.key.unwrap());
        let old_value = mem::replace(&mut entry.1, value);
        (old_key, old_value)
    }
    /// Replaces the key in the hash map with the key used to create this entry.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::hash_map::{Entry, HashMap};
    /// use std::rc::Rc;
    ///
    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
    /// let mut known_strings: Vec<Rc<String>> = Vec::new();
    ///
    /// // Initialise known strings, run program, etc.
    ///
    /// reclaim_memory(&mut map, &known_strings);
    ///
    /// fn reclaim_memory(map: &mut HashMap<Rc<String>, u32>, known_strings: &[Rc<String>] ) {
    ///     for s in known_strings {
    ///         if let Entry::Occupied(entry) = map.entry(s.clone()) {
    ///             // Replaces the entry's key with our version of it in `known_strings`.
    ///             entry.replace_key();
    ///         }
    ///     }
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn replace_key(self) -> K {
        // SAFETY: `elem` is a live occupied bucket owned by this entry.
        let entry = unsafe { self.elem.as_mut() };
        // NOTE(review): like `replace_entry`, this unwraps the stashed key
        // and panics if `self.key` is `None`.
        mem::replace(&mut entry.0, self.key.unwrap())
    }
}
impl<'a, K, V, S> VacantEntry<'a, K, V, S> {
    /// Gets a reference to the key that would be used when inserting a value
    /// through the `VacantEntry`.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn key(&self) -> &K {
        &self.key
    }
    /// Take ownership of the key.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// if let Entry::Vacant(v) = map.entry("poneyland") {
    ///     v.into_key();
    /// }
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn into_key(self) -> K {
        // Consumes the entry; nothing was inserted, so the map is unchanged.
        self.key
    }
    /// Sets the value of the entry with the VacantEntry's key,
    /// and returns a mutable reference to it.
    ///
    /// # Examples
    ///
    /// ```
    /// use hashbrown::HashMap;
    /// use hashbrown::hash_map::Entry;
    ///
    /// let mut map: HashMap<&str, u32> = HashMap::new();
    ///
    /// if let Entry::Vacant(o) = map.entry("poneyland") {
    ///     o.insert(37);
    /// }
    /// assert_eq!(map["poneyland"], 37);
    /// ```
    #[cfg_attr(feature = "inline-more", inline)]
    pub fn insert(self, value: V) -> &'a mut V
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash_builder = &self.table.hash_builder;
        // Insert using the hash precomputed at lookup time; the closure
        // lets the raw table rehash existing entries if it has to grow.
        let bucket = self.table.table.insert(self.hash, (self.key, value), |x| {
            make_hash(hash_builder, &x.0)
        });
        // SAFETY: the freshly returned bucket is occupied and remains valid
        // for the map borrow `'a` we are consuming.
        unsafe { &mut bucket.as_mut().1 }
    }
    // Like `insert`, but returns an `OccupiedEntry` view of the new slot
    // instead of a value reference (used by `Entry::insert`).
    #[cfg_attr(feature = "inline-more", inline)]
    fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S>
    where
        K: Hash,
        S: BuildHasher,
    {
        let hash_builder = &self.table.hash_builder;
        let elem = self.table.table.insert(self.hash, (self.key, value), |x| {
            make_hash(hash_builder, &x.0)
        });
        OccupiedEntry {
            // No replacement key is stashed, so the resulting entry must not
            // be used with `replace_entry`/`replace_key`.
            key: None,
            elem,
            table: self.table,
        }
    }
}
impl<K, V, S> FromIterator<(K, V)> for HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher + Default,
{
    /// Builds a map from an iterator of key/value pairs, pre-sizing the
    /// table from the iterator's lower size-hint bound. A key that appears
    /// more than once keeps its last value.
    #[cfg_attr(feature = "inline-more", inline)]
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let pairs = iter.into_iter();
        let (lower, _) = pairs.size_hint();
        let mut map = Self::with_capacity_and_hasher(lower, S::default());
        for (key, value) in pairs {
            map.insert(key, value);
        }
        map
    }
}
impl<K, V, S> Extend<(K, V)> for HashMap<K, V, S>
where
    K: Eq + Hash,
    S: BuildHasher,
{
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
        let pairs = iter.into_iter();
        let (lower, _) = pairs.size_hint();
        // Keys may already be present or appear multiple times in the
        // iterator. An empty map reserves the whole lower bound up front;
        // otherwise we reserve half of it (rounded up), which bounds the
        // worst case at two resizes.
        let additional = if self.is_empty() {
            lower
        } else {
            (lower + 1) / 2
        };
        self.reserve(additional);
        for (key, value) in pairs {
            self.insert(key, value);
        }
    }
}
impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap<K, V, S>
where
    K: Eq + Hash + Copy,
    V: Copy,
    S: BuildHasher,
{
    /// Extends the map with copies of the referenced key/value pairs by
    /// delegating to the owned-pair `Extend` implementation.
    #[cfg_attr(feature = "inline-more", inline)]
    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
        let copied = iter.into_iter().map(|(k, v)| (*k, *v));
        self.extend(copied);
    }
}
#[allow(dead_code)]
// Compile-time check that the map and its iterator types are covariant in
// their reference lifetime parameters: each body only typechecks if e.g.
// `HashMap<&'static str, u8>` coerces to `HashMap<&'new str, u8>`. This
// function is never called; it exists purely so the compiler verifies the
// coercions.
fn assert_covariance() {
    fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
        v
    }
    fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
        v
    }
    fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
        v
    }
    fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
        v
    }
    fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> {
        v
    }
    fn into_iter_val<'new>(v: IntoIter<u8, &'static str>) -> IntoIter<u8, &'new str> {
        v
    }
    fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
        v
    }
    fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
        v
    }
    fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
        v
    }
    fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
        v
    }
    // `Drain` holds a mutable borrow, so only its outer lifetime and the
    // yielded owned pairs are covariant.
    fn drain<'new>(
        d: Drain<'static, &'static str, &'static str>,
    ) -> Drain<'new, &'new str, &'new str> {
        d
    }
}
#[cfg(test)]
mod test_map {
use super::DefaultHashBuilder;
use super::Entry::{Occupied, Vacant};
use super::{HashMap, RawEntryMut};
use crate::CollectionAllocErr::*;
use rand::{rngs::SmallRng, Rng, SeedableRng};
use std::cell::RefCell;
use std::usize;
use std::vec::Vec;
#[test]
fn test_zero_capacities() {
type HM = HashMap<i32, i32>;
let m = HM::new();
assert_eq!(m.capacity(), 0);
let m = HM::default();
assert_eq!(m.capacity(), 0);
let m = HM::with_hasher(DefaultHashBuilder::default());
assert_eq!(m.capacity(), 0);
let m = HM::with_capacity(0);
assert_eq!(m.capacity(), 0);
let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
assert_eq!(m.capacity(), 0);
let mut m = HM::new();
m.insert(1, 1);
m.insert(2, 2);
m.remove(&1);
m.remove(&2);
m.shrink_to_fit();
assert_eq!(m.capacity(), 0);
let mut m = HM::new();
m.reserve(0);
assert_eq!(m.capacity(), 0);
}
#[test]
fn test_create_capacity_zero() {
let mut m = HashMap::with_capacity(0);
assert!(m.insert(1, 1).is_none());
assert!(m.contains_key(&1));
assert!(!m.contains_key(&0));
}
#[test]
fn test_insert() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&2).unwrap(), 4);
}
#[test]
fn test_clone() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.insert(1, 2).is_none());
assert_eq!(m.len(), 1);
assert!(m.insert(2, 4).is_none());
assert_eq!(m.len(), 2);
let m2 = m.clone();
assert_eq!(*m2.get(&1).unwrap(), 2);
assert_eq!(*m2.get(&2).unwrap(), 4);
assert_eq!(m2.len(), 2);
}
thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
#[derive(Hash, PartialEq, Eq)]
struct Droppable {
k: usize,
}
impl Droppable {
fn new(k: usize) -> Droppable {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[k] += 1;
});
Droppable { k }
}
}
impl Drop for Droppable {
fn drop(&mut self) {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[self.k] -= 1;
});
}
}
impl Clone for Droppable {
fn clone(&self) -> Self {
Droppable::new(self.k)
}
}
#[test]
fn test_drops() {
DROP_VECTOR.with(|slot| {
*slot.borrow_mut() = vec![0; 200];
});
{
let mut m = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0..100 {
let d1 = Droppable::new(i);
let d2 = Droppable::new(i + 100);
m.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for i in 0..50 {
let k = Droppable::new(i);
let v = m.remove(&k);
assert!(v.is_some());
DROP_VECTOR.with(|v| {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i + 100], 1);
});
}
DROP_VECTOR.with(|v| {
for i in 0..50 {
assert_eq!(v.borrow()[i], 0);
assert_eq!(v.borrow()[i + 100], 0);
}
for i in 50..100 {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i + 100], 1);
}
});
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
}
#[test]
fn test_into_iter_drops() {
DROP_VECTOR.with(|v| {
*v.borrow_mut() = vec![0; 200];
});
let hm = {
let mut hm = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0..100 {
let d1 = Droppable::new(i);
let d2 = Droppable::new(i + 100);
hm.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
hm
};
// By the way, ensure that cloning doesn't screw up the dropping.
drop(hm.clone());
{
let mut half = hm.into_iter().take(50);
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for _ in half.by_ref() {}
DROP_VECTOR.with(|v| {
let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
assert_eq!(nk, 50);
assert_eq!(nv, 50);
});
};
DROP_VECTOR.with(|v| {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
}
#[test]
fn test_empty_remove() {
let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.remove(&0), None);
}
#[test]
fn test_empty_entry() {
let mut m: HashMap<i32, bool> = HashMap::new();
match m.entry(0) {
Occupied(_) => panic!(),
Vacant(_) => {}
}
assert!(*m.entry(0).or_insert(true));
assert_eq!(m.len(), 1);
}
#[test]
fn test_empty_iter() {
let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.drain().next(), None);
assert_eq!(m.keys().next(), None);
assert_eq!(m.values().next(), None);
assert_eq!(m.values_mut().next(), None);
assert_eq!(m.iter().next(), None);
assert_eq!(m.iter_mut().next(), None);
assert_eq!(m.len(), 0);
assert!(m.is_empty());
assert_eq!(m.into_iter().next(), None);
}
#[test]
#[cfg_attr(miri, ignore)] // FIXME: takes too long
fn test_lots_of_insertions() {
let mut m = HashMap::new();
// Try this a few times to make sure we never screw up the hashmap's
// internal state.
for _ in 0..10 {
assert!(m.is_empty());
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
for j in 1..=i {
let r = m.get(&j);
assert_eq!(r, Some(&j));
}
for j in i + 1..1001 {
let r = m.get(&j);
assert_eq!(r, None);
}
}
for i in 1001..2001 {
assert!(!m.contains_key(&i));
}
// remove forwards
for i in 1..1001 {
assert!(m.remove(&i).is_some());
for j in 1..=i {
assert!(!m.contains_key(&j));
}
for j in i + 1..1001 {
assert!(m.contains_key(&j));
}
}
for i in 1..1001 {
assert!(!m.contains_key(&i));
}
for i in 1..1001 {
assert!(m.insert(i, i).is_none());
}
// remove backwards
for i in (1..1001).rev() {
assert!(m.remove(&i).is_some());
for j in i..1001 {
assert!(!m.contains_key(&j));
}
for j in 1..i {
assert!(m.contains_key(&j));
}
}
}
}
#[test]
fn test_find_mut() {
let mut m = HashMap::new();
assert!(m.insert(1, 12).is_none());
assert!(m.insert(2, 8).is_none());
assert!(m.insert(5, 14).is_none());
let new = 100;
match m.get_mut(&5) {
None => panic!(),
Some(x) => *x = new,
}
assert_eq!(m.get(&5), Some(&new));
}
#[test]
fn test_insert_overwrite() {
let mut m = HashMap::new();
assert!(m.insert(1, 2).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert!(!m.insert(1, 3).is_none());
assert_eq!(*m.get(&1).unwrap(), 3);
}
#[test]
fn test_insert_conflicts() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert!(m.insert(5, 3).is_none());
assert!(m.insert(9, 4).is_none());
assert_eq!(*m.get(&9).unwrap(), 4);
assert_eq!(*m.get(&5).unwrap(), 3);
assert_eq!(*m.get(&1).unwrap(), 2);
}
#[test]
fn test_conflict_remove() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert!(m.insert(5, 3).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&5).unwrap(), 3);
assert!(m.insert(9, 4).is_none());
assert_eq!(*m.get(&1).unwrap(), 2);
assert_eq!(*m.get(&5).unwrap(), 3);
assert_eq!(*m.get(&9).unwrap(), 4);
assert!(m.remove(&1).is_some());
assert_eq!(*m.get(&9).unwrap(), 4);
assert_eq!(*m.get(&5).unwrap(), 3);
}
#[test]
fn test_is_empty() {
let mut m = HashMap::with_capacity(4);
assert!(m.insert(1, 2).is_none());
assert!(!m.is_empty());
assert!(m.remove(&1).is_some());
assert!(m.is_empty());
}
#[test]
fn test_remove() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.remove(&1), Some(2));
assert_eq!(m.remove(&1), None);
}
#[test]
fn test_remove_entry() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.remove_entry(&1), Some((1, 2)));
assert_eq!(m.remove(&1), None);
}
#[test]
fn test_iterate() {
let mut m = HashMap::with_capacity(4);
for i in 0..32 {
assert!(m.insert(i, i * 2).is_none());
}
assert_eq!(m.len(), 32);
let mut observed: u32 = 0;
for (k, v) in &m {
assert_eq!(*v, *k * 2);
observed |= 1 << *k;
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_keys() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: HashMap<_, _> = vec.into_iter().collect();
let keys: Vec<_> = map.keys().cloned().collect();
assert_eq!(keys.len(), 3);
assert!(keys.contains(&1));
assert!(keys.contains(&2));
assert!(keys.contains(&3));
}
#[test]
fn test_values() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map: HashMap<_, _> = vec.into_iter().collect();
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&'a'));
assert!(values.contains(&'b'));
assert!(values.contains(&'c'));
}
#[test]
fn test_values_mut() {
let vec = vec![(1, 1), (2, 2), (3, 3)];
let mut map: HashMap<_, _> = vec.into_iter().collect();
for value in map.values_mut() {
*value = (*value) * 2
}
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&2));
assert!(values.contains(&4));
assert!(values.contains(&6));
}
#[test]
fn test_find() {
let mut m = HashMap::new();
assert!(m.get(&1).is_none());
m.insert(1, 2);
match m.get(&1) {
None => panic!(),
Some(v) => assert_eq!(*v, 2),
}
}
#[test]
fn test_eq() {
let mut m1 = HashMap::new();
m1.insert(1, 2);
m1.insert(2, 3);
m1.insert(3, 4);
let mut m2 = HashMap::new();
m2.insert(1, 2);
m2.insert(2, 3);
assert!(m1 != m2);
m2.insert(3, 4);
assert_eq!(m1, m2);
}
#[test]
fn test_show() {
let mut map = HashMap::new();
let empty: HashMap<i32, i32> = HashMap::new();
map.insert(1, 2);
map.insert(3, 4);
let map_str = format!("{:?}", map);
assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
assert_eq!(format!("{:?}", empty), "{}");
}
#[test]
fn test_expand() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.is_empty());
let mut i = 0;
let old_raw_cap = m.raw_capacity();
while old_raw_cap == m.raw_capacity() {
m.insert(i, i);
i += 1;
}
assert_eq!(m.len(), i);
assert!(!m.is_empty());
}
#[test]
fn test_behavior_resize_policy() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert_eq!(m.raw_capacity(), 1);
assert!(m.is_empty());
m.insert(0, 0);
m.remove(&0);
assert!(m.is_empty());
let initial_raw_cap = m.raw_capacity();
m.reserve(initial_raw_cap);
let raw_cap = m.raw_capacity();
assert_eq!(raw_cap, initial_raw_cap * 2);
let mut i = 0;
for _ in 0..raw_cap * 3 / 4 {
m.insert(i, i);
i += 1;
}
// three quarters full
assert_eq!(m.len(), i);
assert_eq!(m.raw_capacity(), raw_cap);
for _ in 0..raw_cap / 4 {
m.insert(i, i);
i += 1;
}
// half full
let new_raw_cap = m.raw_capacity();
assert_eq!(new_raw_cap, raw_cap * 2);
for _ in 0..raw_cap / 2 - 1 {
i -= 1;
m.remove(&i);
assert_eq!(m.raw_capacity(), new_raw_cap);
}
// A little more than one quarter full.
m.shrink_to_fit();
assert_eq!(m.raw_capacity(), raw_cap);
// again, a little more than half full
for _ in 0..raw_cap / 2 {
i -= 1;
m.remove(&i);
}
m.shrink_to_fit();
assert_eq!(m.len(), i);
assert!(!m.is_empty());
assert_eq!(m.raw_capacity(), initial_raw_cap);
}
#[test]
fn test_reserve_shrink_to_fit() {
let mut m = HashMap::new();
m.insert(0, 0);
m.remove(&0);
assert!(m.capacity() >= m.len());
for i in 0..128 {
m.insert(i, i);
}
m.reserve(256);
let usable_cap = m.capacity();
for i in 128..(128 + 256) {
m.insert(i, i);
assert_eq!(m.capacity(), usable_cap);
}
for i in 100..(128 + 256) {
assert_eq!(m.remove(&i), Some(i));
}
m.shrink_to_fit();
assert_eq!(m.len(), 100);
assert!(!m.is_empty());
assert!(m.capacity() >= m.len());
for i in 0..100 {
assert_eq!(m.remove(&i), Some(i));
}
m.shrink_to_fit();
m.insert(0, 0);
assert_eq!(m.len(), 1);
assert!(m.capacity() >= m.len());
assert_eq!(m.remove(&0), Some(0));
}
#[test]
fn test_from_iter() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
for &(k, v) in &xs {
assert_eq!(map.get(&k), Some(&v));
}
}
#[test]
fn test_size_hint() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_len() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.len(), 3);
}
#[test]
fn test_mut_size_hint() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter_mut();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.size_hint(), (3, Some(3)));
}
#[test]
fn test_iter_mut_len() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter_mut();
for _ in iter.by_ref().take(3) {}
assert_eq!(iter.len(), 3);
}
#[test]
fn test_index() {
let mut map = HashMap::new();
map.insert(1, 2);
map.insert(2, 1);
map.insert(3, 4);
assert_eq!(map[&2], 1);
}
#[test]
#[should_panic]
fn test_index_nonexistent() {
let mut map = HashMap::new();
map.insert(1, 2);
map.insert(2, 1);
map.insert(3, 4);
map[&4];
}
#[test]
fn test_entry() {
let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
// Existing key (insert)
match map.entry(1) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
assert_eq!(view.get(), &10);
assert_eq!(view.insert(100), 10);
}
}
assert_eq!(map.get(&1).unwrap(), &100);
assert_eq!(map.len(), 6);
// Existing key (update)
match map.entry(2) {
Vacant(_) => unreachable!(),
Occupied(mut view) => {
let v = view.get_mut();
let new_v = (*v) * 10;
*v = new_v;
}
}
assert_eq!(map.get(&2).unwrap(), &200);
assert_eq!(map.len(), 6);
// Existing key (take)
match map.entry(3) {
Vacant(_) => unreachable!(),
Occupied(view) => {
assert_eq!(view.remove(), 30);
}
}
assert_eq!(map.get(&3), None);
assert_eq!(map.len(), 5);
// Inexistent key (insert)
match map.entry(10) {
Occupied(_) => unreachable!(),
Vacant(view) => {
assert_eq!(*view.insert(1000), 1000);
}
}
assert_eq!(map.get(&10).unwrap(), &1000);
assert_eq!(map.len(), 6);
}
#[test]
fn test_entry_take_doesnt_corrupt() {
    #![allow(deprecated)] //rand
    // Test for #19292
    // Regression test: removing entries through the entry API must keep the
    // table's internal bookkeeping consistent, so every key reported by
    // `keys()` is still found by `contains_key`.
    fn check(m: &HashMap<i32, ()>) {
        for k in m.keys() {
            assert!(m.contains_key(k), "{} is in keys() but not in the map?", k);
        }
    }
    let mut m = HashMap::new();
    // Seeded RNG so the insert/remove sequence is reproducible.
    let mut rng = {
        let seed = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
        SmallRng::from_seed(seed)
    };
    // Populate the map with some items.
    for _ in 0..50 {
        let x = rng.gen_range(-10, 10);
        m.insert(x, ());
    }
    // Randomly remove entries via the entry API, re-validating the map
    // invariant after every removal.
    for _ in 0..1000 {
        let x = rng.gen_range(-10, 10);
        match m.entry(x) {
            Vacant(_) => {}
            Occupied(e) => {
                e.remove();
            }
        }
        check(&m);
    }
}
#[test]
fn test_extend_ref() {
    // Extending from a borrowed map copies the `Copy` keys and values over.
    let mut target = HashMap::new();
    target.insert(1, "one");
    let mut source = HashMap::new();
    source.insert(2, "two");
    source.insert(3, "three");
    target.extend(&source);
    assert_eq!(target.len(), 3);
    assert_eq!(target[&1], "one");
    assert_eq!(target[&2], "two");
    assert_eq!(target[&3], "three");
}
#[test]
fn test_capacity_not_less_than_len() {
    let mut map = HashMap::new();
    let mut next_key = 0;
    // Fill past the initial allocation so capacity exceeds len.
    for _ in 0..116 {
        map.insert(next_key, 0);
        next_key += 1;
    }
    assert!(map.capacity() > map.len());
    // Top the map up to exactly its reported capacity.
    let remaining = map.capacity() - map.len();
    for _ in 0..remaining {
        map.insert(next_key, 0);
        next_key += 1;
    }
    assert_eq!(map.len(), map.capacity());
    // Insert at capacity should cause allocation.
    map.insert(next_key, 0);
    assert!(map.capacity() > map.len());
}
#[test]
fn test_occupied_entry_key() {
    let mut map = HashMap::new();
    let key = "hello there";
    let value = "value goes here";
    assert!(map.is_empty());
    map.insert(key.clone(), value.clone());
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
    // An occupied entry must report the key it was looked up with, and the
    // lookup itself must not disturb the map.
    match map.entry(key.clone()) {
        Vacant(_) => panic!(),
        Occupied(e) => assert_eq!(key, *e.key()),
    }
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
}
#[test]
fn test_vacant_entry_key() {
    let mut map = HashMap::new();
    let key = "hello there";
    let value = "value goes here";
    assert!(map.is_empty());
    // A vacant entry reports the key it was created with; inserting through
    // it stores the pair.
    match map.entry(key.clone()) {
        Occupied(_) => panic!(),
        Vacant(e) => {
            assert_eq!(key, *e.key());
            e.insert(value.clone());
        }
    }
    assert_eq!(map.len(), 1);
    assert_eq!(map[key], value);
}
#[test]
fn test_retain() {
    // `retain` keeps only entries whose key satisfies the predicate.
    let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
    map.retain(|&k, _| k % 2 == 0);
    assert_eq!(map.len(), 50);
    for &k in [2, 4, 6].iter() {
        assert_eq!(map[&k], k * 10);
    }
}
#[test]
fn test_drain_filter() {
    let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
    // With this predicate (true for even keys) the drained items are the odd
    // pairs; the even ones stay in the map.
    let mut removed: Vec<_> = map.drain_filter(|&k, _| k % 2 == 0).collect();
    removed.sort_unstable();
    assert_eq!(vec![(1, 10), (3, 30), (5, 50), (7, 70)], removed);
    assert_eq!(map.len(), 4);
}
#[test]
#[cfg_attr(miri, ignore)] // FIXME: no OOM signalling (https://github.com/rust-lang/miri/issues/613)
fn test_try_reserve() {
    let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
    const MAX_USIZE: usize = usize::MAX;
    // Requesting usize::MAX entries must overflow the capacity arithmetic
    // before any allocation is even attempted.
    if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
    } else {
        panic!("usize::MAX should trigger an overflow!");
    }
    // usize::MAX / 8 passes the overflow check but should be refused by the
    // allocator itself.
    if let Err(AllocErr { .. }) = empty_bytes.try_reserve(MAX_USIZE / 8) {
    } else {
        // This may succeed if there is enough free memory. Attempt to
        // allocate a second hashmap to ensure the allocation will fail.
        let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
        if let Err(AllocErr { .. }) = empty_bytes2.try_reserve(MAX_USIZE / 8) {
        } else {
            panic!("usize::MAX / 8 should trigger an OOM!");
        }
    }
}
#[test]
fn test_raw_entry() {
    use super::RawEntryMut::{Occupied, Vacant};
    let xs = [(1i32, 10i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
    let mut map: HashMap<_, _> = xs.iter().cloned().collect();
    // Hash a key with the map's own hasher, as a caller of the raw API must.
    let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
        use core::hash::{BuildHasher, Hash, Hasher};
        let mut hasher = map.hasher().build_hasher();
        k.hash(&mut hasher);
        hasher.finish()
    };
    // Existing key (insert)
    match map.raw_entry_mut().from_key(&1) {
        Vacant(_) => unreachable!(),
        Occupied(mut view) => {
            assert_eq!(view.get(), &10);
            assert_eq!(view.insert(100), 10);
        }
    }
    // The update must be visible through every raw lookup flavour:
    // by key, by hash + equality closure, and by precomputed hash + key.
    let hash1 = compute_hash(&map, 1);
    assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
    assert_eq!(
        map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
        (&1, &100)
    );
    assert_eq!(
        map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
        (&1, &100)
    );
    assert_eq!(map.len(), 6);
    // Existing key (update)
    match map.raw_entry_mut().from_key(&2) {
        Vacant(_) => unreachable!(),
        Occupied(mut view) => {
            let v = view.get_mut();
            let new_v = (*v) * 10;
            *v = new_v;
        }
    }
    let hash2 = compute_hash(&map, 2);
    assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
    assert_eq!(
        map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
        (&2, &200)
    );
    assert_eq!(
        map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
        (&2, &200)
    );
    assert_eq!(map.len(), 6);
    // Existing key (take)
    let hash3 = compute_hash(&map, 3);
    match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
        Vacant(_) => unreachable!(),
        Occupied(view) => {
            assert_eq!(view.remove_entry(), (3, 30));
        }
    }
    // After removal, every lookup flavour must miss.
    assert_eq!(map.raw_entry().from_key(&3), None);
    assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
    assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
    assert_eq!(map.len(), 5);
    // Nonexistent key (insert)
    match map.raw_entry_mut().from_key(&10) {
        Occupied(_) => unreachable!(),
        Vacant(view) => {
            assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
        }
    }
    assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
    assert_eq!(map.len(), 6);
    // Ensure all lookup methods produce equivalent results.
    for k in 0..12 {
        let hash = compute_hash(&map, k);
        let v = map.get(&k).cloned();
        let kv = v.as_ref().map(|v| (&k, v));
        assert_eq!(map.raw_entry().from_key(&k), kv);
        assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
        match map.raw_entry_mut().from_key(&k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
        match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
        match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
            Occupied(mut o) => assert_eq!(Some(o.get_key_value()), kv),
            Vacant(_) => assert_eq!(v, None),
        }
    }
}
#[test]
fn test_key_without_hash_impl() {
    // The raw-entry API allows using a key type with no Hash impl (and a `()`
    // build-hasher): the caller supplies every hash explicitly.
    #[derive(Debug)]
    struct IntWrapper(u64);
    let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
    {
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
    }
    {
        // Insert key 0; `insert_with_hasher` also supplies the hash function
        // used for any future rehashing.
        let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
            RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
            RawEntryMut::Vacant(e) => e,
        };
        vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
    }
    {
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
    }
    {
        // Insert a second, distinct key the same way.
        let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
            RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
            RawEntryMut::Vacant(e) => e,
        };
        vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
    }
    {
        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
    }
    {
        // Removing key 0 must leave key 1 untouched.
        let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
            RawEntryMut::Occupied(e) => e,
            RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
        };
        occupied_entry.remove();
    }
    assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
    assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
    assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
}
}
|
use serial::Serial;
use timer::Timer;
use keypad::Keypad;
use gpu::GPU;
use sound::Sound;
// Sizes of the two on-chip RAM regions: 32 KiB of banked work RAM (eight
// 4 KiB banks, see the `wrambank * 0x1000` indexing below) and the 127-byte
// high RAM at 0xFF80-0xFFFE.
static WRAM_SIZE: uint = 0x8000;
static ZRAM_SIZE: uint = 0x7F;
// Which kind of VRAM DMA transfer, if any, is currently in progress.
#[deriving(Eq)]
enum DMAType {
    NoDMA, // no transfer active
    GDMA,  // general-purpose DMA: whole transfer at once
    HDMA,  // H-blank DMA: one 16-byte row at a time
}
// Memory management unit: routes CPU reads and writes to the cartridge
// (via the MBC), GPU, sound, timer, serial, keypad, work RAM and high RAM.
pub struct MMU {
    priv wram: ~[u8, ..WRAM_SIZE], // banked work RAM (0xC000-0xDFFF + echo)
    priv zram: ~[u8, ..ZRAM_SIZE], // high RAM (0xFF80-0xFFFE)
    priv hdma: [u8, ..4],          // raw DMA source/dest registers 0xFF51-0xFF54
    inte: u8,                      // interrupt-enable register (0xFFFF)
    intf: u8,                      // interrupt-flag register (0xFF0F)
    serial: Serial,
    timer: Timer,
    keypad: Keypad,
    gpu: GPU,
    sound: Sound,
    priv hdma_status: DMAType,     // current VRAM DMA mode
    priv hdma_src: u16,            // current DMA source address
    priv hdma_dst: u16,            // current DMA destination address
    priv hdma_len: u8,             // remaining DMA length minus one (0xFF = done)
    priv wrambank: u8,             // selected work-RAM bank, 1-7 (reg 0xFF70)
    priv mbc: ~::mbc::MBC,         // cartridge memory bank controller
    priv gbmode: ::gbmode::GbMode, // Classic / Color / ColorAsClassic
}
impl MMU {
    /// Builds an MMU in Classic (non-CGB) mode for the given ROM file and
    /// seeds the I/O registers. Fails if the cartridge is CGB-only.
    pub fn new(romname: &str) -> MMU {
        let mut res = MMU {
            wram: ~([0, ..WRAM_SIZE]),
            zram: ~([0, ..ZRAM_SIZE]),
            hdma: [0, ..4],
            wrambank: 1,
            inte: 0,
            intf: 0,
            serial: Serial::new(),
            timer: Timer::new(),
            keypad: Keypad::new(),
            gpu: GPU::new(),
            sound: Sound::new(),
            mbc: ::mbc::get_mbc(&Path::new(romname)),
            gbmode: ::gbmode::Classic,
            hdma_src: 0,
            hdma_dst: 0,
            hdma_status: NoDMA,
            hdma_len: 0xFF,
        };
        // 0x0143 is the cartridge's CGB flag; 0xC0 marks a CGB-only title.
        if res.rb(0x0143) == 0xC0 {
            fail!("This game does not work in Classic mode");
        }
        res.set_initial();
        return res
    }
    /// Builds an MMU with the Color (CGB) GPU; the effective mode is then
    /// refined from the cartridge header by `determine_mode`.
    pub fn new_cgb(romname: &str) -> MMU {
        let mut res = MMU {
            wram: ~([0,.. WRAM_SIZE]),
            zram: ~([0,.. ZRAM_SIZE]),
            wrambank: 1,
            hdma: [0,.. 4],
            inte: 0,
            intf: 0,
            serial: Serial::new(),
            timer: Timer::new(),
            keypad: Keypad::new(),
            gpu: GPU::new_cgb(),
            sound: Sound::new(),
            mbc: ::mbc::get_mbc(&Path::new(romname)),
            gbmode: ::gbmode::Color,
            hdma_src: 0,
            hdma_dst: 0,
            hdma_status: NoDMA,
            hdma_len: 0xFF,
        };
        res.determine_mode();
        res.set_initial();
        return res;
    }
    /// Writes initial values into a handful of I/O registers (timer, LCD
    /// control, scroll, palettes, window) — presumably mirroring the state
    /// the hardware boot ROM leaves behind.
    fn set_initial(&mut self) {
        self.wb(0xFF05, 0);
        self.wb(0xFF06, 0);
        self.wb(0xFF07, 0);
        self.wb(0xFF40, 0x91);
        self.wb(0xFF42, 0);
        self.wb(0xFF43, 0);
        self.wb(0xFF45, 0);
        self.wb(0xFF47, 0xFC);
        self.wb(0xFF48, 0xFF);
        self.wb(0xFF49, 0xFF);
        self.wb(0xFF4A, 0);
        self.wb(0xFF4B, 0);
    }
    /// Reads the CGB flag at 0x0143: bit 7 set selects Color mode, otherwise
    /// the color hardware runs as a classic GB. The GPU is kept in sync.
    fn determine_mode(&mut self) {
        let mode = match self.rb(0x0143) & 0x80 {
            0x80 => ::gbmode::Color,
            _ => ::gbmode::ColorAsClassic,
        };
        self.gbmode = mode;
        self.gpu.gbmode = mode;
    }
    pub fn get_mode(&self) -> ::gbmode::GbMode {
        self.gbmode
    }
    /// Advances timer/keypad/GPU by `cputicks` plus any DMA overhead, and
    /// collects each subsystem's pending interrupt bits into `intf`.
    /// Returns the total number of ticks consumed.
    pub fn cycle(&mut self, cputicks: uint) -> uint {
        let ticks = cputicks + self.perform_hdma();
        self.timer.cycle(ticks);
        self.intf |= self.timer.interrupt;
        self.timer.interrupt = 0;
        self.intf |= self.keypad.interrupt;
        self.keypad.interrupt = 0;
        self.gpu.cycle(ticks);
        self.intf |= self.gpu.interrupt;
        self.gpu.interrupt = 0;
        return ticks;
    }
    /// Reads one byte, dispatching on the address ranges of the memory map.
    pub fn rb(&self, address: u16) -> u8 {
        match address {
            0x0000 .. 0x7FFF => self.mbc.readrom(address),
            0x8000 .. 0x9FFF => self.gpu.rb(address),
            0xA000 .. 0xBFFF => self.mbc.readram(address),
            // Bank 0 of work RAM plus its echo region.
            0xC000 .. 0xCFFF | 0xE000 .. 0xEFFF => self.wram[address & 0x0FFF],
            // Switchable work-RAM bank plus its echo region.
            0xD000 .. 0xDFFF | 0xF000 .. 0xFDFF => self.wram[(self.wrambank as u16 * 0x1000) | address & 0x0FFF],
            0xFE00 .. 0xFE9F => self.gpu.rb(address),
            0xFF00 => self.keypad.rb(),
            0xFF01 .. 0xFF02 => self.serial.rb(address),
            0xFF04 .. 0xFF07 => self.timer.rb(address),
            0xFF0F => self.intf,
            0xFF10 .. 0xFF26 => self.sound.rb(address),
            // Speed-switch register: listed before the GPU range so it is
            // matched first (match arms are tried in order).
            0xFF4D => 0,
            0xFF40 .. 0xFF4F => self.gpu.rb(address),
            0xFF51 .. 0xFF55 => self.hdma_read(address),
            0xFF68 .. 0xFF6B => self.gpu.rb(address),
            0xFF70 => self.wrambank,
            0xFF80 .. 0xFFFE => self.zram[address & 0x007F],
            0xFFFF => self.inte,
            _ => { warn!("rb not implemented for {:X}", address); 0 },
        }
    }
    /// Reads a little-endian 16-bit word.
    pub fn rw(&self, address: u16) -> u16 {
        (self.rb(address) as u16) | (self.rb(address + 1) as u16 << 8)
    }
    /// Writes one byte, dispatching on the address ranges of the memory map.
    pub fn wb(&mut self, address: u16, value: u8) {
        match address {
            0x0000 .. 0x7FFF => self.mbc.writerom(address, value),
            0x8000 .. 0x9FFF => self.gpu.wb(address, value),
            0xA000 .. 0xBFFF => self.mbc.writeram(address, value),
            0xC000 .. 0xCFFF | 0xE000 .. 0xEFFF => self.wram[address & 0x0FFF] = value,
            0xD000 .. 0xDFFF | 0xF000 .. 0xFDFF => self.wram[(self.wrambank as u16 * 0x1000) | address & 0x0FFF] = value,
            0xFE00 .. 0xFE9F => self.gpu.wb(address, value),
            0xFF00 => self.keypad.wb(value),
            0xFF01 .. 0xFF02 => self.serial.wb(address, value),
            0xFF04 .. 0xFF07 => self.timer.wb(address, value),
            0xFF10 .. 0xFF26 => self.sound.wb(address, value),
            // Writing 0xFF46 triggers an OAM DMA transfer.
            0xFF46 => self.oamdma(value),
            0xFF4D => { if value & 0x1 == 0x1 { warn!("Speed switch requested but not supported"); } }, // CGB speed switch
            0xFF40 .. 0xFF4F => self.gpu.wb(address, value),
            0xFF51 .. 0xFF55 => self.hdma_write(address, value),
            0xFF68 .. 0xFF6B => self.gpu.wb(address, value),
            0xFF0F => self.intf = value,
            // Bank 0 cannot be selected; value 0 maps to bank 1.
            0xFF70 => { self.wrambank = match value & 0x7 { 0 => 1, n => n }; },
            0xFF80 .. 0xFFFE => self.zram[address & 0x007F] = value,
            0xFFFF => self.inte = value,
            _ => warn!("wb not implemented for {:X}", address),
        };
    }
    /// Writes a little-endian 16-bit word.
    pub fn ww(&mut self, address: u16, value: u16) {
        self.wb(address, (value & 0xFF) as u8);
        self.wb(address + 1, (value >> 8) as u8);
    }
    /// OAM DMA: copies 0xA0 bytes from `value << 8` into sprite attribute
    /// memory at 0xFE00.
    fn oamdma(&mut self, value: u8) {
        let base = (value as u16) << 8;
        for i in range(0u16, 0xA0) {
            let b = self.rb(base + i);
            self.wb(0xFE00 + i, b);
        }
    }
    /// Reads back the raw DMA registers; 0xFF55 reports the remaining length
    /// with bit 7 set when no transfer is active.
    fn hdma_read(&self, a: u16) -> u8 {
        match a {
            0xFF51 .. 0xFF54 => { self.hdma[a - 0xFF51] },
            0xFF55 => self.hdma_len | if self.hdma_status == NoDMA { 0x80 } else { 0 },
            _ => fail!(),
        }
    }
    /// Handles writes to the DMA registers. 0xFF51-0xFF54 latch the (masked)
    /// source/destination bytes; 0xFF55 starts or stops a transfer.
    fn hdma_write(&mut self, a: u16, v: u8) {
        //println!("Writing 0x{:04X} = 0x{:02X}", a, v);
        match a {
            0xFF51 => self.hdma[0] = v,
            0xFF52 => self.hdma[1] = v & 0xF0,
            0xFF53 => self.hdma[2] = v & 0x1F,
            0xFF54 => self.hdma[3] = v & 0xF0,
            0xFF55 => {
                //println!("FF55: 0x{:02X}", v);
                // Writing with bit 7 clear while an HDMA is running aborts it.
                if self.hdma_status == HDMA {
                    if v & 0x80 == 0 { self.hdma_status = NoDMA; };
                    return;
                }
                let src = (self.hdma[0] as u16 << 8) | (self.hdma[1] as u16);
                let dst = (self.hdma[2] as u16 << 8) | (self.hdma[3] as u16) | 0x8000;
                if !(src <= 0x7FF0 || (src >= 0xA000 && src <= 0xDFF0)) { fail!("HDMA transfer with illegal start address {:04X}", src); }
                self.hdma_src = src;
                self.hdma_dst = dst;
                self.hdma_len = v & 0x7F;
                // NOTE(review): v == 0x7F is treated as "no transfer"; the
                // later revision of this file drops that special case —
                // confirm which behaviour is intended.
                self.hdma_status =
                    if v == 0x7F { NoDMA }
                    else if v & 0x80 == 0x80 { HDMA }
                    else { GDMA };
                //println!("New status: {}", match self.hdma_status { NoDMA => "NoDMA", GDMA => "GDMA", HDMA => "HDMA" });
            },
            _ => fail!(),
        };
    }
    /// Copies pending DMA rows (16 bytes each) from `hdma_src` to the GPU.
    /// GDMA moves the whole remaining length; HDMA moves a single row per
    /// call when the GPU allows it. Returns the tick cost (0 if idle).
    fn perform_hdma(&mut self) -> uint {
        let len: uint = match self.hdma_status {
            NoDMA => 0,
            GDMA => self.hdma_len as uint + 1,
            HDMA => if self.gpu.may_hdma() { 1 } else { 0 },
        };
        'i: for _i in range(0, len) {
            for j in range(0u16, 16) {
                let b: u8 = self.rb(self.hdma_src + j);
                self.gpu.wb(self.hdma_dst + j, b);
            }
            self.hdma_src += 0x10;
            self.hdma_dst += 0x10;
            // hdma_len wraps to 0xFF once the last row is done.
            self.hdma_len -= 1;
            if self.hdma_len == 0xFF {
                self.hdma_status = NoDMA;
                break 'i;
            }
        }
        if len > 0 {
            (len * 16) + 1
        } else {
            0
        }
    }
}
Refactored VRAM-DMA
use serial::Serial;
use timer::Timer;
use keypad::Keypad;
use gpu::GPU;
use sound::Sound;
// Sizes of the two on-chip RAM regions: 32 KiB of banked work RAM (eight
// 4 KiB banks, see the `wrambank * 0x1000` indexing below) and the 127-byte
// high RAM at 0xFF80-0xFFFE.
static WRAM_SIZE: uint = 0x8000;
static ZRAM_SIZE: uint = 0x7F;
// Which kind of VRAM DMA transfer, if any, is currently in progress.
#[deriving(Eq)]
enum DMAType {
    NoDMA, // no transfer active
    GDMA,  // general-purpose DMA: whole transfer at once
    HDMA,  // H-blank DMA: one 16-byte row at a time
}
// Memory management unit: routes CPU reads and writes to the cartridge
// (via the MBC), GPU, sound, timer, serial, keypad, work RAM and high RAM.
pub struct MMU {
    priv wram: ~[u8, ..WRAM_SIZE], // banked work RAM (0xC000-0xDFFF + echo)
    priv zram: ~[u8, ..ZRAM_SIZE], // high RAM (0xFF80-0xFFFE)
    priv hdma: [u8, ..4],          // raw DMA source/dest registers 0xFF51-0xFF54
    inte: u8,                      // interrupt-enable register (0xFFFF)
    intf: u8,                      // interrupt-flag register (0xFF0F)
    serial: Serial,
    timer: Timer,
    keypad: Keypad,
    gpu: GPU,
    sound: Sound,
    priv hdma_status: DMAType,     // current VRAM DMA mode
    priv hdma_src: u16,            // current DMA source address
    priv hdma_dst: u16,            // current DMA destination address
    priv hdma_len: u8,             // remaining DMA length minus one (0xFF = done)
    priv wrambank: u8,             // selected work-RAM bank, 1-7 (reg 0xFF70)
    priv mbc: ~::mbc::MBC,         // cartridge memory bank controller
    priv gbmode: ::gbmode::GbMode, // Classic / Color / ColorAsClassic
}
impl MMU {
    /// Builds an MMU in Classic (non-CGB) mode for the given ROM file and
    /// seeds the I/O registers. Fails if the cartridge is CGB-only.
    pub fn new(romname: &str) -> MMU {
        let mut res = MMU {
            wram: ~([0, ..WRAM_SIZE]),
            zram: ~([0, ..ZRAM_SIZE]),
            hdma: [0, ..4],
            wrambank: 1,
            inte: 0,
            intf: 0,
            serial: Serial::new(),
            timer: Timer::new(),
            keypad: Keypad::new(),
            gpu: GPU::new(),
            sound: Sound::new(),
            mbc: ::mbc::get_mbc(&Path::new(romname)),
            gbmode: ::gbmode::Classic,
            hdma_src: 0,
            hdma_dst: 0,
            hdma_status: NoDMA,
            hdma_len: 0xFF,
        };
        // 0x0143 is the cartridge's CGB flag; 0xC0 marks a CGB-only title.
        if res.rb(0x0143) == 0xC0 {
            fail!("This game does not work in Classic mode");
        }
        res.set_initial();
        return res
    }
    /// Builds an MMU with the Color (CGB) GPU; the effective mode is then
    /// refined from the cartridge header by `determine_mode`.
    pub fn new_cgb(romname: &str) -> MMU {
        let mut res = MMU {
            wram: ~([0,.. WRAM_SIZE]),
            zram: ~([0,.. ZRAM_SIZE]),
            wrambank: 1,
            hdma: [0,.. 4],
            inte: 0,
            intf: 0,
            serial: Serial::new(),
            timer: Timer::new(),
            keypad: Keypad::new(),
            gpu: GPU::new_cgb(),
            sound: Sound::new(),
            mbc: ::mbc::get_mbc(&Path::new(romname)),
            gbmode: ::gbmode::Color,
            hdma_src: 0,
            hdma_dst: 0,
            hdma_status: NoDMA,
            hdma_len: 0xFF,
        };
        res.determine_mode();
        res.set_initial();
        return res;
    }
    /// Writes initial values into a handful of I/O registers (timer, LCD
    /// control, scroll, palettes, window) — presumably mirroring the state
    /// the hardware boot ROM leaves behind.
    fn set_initial(&mut self) {
        self.wb(0xFF05, 0);
        self.wb(0xFF06, 0);
        self.wb(0xFF07, 0);
        self.wb(0xFF40, 0x91);
        self.wb(0xFF42, 0);
        self.wb(0xFF43, 0);
        self.wb(0xFF45, 0);
        self.wb(0xFF47, 0xFC);
        self.wb(0xFF48, 0xFF);
        self.wb(0xFF49, 0xFF);
        self.wb(0xFF4A, 0);
        self.wb(0xFF4B, 0);
    }
    /// Reads the CGB flag at 0x0143: bit 7 set selects Color mode, otherwise
    /// the color hardware runs as a classic GB. The GPU is kept in sync.
    fn determine_mode(&mut self) {
        let mode = match self.rb(0x0143) & 0x80 {
            0x80 => ::gbmode::Color,
            _ => ::gbmode::ColorAsClassic,
        };
        self.gbmode = mode;
        self.gpu.gbmode = mode;
    }
    pub fn get_mode(&self) -> ::gbmode::GbMode {
        self.gbmode
    }
    /// Advances timer/keypad/GPU by `cputicks` plus any DMA overhead, and
    /// collects each subsystem's pending interrupt bits into `intf`.
    /// Returns the total number of ticks consumed.
    pub fn cycle(&mut self, cputicks: uint) -> uint {
        let ticks = cputicks + self.perform_vramdma();
        self.timer.cycle(ticks);
        self.intf |= self.timer.interrupt;
        self.timer.interrupt = 0;
        self.intf |= self.keypad.interrupt;
        self.keypad.interrupt = 0;
        self.gpu.cycle(ticks);
        self.intf |= self.gpu.interrupt;
        self.gpu.interrupt = 0;
        return ticks;
    }
    /// Reads one byte, dispatching on the address ranges of the memory map.
    pub fn rb(&self, address: u16) -> u8 {
        match address {
            0x0000 .. 0x7FFF => self.mbc.readrom(address),
            0x8000 .. 0x9FFF => self.gpu.rb(address),
            0xA000 .. 0xBFFF => self.mbc.readram(address),
            // Bank 0 of work RAM plus its echo region.
            0xC000 .. 0xCFFF | 0xE000 .. 0xEFFF => self.wram[address & 0x0FFF],
            // Switchable work-RAM bank plus its echo region.
            0xD000 .. 0xDFFF | 0xF000 .. 0xFDFF => self.wram[(self.wrambank as u16 * 0x1000) | address & 0x0FFF],
            0xFE00 .. 0xFE9F => self.gpu.rb(address),
            0xFF00 => self.keypad.rb(),
            0xFF01 .. 0xFF02 => self.serial.rb(address),
            0xFF04 .. 0xFF07 => self.timer.rb(address),
            0xFF0F => self.intf,
            0xFF10 .. 0xFF26 => self.sound.rb(address),
            // Speed-switch register: listed before the GPU range so it is
            // matched first (match arms are tried in order).
            0xFF4D => 0,
            0xFF40 .. 0xFF4F => self.gpu.rb(address),
            0xFF51 .. 0xFF55 => self.hdma_read(address),
            0xFF68 .. 0xFF6B => self.gpu.rb(address),
            0xFF70 => self.wrambank,
            0xFF80 .. 0xFFFE => self.zram[address & 0x007F],
            0xFFFF => self.inte,
            _ => { warn!("rb not implemented for {:X}", address); 0 },
        }
    }
    /// Reads a little-endian 16-bit word.
    pub fn rw(&self, address: u16) -> u16 {
        (self.rb(address) as u16) | (self.rb(address + 1) as u16 << 8)
    }
    /// Writes one byte, dispatching on the address ranges of the memory map.
    pub fn wb(&mut self, address: u16, value: u8) {
        match address {
            0x0000 .. 0x7FFF => self.mbc.writerom(address, value),
            0x8000 .. 0x9FFF => self.gpu.wb(address, value),
            0xA000 .. 0xBFFF => self.mbc.writeram(address, value),
            0xC000 .. 0xCFFF | 0xE000 .. 0xEFFF => self.wram[address & 0x0FFF] = value,
            0xD000 .. 0xDFFF | 0xF000 .. 0xFDFF => self.wram[(self.wrambank as u16 * 0x1000) | address & 0x0FFF] = value,
            0xFE00 .. 0xFE9F => self.gpu.wb(address, value),
            0xFF00 => self.keypad.wb(value),
            0xFF01 .. 0xFF02 => self.serial.wb(address, value),
            0xFF04 .. 0xFF07 => self.timer.wb(address, value),
            0xFF10 .. 0xFF26 => self.sound.wb(address, value),
            // Writing 0xFF46 triggers an OAM DMA transfer.
            0xFF46 => self.oamdma(value),
            0xFF4D => { if value & 0x1 == 0x1 { warn!("Speed switch requested but not supported"); } }, // CGB speed switch
            0xFF40 .. 0xFF4F => self.gpu.wb(address, value),
            0xFF51 .. 0xFF55 => self.hdma_write(address, value),
            0xFF68 .. 0xFF6B => self.gpu.wb(address, value),
            0xFF0F => self.intf = value,
            // Bank 0 cannot be selected; value 0 maps to bank 1.
            0xFF70 => { self.wrambank = match value & 0x7 { 0 => 1, n => n }; },
            0xFF80 .. 0xFFFE => self.zram[address & 0x007F] = value,
            0xFFFF => self.inte = value,
            _ => warn!("wb not implemented for {:X}", address),
        };
    }
    /// Writes a little-endian 16-bit word.
    pub fn ww(&mut self, address: u16, value: u16) {
        self.wb(address, (value & 0xFF) as u8);
        self.wb(address + 1, (value >> 8) as u8);
    }
    /// OAM DMA: copies 0xA0 bytes from `value << 8` into sprite attribute
    /// memory at 0xFE00.
    fn oamdma(&mut self, value: u8) {
        let base = (value as u16) << 8;
        for i in range(0u16, 0xA0) {
            let b = self.rb(base + i);
            self.wb(0xFE00 + i, b);
        }
    }
    /// Reads back the raw DMA registers; 0xFF55 reports the remaining length
    /// with bit 7 set when no transfer is active.
    fn hdma_read(&self, a: u16) -> u8 {
        match a {
            0xFF51 .. 0xFF54 => { self.hdma[a - 0xFF51] },
            0xFF55 => self.hdma_len | if self.hdma_status == NoDMA { 0x80 } else { 0 },
            _ => fail!(),
        }
    }
    /// Handles writes to the DMA registers. 0xFF51-0xFF54 latch the (masked)
    /// source/destination bytes; 0xFF55 starts or stops a transfer.
    fn hdma_write(&mut self, a: u16, v: u8) {
        match a {
            0xFF51 => self.hdma[0] = v,
            0xFF52 => self.hdma[1] = v & 0xF0,
            0xFF53 => self.hdma[2] = v & 0x1F,
            0xFF54 => self.hdma[3] = v & 0xF0,
            0xFF55 => {
                // Writing with bit 7 clear while an HDMA is running aborts it.
                if self.hdma_status == HDMA {
                    if v & 0x80 == 0 { self.hdma_status = NoDMA; };
                    return;
                }
                let src = (self.hdma[0] as u16 << 8) | (self.hdma[1] as u16);
                let dst = (self.hdma[2] as u16 << 8) | (self.hdma[3] as u16) | 0x8000;
                if !(src <= 0x7FF0 || (src >= 0xA000 && src <= 0xDFF0)) { fail!("HDMA transfer with illegal start address {:04X}", src); }
                self.hdma_src = src;
                self.hdma_dst = dst;
                self.hdma_len = v & 0x7F;
                // Bit 7 selects H-blank DMA; otherwise a general-purpose DMA.
                self.hdma_status =
                    if v & 0x80 == 0x80 { HDMA }
                    else { GDMA };
            },
            _ => fail!(),
        };
    }
    /// Dispatches to the active DMA mode; returns the tick cost (0 if idle).
    fn perform_vramdma(&mut self) -> uint {
        match self.hdma_status
        {
            NoDMA => 0,
            GDMA => self.perform_gdma(),
            HDMA => self.perform_hdma(),
        }
    }
    /// H-blank DMA: copies one 16-byte row per call, only when the GPU
    /// permits it and there is length remaining.
    /// NOTE(review): unlike the earlier revision, this never resets
    /// `hdma_status` to NoDMA when the length is exhausted, so 0xFF55 keeps
    /// reporting an active transfer — confirm this is intended.
    fn perform_hdma(&mut self) -> uint {
        if self.gpu.may_hdma() == false
            || self.hdma_len == 0xFF
        {
            return 0;
        }
        self.perform_vramdma_row();
        return 0x10;
    }
    /// General-purpose DMA: copies all remaining rows at once, then marks
    /// the transfer finished.
    fn perform_gdma(&mut self) -> uint {
        let len = self.hdma_len as uint + 1;
        for _i in range(0, len)
        {
            self.perform_vramdma_row();
        }
        self.hdma_status = NoDMA;
        return len * 0x10;
    }
    /// Copies one 16-byte row from `hdma_src` to the GPU at `hdma_dst` and
    /// advances the transfer state (`hdma_len` wraps to 0xFF when done).
    fn perform_vramdma_row(&mut self) {
        for j in range(0u16, 0x10)
        {
            let b: u8 = self.rb(self.hdma_src + j);
            self.gpu.wb(self.hdma_dst + j, b);
        }
        self.hdma_src += 0x10;
        self.hdma_dst += 0x10;
        self.hdma_len -= 1;
    }
}
|
// Zinc, the bare metal stack for rust.
// Copyright 2014 Ben Gamari <bgamari@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::gc::GC;
use std::iter::FromIterator;
use syntax::ast;
use syntax::ast::P;
use syntax::ast_util::empty_generics;
use syntax::codemap::{respan, mk_sp};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::ext::quote::rt::ToTokens;
use syntax::parse::token;
use super::Builder;
use super::utils;
use super::super::node;
/// A visitor to build the struct for each register
pub struct BuildRegStructs<'a, 'b, 'c> {
    builder: &'a mut Builder,   // accumulates the generated items
    cx: &'b ExtCtxt<'c>,        // syntax-extension context used for AST building
}
impl<'a, 'b, 'c> node::RegVisitor for BuildRegStructs<'a, 'b, 'c> {
    /// For each primitive register: first emit a type item for every
    /// enum-valued field, then emit the register struct itself.
    fn visit_prim_reg(&mut self, path: &Vec<String>, reg: &node::Reg,
                      width: node::RegWidth, fields: &Vec<node::Field>) {
        for field in fields.iter() {
            // Only `EnumField`s produce an extra item; other field kinds
            // return None and are skipped.
            match build_field_type(self.cx, path, reg, field) {
                Some(item) => self.builder.push_item(item),
                None => {}
            }
        }
        let reg_struct = build_reg_struct(self.cx, path, reg, width);
        self.builder.push_item(reg_struct);
    }
}
impl<'a, 'b, 'c> BuildRegStructs<'a, 'b, 'c> {
    /// Creates a visitor that appends generated items through `builder`.
    pub fn new(builder: &'a mut Builder, cx: &'b ExtCtxt<'c>)
               -> BuildRegStructs<'a, 'b, 'c> {
        BuildRegStructs {builder: builder, cx: cx}
    }
}
/// Build a field type if necessary (e.g. in the case of an `EnumField`).
/// Returns `None` for field kinds that need no dedicated type item.
fn build_field_type<'a>(cx: &'a ExtCtxt, path: &Vec<String>,
                        reg: &node::Reg, field: &node::Field)
                        -> Option<P<ast::Item>> {
    match field.ty.node {
        node::EnumField { variants: ref variants, .. } => {
            // FIXME(bgamari): We construct a path, then only take the last
            // segment, this could be more efficient
            let name: ast::Ident =
                utils::field_type_path(cx, path, reg, field)
                .segments.last().unwrap().identifier;
            // One AST variant per declared enum value.
            let enum_def: ast::EnumDef = ast::EnumDef {
                variants: FromIterator::from_iter(
                    variants.iter().map(|v| box(GC) build_enum_variant(cx, v))),
            };
            // Derive FromPrimitive so raw register values can be converted
            // back to the enum; silence the usual generated-code lints.
            let attrs: Vec<ast::Attribute> = vec!(
                utils::list_attribute(cx, "deriving", vec!("FromPrimitive")),
                utils::list_attribute(cx, "allow",
                                      vec!("uppercase_variables",
                                           "dead_code",
                                           "non_camel_case_types",
                                           "missing_doc")));
            let item: P<ast::Item> = box(GC) ast::Item {
                ident: name,
                id: ast::DUMMY_NODE_ID,
                node: ast::ItemEnum(enum_def, empty_generics()),
                vis: ast::Public,
                attrs: attrs,
                span: field.ty.span,
            };
            Some(item)
        },
        _ => None,
    }
}
/// Produce the struct for a primitive typed register, documented with the
/// register's docstring. For instance,
///
///     pub struct REG { value: VolatileCell<u32> }
fn build_reg_struct<'a>(cx: &'a ExtCtxt, path: &Vec<String>,
                        reg: &node::Reg, _width: node::RegWidth) -> P<ast::Item> {
    let packed_ty =
        utils::reg_primitive_type(cx, reg)
        .expect("Unexpected non-primitive reg");
    let reg_doc = match reg.docstring {
        Some(d) => token::get_ident(d.node).get().into_string(),
        None => "no documentation".into_string(),
    };
    let docstring = format!("Register `{}`: {}",
                            reg.name.node,
                            reg_doc);
    let doc_attr = utils::doc_attribute(cx, utils::intern_string(cx, docstring));
    let ty_name = utils::path_ident(cx, path);
    // The register is a single volatile cell of the packed primitive type.
    let item = quote_item!(cx,
        $doc_attr
        #[allow(non_camel_case_types)]
        pub struct $ty_name {
            value: VolatileCell<$packed_ty>,
        }
    );
    item.unwrap()
}
/// Build a variant of an `EnumField`, carrying the variant's docstring and
/// its explicit discriminant value.
fn build_enum_variant<'a>(cx: &'a ExtCtxt,
                          variant: &node::Variant) -> ast::Variant {
    let doc = match variant.docstring {
        Some(d) => token::get_ident(d.node).get().into_string(),
        None => "no documentation".into_string(),
    };
    // Prefix the documentation with the variant's numeric value.
    let docstring = format!("`0x{:x}`. {}",
                            variant.value.node,
                            doc);
    let doc_attr = utils::doc_attribute(cx, utils::intern_string(cx, docstring));
    respan(
        mk_sp(variant.name.span.lo, variant.value.span.hi),
        ast::Variant_ {
            name: cx.ident_of(variant.name.node.as_slice()),
            attrs: vec!(doc_attr),
            kind: ast::TupleVariantKind(Vec::new()),
            id: ast::DUMMY_NODE_ID,
            disr_expr: Some(utils::expr_int(cx, variant.value.node as i64)),
            vis: ast::Inherited,
        }
    )
}
ioreg/register: Clean up redundant lifetimes
// Zinc, the bare metal stack for rust.
// Copyright 2014 Ben Gamari <bgamari@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::gc::GC;
use std::iter::FromIterator;
use syntax::ast;
use syntax::ast::P;
use syntax::ast_util::empty_generics;
use syntax::codemap::{respan, mk_sp};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::ext::quote::rt::ToTokens;
use syntax::parse::token;
use super::Builder;
use super::utils;
use super::super::node;
/// A visitor to build the struct for each register
pub struct BuildRegStructs<'a, 'b, 'c> {
    builder: &'a mut Builder,   // accumulates the generated items
    cx: &'b ExtCtxt<'c>,        // syntax-extension context used for AST building
}
impl<'a, 'b, 'c> node::RegVisitor for BuildRegStructs<'a, 'b, 'c> {
    /// For each primitive register: first emit a type item for every
    /// enum-valued field, then emit the register struct itself.
    fn visit_prim_reg(&mut self, path: &Vec<String>, reg: &node::Reg,
                      width: node::RegWidth, fields: &Vec<node::Field>) {
        for field in fields.iter() {
            // Only `EnumField`s produce an extra item; other field kinds
            // return None and are skipped.
            match build_field_type(self.cx, path, reg, field) {
                Some(item) => self.builder.push_item(item),
                None => {}
            }
        }
        let reg_struct = build_reg_struct(self.cx, path, reg, width);
        self.builder.push_item(reg_struct);
    }
}
impl<'a, 'b, 'c> BuildRegStructs<'a, 'b, 'c> {
    /// Creates a visitor that appends generated items through `builder`.
    pub fn new(builder: &'a mut Builder, cx: &'b ExtCtxt<'c>)
               -> BuildRegStructs<'a, 'b, 'c> {
        BuildRegStructs {builder: builder, cx: cx}
    }
}
/// Build a field type if necessary (e.g. in the case of an `EnumField`).
/// Returns `None` for field kinds that need no dedicated type item.
fn build_field_type(cx: &ExtCtxt, path: &Vec<String>,
                    reg: &node::Reg, field: &node::Field)
                    -> Option<P<ast::Item>> {
    match field.ty.node {
        node::EnumField { variants: ref variants, .. } => {
            // FIXME(bgamari): We construct a path, then only take the last
            // segment, this could be more efficient
            let name: ast::Ident =
                utils::field_type_path(cx, path, reg, field)
                .segments.last().unwrap().identifier;
            // One AST variant per declared enum value.
            let enum_def: ast::EnumDef = ast::EnumDef {
                variants: FromIterator::from_iter(
                    variants.iter().map(|v| box(GC) build_enum_variant(cx, v))),
            };
            // Derive FromPrimitive so raw register values can be converted
            // back to the enum; silence the usual generated-code lints.
            let attrs: Vec<ast::Attribute> = vec!(
                utils::list_attribute(cx, "deriving", vec!("FromPrimitive")),
                utils::list_attribute(cx, "allow",
                                      vec!("uppercase_variables",
                                           "dead_code",
                                           "non_camel_case_types",
                                           "missing_doc")));
            let item: P<ast::Item> = box(GC) ast::Item {
                ident: name,
                id: ast::DUMMY_NODE_ID,
                node: ast::ItemEnum(enum_def, empty_generics()),
                vis: ast::Public,
                attrs: attrs,
                span: field.ty.span,
            };
            Some(item)
        },
        _ => None,
    }
}
/// Produce the struct for a primitive typed register, documented with the
/// register's docstring. For instance,
///
///     pub struct REG { value: VolatileCell<u32> }
fn build_reg_struct(cx: &ExtCtxt, path: &Vec<String>,
                    reg: &node::Reg, _width: node::RegWidth) -> P<ast::Item> {
    let packed_ty =
        utils::reg_primitive_type(cx, reg)
        .expect("Unexpected non-primitive reg");
    let reg_doc = match reg.docstring {
        Some(d) => token::get_ident(d.node).get().into_string(),
        None => "no documentation".into_string(),
    };
    let docstring = format!("Register `{}`: {}",
                            reg.name.node,
                            reg_doc);
    let doc_attr = utils::doc_attribute(cx, utils::intern_string(cx, docstring));
    let ty_name = utils::path_ident(cx, path);
    // The register is a single volatile cell of the packed primitive type.
    let item = quote_item!(cx,
        $doc_attr
        #[allow(non_camel_case_types)]
        pub struct $ty_name {
            value: VolatileCell<$packed_ty>,
        }
    );
    item.unwrap()
}
/// Build a variant of an `EnumField`, carrying the variant's docstring and
/// its explicit discriminant value.
fn build_enum_variant(cx: &ExtCtxt, variant: &node::Variant)
                      -> ast::Variant {
    let doc = match variant.docstring {
        Some(d) => token::get_ident(d.node).get().into_string(),
        None => "no documentation".into_string(),
    };
    // Prefix the documentation with the variant's numeric value.
    let docstring = format!("`0x{:x}`. {}",
                            variant.value.node,
                            doc);
    let doc_attr = utils::doc_attribute(cx, utils::intern_string(cx, docstring));
    respan(
        mk_sp(variant.name.span.lo, variant.value.span.hi),
        ast::Variant_ {
            name: cx.ident_of(variant.name.node.as_slice()),
            attrs: vec!(doc_attr),
            kind: ast::TupleVariantKind(Vec::new()),
            id: ast::DUMMY_NODE_ID,
            disr_expr: Some(utils::expr_int(cx, variant.value.node as i64)),
            vis: ast::Inherited,
        }
    )
}
|
use memory::{Memory, pack_u16, high_byte, low_byte, low_nibble, high_nibble};
use extensions::Incrementor;
use cpu::{Register, Flags, CarryFlag, HalfCarryFlag, ZeroFlag, SubtractFlag};
/// True if adding the low nibbles of `val1` and `val2` carries out of bit 3.
fn half_carry(val1: u8, val2: u8) -> bool {
    return (low_nibble(val1) + low_nibble(val2)) > 0x0F
}
/// True if `val1 + val2` overflows a byte (carry out of bit 7).
fn carry(val1: u8, val2: u8) -> bool {
    return val1 as u16 + val2 as u16 > 0xFF;
}
/// Add the value of two registers and store it in the first register.
/// Sets H on a carry from bit 3, C on a carry from bit 7, and Z when the
/// result is zero.
/// NOTE(review): Z is only set when neither H nor C is set; hardware ADD
/// sets Z purely on a zero result — confirm this is intended.
pub fn add(first: &mut Register<u8>, second: u8, freg: &mut Register<Flags>) {
    debug!("add: {} {}", first.read(), second);
    let val1 = first.read();
    let val2 = second;
    // NOTE(review): relies on pre-1.0 wrapping semantics of u8 addition;
    // modern Rust would panic on overflow in debug builds.
    let result = val1 + val2;
    let mut flags = Flags::empty();
    if half_carry(val1, val2) {
        flags = HalfCarryFlag;
    }
    if carry(val1, val2) {
        flags = flags | CarryFlag;
    }
    if result == 0 && flags == Flags::empty() {
        flags = ZeroFlag;
    }
    first.write(result);
    freg.write(flags);
}
/// Load the value from one register into another; flags are unaffected.
pub fn ld_reg_to_reg(target: &mut Register<u8>, source: &Register<u8>) {
    debug!("ld_reg_to_reg: {}", source.read());
    let val = source.read();
    target.write(val);
}
/// Loads the byte at the current program counter into `reg` and advances
/// the program counter past it (immediate 8-bit load).
pub fn ld_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>) {
    let val = mem.read_byte(pc.read());
    debug!("ld_next_byte_to_reg: {} {}", pc.read(), val);
    pc.increment();
    reg.write(val);
}
/// Loads the next two bytes (a 16-bit word) into the register pair:
/// high byte into `hb`, low byte into `lb`. Advances pc by two.
pub fn ld_next_two_byte_into_reg_pair(mem: &Memory, pc: &mut Register<u16>,
                                      hb: &mut Register<u8>, lb: &mut Register<u8>) {
    let val = mem.read_word(pc.read());
    pc.increment();
    pc.increment();
    debug!("ld_next_two_bytes_into_reg_pair: {}", val);
    hb.write(high_byte(val));
    lb.write(low_byte(val));
}
/// Writes the passed in value to memory at the 16-bit address formed from
/// the two address-byte parameters (msb:lsb).
pub fn write_value_to_memory_at_address(mem: &mut Memory, val: u8, addr_msb: u8, addr_lsb: u8) {
    let addr = ((addr_msb as uint) << 8) + addr_lsb as uint;
    mem.write_byte(addr as u16, val);
}
/// Increments the pair of registers as if they represent a 16-bit value.
/// Wrap-around at 0xFFFF falls out of the `as u16` truncation below.
pub fn increment_register_pair(msb: &mut Register<u8>, lsb: &mut Register<u8>) {
    let incremented_val = ((msb.read() as uint) << 8) + lsb.read() as uint + 1;
    msb.write(high_byte(incremented_val as u16));
    lsb.write(low_byte(incremented_val as u16));
}
/// Increment register by 1.
/// Set ZeroFlag if result is 0.
/// Set HalfCarryFlag if there is a carry from bit 3 (low nibble was 0xF).
/// Clears SubtractFlag; CarryFlag is left untouched.
pub fn increment_register(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = reg.read();
    let mut flags = freg.read();
    // Start from a clean slate for the flags this op owns.
    flags.remove(SubtractFlag);
    flags.remove(HalfCarryFlag);
    flags.remove(ZeroFlag);
    // A low nibble of 0xF carries into bit 4 when incremented.
    if low_nibble(val) == 0xF {
        flags.insert(HalfCarryFlag);
    }
    reg.increment();
    if reg.read() == 0 {
        flags.insert(ZeroFlag);
    }
    freg.write(flags);
}
/// Decrement register by 1.
/// SubtractFlag is always set; ZeroFlag is set when the result is 0;
/// HalfCarryFlag is set when no borrow from bit 4 is needed (low nibble was
/// non-zero); CarryFlag is left untouched.
pub fn decrement_register(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let before = reg.read();
    let mut flags = freg.read();
    flags.remove(ZeroFlag | HalfCarryFlag);
    flags.insert(SubtractFlag);
    if before & 0x0F != 0 {
        flags.insert(HalfCarryFlag);
    }
    reg.decrement();
    if reg.read() == 0 {
        flags.insert(ZeroFlag);
    }
    freg.write(flags);
}
/// Rotate register left
/// Set ZeroFlag if result is zero
/// Set CarryFlag if bit 7 is 1
/// NOTE(review): bit 7 is shifted out and discarded (`val << 1`), not wrapped
/// around into bit 0 as a true RLC would do ((v << 1) | (v >> 7)). Confirm
/// against the intended opcode before changing; the unit tests encode the
/// current behaviour (0xFF -> 0xFE).
pub fn rotate_left_with_carry(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = reg.read();
    // Flags are rebuilt from scratch on every rotate.
    freg.write(Flags::empty());
    if val == 0 {
        // Zero stays zero: report only ZeroFlag and skip the shift.
        freg.write(ZeroFlag);
        return;
    }
    if val & 0x80 != 0 {
        freg.write(CarryFlag);
    }
    reg.write(val << 1);
}
/// Stores the stack pointer at the 16-bit address given by the two immediate
/// bytes at the program counter, advancing the program counter past them.
pub fn write_sp_to_address_immediate(mem: &mut Memory, pc: &mut Register<u16>, sp: &Register<u16>){
    let target = mem.read_word(pc.read());
    pc.increment();
    pc.increment();
    debug!("Writing {} to {}", sp.read(), target);
    mem.write_word(target, sp.read());
}
/// Performs no operation and consumes a cycle.
/// Only emits a trace line; no CPU state is touched.
pub fn nop() {
    debug!("nop");
}
/// Adds two sets of registers as 16 bit numbers with carries counted on bit 11 and 15
/// NOTE(review): both carry tests below look only at the high bytes' nibble
/// sums, so a carry propagating up from the lower byte/nibble is not counted
/// (e.g. 0x0FFF + 0x0001 reports no HalfCarryFlag). Confirm against the
/// intended ADD HL,rr semantics; the unit tests encode the current behaviour.
pub fn add_register_pair_to_register_pair(rega: &mut Register<u8>, regb: &mut Register<u8>, reg1: u8, reg2: u8, freg: &mut Register<Flags>) {
    let first = pack_u16(rega.read(), regb.read());
    let second = pack_u16(reg1, reg2);
    let sum = first + second;
    // Reset subtract flag, leave ZeroFlag alone
    let mut flags = freg.read();
    flags.remove(SubtractFlag);
    flags.remove(CarryFlag);
    flags.remove(HalfCarryFlag);
    // Carry out of bit 15, approximated from the two high nibbles only.
    if high_nibble(rega.read()) + high_nibble(reg1) > 15 {
        flags.insert(CarryFlag)
    }
    // Carry out of bit 11, approximated from the high bytes' low nibbles only.
    if low_nibble(rega.read()) + low_nibble(reg1) > 15 {
        flags.insert(HalfCarryFlag);
    }
    rega.write(high_byte(sum));
    regb.write(low_byte(sum));
    freg.write(flags);
}
/// Loads the byte stored at the address formed by (reg1:reg2) into `reg`.
pub fn ld_from_reg_pair_as_address(mem: &Memory, reg: &mut Register<u8>, reg1: u8, reg2: u8) {
    reg.write(mem.read_byte(pack_u16(reg1, reg2)));
}
/// Treats the two registers as one 16-bit value (reg1:reg2) and subtracts one,
/// writing the halves back.
pub fn decrement_register_pair(reg1: &mut Register<u8>, reg2: &mut Register<u8>) {
    let decremented = pack_u16(reg1.read(), reg2.read()) - 1;
    reg1.write(high_byte(decremented));
    reg2.write(low_byte(decremented));
}
pub fn rotate_right_with_carry(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = reg.read();
if val == 0 {
freg.write(ZeroFlag);
} else {
if val & 0x01 == 1 {
freg.write(CarryFlag);
reg.write(val >> 1);
}
}
}
/// Add n to current address and jump to it - n = one byte signed immediate value
/// NOTE(review): the offset is decoded as sign-and-magnitude (bit 7 = sign,
/// low 7 bits = distance), not as a two's-complement i8 — 0x8A jumps back by
/// 0x0A, whereas `0x8A as i8` would be -118. Confirm which encoding the
/// target CPU expects; the unit tests encode the sign-magnitude behaviour.
pub fn jump_by_signed_immediate(mem: &Memory, pc: &mut Register<u16>) {
    let offset = mem.read_byte(pc.read());
    pc.increment();
    // The jump is relative to the address *after* the operand byte.
    let current_pc = pc.read();
    let mut new_pc = 0;
    if (offset & 0x80) == 0 {
        new_pc = current_pc + offset as u16;
    } else {
        new_pc = current_pc - (offset & 0x7F) as u16;
    }
    pc.write(new_pc);
}
/// Relative jump by the signed immediate when `flag` is NOT set; otherwise
/// just steps the program counter past the operand byte.
pub fn relative_jmp_by_signed_immediate_if_not_flag(mem: &Memory, pc: &mut Register<u16>, freg: &Register<Flags>, flag: Flags) {
    if freg.read().contains(flag) {
        pc.increment();
    } else {
        jump_by_signed_immediate(mem, pc);
    }
}
/// Writes `val` to the address held in (high_reg:low_reg), then bumps that
/// 16-bit address by one and stores it back into the pair.
pub fn write_value_to_memory_at_address_and_increment_register(mem: &mut Memory, val: u8, high_reg: &mut Register<u8>, low_reg: &mut Register<u8>) {
    let addr = pack_u16(high_reg.read(), low_reg.read());
    mem.write_byte(addr, val);
    let bumped = addr + 1;
    high_reg.write(high_byte(bumped));
    low_reg.write(low_byte(bumped));
}
/// Writes `val` to the address held in (high_reg:low_reg), then steps that
/// 16-bit address down by one and stores it back into the pair.
pub fn write_value_to_memory_at_address_and_decrement_register(mem: &mut Memory, val: u8, high_reg: &mut Register<u8>, low_reg: &mut Register<u8>) {
    let addr = pack_u16(high_reg.read(), low_reg.read());
    mem.write_byte(addr, val);
    let stepped = addr - 1;
    high_reg.write(high_byte(stepped));
    low_reg.write(low_byte(stepped));
}
/// Relative jump by the signed immediate when `flag` IS set; otherwise just
/// steps the program counter past the operand byte.
pub fn relative_jmp_by_signed_immediate_if_flag(mem: &Memory, pc: &mut Register<u16>, freg: &Register<Flags>, flag: Flags) {
    if !freg.read().contains(flag) {
        pc.increment();
    } else {
        jump_by_signed_immediate(mem, pc);
    }
}
/// Loads the byte at the address in (high_byte:low_byte) into `reg`, then
/// increments the pair so successive calls walk forward through memory.
pub fn ld_from_address_pointed_to_by_register_pair_and_increment_register_pair(mem: &Memory, reg: &mut Register<u8>, high_byte: &mut Register<u8>, low_byte: &mut Register<u8>) {
    let addr = pack_u16(high_byte.read(), low_byte.read());
    reg.write(mem.read_byte(addr));
    increment_register_pair(high_byte, low_byte);
}
/// CPL: bitwise-complements the register. HalfCarryFlag and SubtractFlag are
/// set; ZeroFlag and CarryFlag are left untouched.
pub fn complement(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let flipped = !reg.read();
    reg.write(flipped);
    let updated = freg.read() | HalfCarryFlag | SubtractFlag;
    freg.write(updated);
}
/// Loads the 16-bit word at the program counter into `reg` (e.g. sp) and
/// advances the program counter past both bytes.
pub fn ld_next_two_bytes_into_reg(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u16>) {
    let word = mem.read_word(pc.read());
    pc.increment();
    pc.increment();
    debug!("ld_next_two_bytes_into_reg: {}", word);
    reg.write(word);
}
/// Increments the byte at address (hb:lb) in place, updating flags exactly as
/// `increment_register` would for a register holding that byte.
pub fn increment_value_at_address(mem: &mut Memory, hb: u8, lb: u8, freg: &mut Register<Flags>) {
    let addr = pack_u16(hb, lb);
    let mut scratch = Register::new(mem.read_byte(addr));
    increment_register(&mut scratch, freg);
    mem.write_byte(addr, scratch.read());
}
/// Decrements the byte at address (hb:lb) in place, updating flags exactly as
/// `decrement_register` would for a register holding that byte.
pub fn decrement_value_at_address(mem: &mut Memory, hb: u8, lb: u8, freg: &mut Register<Flags>) {
    let addr = pack_u16(hb, lb);
    let mut scratch = Register::new(mem.read_byte(addr));
    decrement_register(&mut scratch, freg);
    mem.write_byte(addr, scratch.read());
}
/// Reads the immediate byte at the program counter and stores it at the
/// address (hb:lb), advancing the program counter by one.
pub fn ld_immediate_into_address(mem: &mut Memory, pc: &mut Register<u16>, hb: u8, lb: u8) {
    let immediate = mem.read_byte(pc.read());
    pc.increment();
    mem.write_byte(pack_u16(hb, lb), immediate);
}
/// Turns `flag` on in the flags register, leaving every other flag as-is.
pub fn set_flag(freg: &mut Register<Flags>, flag: Flags) {
    let updated = freg.read() | flag;
    freg.write(updated);
}
/// Loads the byte at the address in (high_byte:low_byte) into `reg`, then
/// decrements the pair so successive calls walk backward through memory.
pub fn ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair(mem: &Memory, reg: &mut Register<u8>, high_byte: &mut Register<u8>, low_byte: &mut Register<u8>) {
    let addr = pack_u16(high_byte.read(), low_byte.read());
    reg.write(mem.read_byte(addr));
    decrement_register_pair(high_byte, low_byte);
}
/// Turns `flag` off in the flags register, leaving every other flag as-is.
pub fn reset_flag(freg: &mut Register<Flags>, flag: Flags) {
    let mut updated = freg.read();
    updated.remove(flag);
    freg.write(updated);
}
/// Stores the literal `val` into `reg` (LD r, n style move); flags untouched.
pub fn copy_value_into_register(reg: &mut Register<u8>, val: u8) {
    reg.write(val);
}
/// Adds the byte stored at address (hb:lb) into `reg`, updating flags via `add`.
pub fn add_value_at_address(mem: &Memory, reg: &mut Register<u8>, hb: u8, lb: u8, freg: &mut Register<Flags>) {
    let operand = mem.read_byte(pack_u16(hb, lb));
    add(reg, operand, freg);
}
#[test]
fn test_add_value_at_address() {
let mut first = Register::new(0x05);
let mut mem = Memory::new(0xFFFF);
let mut flags = Register::new(SubtractFlag | CarryFlag);
mem.write_byte(0x8476, 0x0B);
add_value_at_address(&mut mem, &mut first, 0x84, 0x76, &mut flags);
assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
let mut a = Register::new(0xFA);
mem.write_byte(0xADCD, 0x07);
add_value_at_address(&mut mem, &mut a, 0xAD, 0xCD, &mut flags);
assert!(a.read() == 0x01);
assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
a.write(0);
add_value_at_address(&mut mem, &mut a, 0x11, 0x11, &mut flags);
assert!(a.read() == 0x0);
assert!(flags.read() == ZeroFlag);
}
#[test]
fn test_copy_value_into_register() {
let mut reg = Register::new(10);
copy_value_into_register(&mut reg, 0x18);
assert!(reg.read() == 0x18);
}
#[test]
fn test_reset_flag() {
let mut freg = Register::new(CarryFlag | ZeroFlag);
reset_flag(&mut freg, CarryFlag);
assert!(!freg.read().contains(CarryFlag));
assert!(freg.read().contains(ZeroFlag));
}
#[test]
fn test_ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair() {
let mut mem = Memory::new(0xFFFF);
let mut reg = Register::new(0x12);
let mut high_byte = Register::new(0xAB);
let mut low_byte = Register::new(0xCD);
mem.write_byte(0xABCD, 0x54);
ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair(&mem, &mut reg, &mut high_byte, &mut low_byte);
assert!(reg.read() == 0x54);
assert!(low_byte.read() == 0xCC);
}
#[test]
fn test_set_flag() {
let mut freg = Register::new(Flags::empty());
set_flag(&mut freg, CarryFlag);
assert!(freg.read().contains(CarryFlag));
}
#[test]
fn test_ld_immediate_into_address() {
let mut mem = Memory::new(0xFFFF);
let mut pc = Register::new(0xAD12);
mem.write_byte(0xAD12, 0xBB);
ld_immediate_into_address(&mut mem, &mut pc, 0x12, 0x34);
assert!(mem.read_byte(0x1234) == 0xBB);
assert!(pc.read() == 0xAD13);
}
#[test]
fn test_decrement_value_at_address() {
let mut mem = Memory::new(0xFFFF);
let mut freg = Register::new(CarryFlag);
mem.write_byte(0x1010, 1);
decrement_value_at_address(&mut mem, 0x10, 0x10, &mut freg);
assert!(mem.read_byte(0x1010) == 0);
assert!(freg.read().is_all());
mem.write_byte(0x01AB, 0x20);
decrement_value_at_address(&mut mem, 0x01, 0xAB, &mut freg);
assert!(mem.read_byte(0x01AB) == 0x1F);
assert!(freg.read() == CarryFlag | SubtractFlag);
freg.write(ZeroFlag);
mem.write_byte(0xABCD, 0xED);
decrement_value_at_address(&mut mem, 0xAB, 0xCD, &mut freg);
assert!(mem.read_byte(0xABCD) == 0xEC);
assert!(freg.read() == SubtractFlag | HalfCarryFlag);
}
#[test]
fn test_increment_value_at_address() {
let mut mem = Memory::new(0xFFFF);
let mut freg = Register::new(CarryFlag);
increment_value_at_address(&mut mem, 0x10, 0x10, &mut freg);
assert!(mem.read_byte(0x1010) == 1);
assert!(freg.read() == CarryFlag);
mem.write_byte(0x01AB, 0x1F);
increment_value_at_address(&mut mem, 0x01, 0xAB, &mut freg);
assert!(mem.read_byte(0x01AB) == 0x20);
assert!(freg.read().contains(CarryFlag));
assert!(freg.read().contains(HalfCarryFlag));
freg.write(SubtractFlag);
mem.write_byte(0xABCD, 0xED);
increment_value_at_address(&mut mem, 0xAB, 0xCD, &mut freg);
assert!(mem.read_byte(0xABCD) == 0xEE);
assert!(freg.read() == Flags::empty());
}
#[test]
fn test_write_value_to_memory_at_address_and_decrement_register() {
let mut mem = Memory::new(0xFFFF);
let mut val = 0x8;
let mut high_byte = Register::new(0x12);
let mut low_byte = Register::new(0x34);
write_value_to_memory_at_address_and_decrement_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(low_byte.read() == 0x33, "Should increment register");
assert!(mem.read_byte(0x1234) == 0x8, "Should correctly write value");
high_byte.write(0x11);
low_byte.write(0x00);
write_value_to_memory_at_address_and_decrement_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(mem.read_byte(0x1100) == 0x8);
assert!(high_byte.read() == 0x10);
assert!(low_byte.read() == 0xFF);
}
#[test]
fn test_ld_next_two_bytes_into_reg() {
let mut mem = Memory::new(65536);
let mut pc = Register::new(11);
let mut reg = Register::new(0);
mem.write_word(11, 0xDEAB);
ld_next_two_bytes_into_reg(&mem, &mut pc, &mut reg);
assert!(pc.read() == 13);
assert!(reg.read() == 0xDEAB);
}
#[test]
fn test_complement() {
let mut a = Register::new(0x11);
let mut freg = Register::new(ZeroFlag | CarryFlag);
complement(&mut a, &mut freg);
assert!(a.read() == !0x11);
assert!(freg.read().is_all());
freg.write(Flags::empty());
complement(&mut a, &mut freg);
assert!(a.read() == 0x11);
assert!(freg.read().contains(HalfCarryFlag));
assert!(freg.read().contains(SubtractFlag));
}
#[test]
fn test_ld_from_address_pointed_to_by_register_pair_and_increment_register_pair() {
let mut mem = Memory::new(0xFFFF);
let mut reg = Register::new(0x12);
let mut high_byte = Register::new(0xAB);
let mut low_byte = Register::new(0xCD);
mem.write_byte(0xABCD, 0x54);
ld_from_address_pointed_to_by_register_pair_and_increment_register_pair(&mem, &mut reg, &mut high_byte, &mut low_byte);
assert!(reg.read() == 0x54);
assert!(low_byte.read() == 0xCE);
}
#[test]
fn test_relative_jmp_by_signed_immediate_if_flag() {
let mut mem = Memory::new(0xFFFF);
let mut pc = Register::new(0x1234);
let mut freg = Register::new(Flags::empty());
// Forwards
freg.write(ZeroFlag);
mem.write_byte(0x1234, 0x55);
relative_jmp_by_signed_immediate_if_flag(&mem, &mut pc, &freg, ZeroFlag);
assert!(pc.read() == 0x128A, "Should jump forwards");
// Backwards
freg.write(ZeroFlag);
mem.write_byte(0x128A, 0x81);
relative_jmp_by_signed_immediate_if_flag(&mem, &mut pc, &freg, ZeroFlag);
assert!(pc.read() == 0x128A, "Should jump back");
// No jump because ZeroFlag is not set
freg.write(Flags::empty());
mem.write_byte(0x1288, 0xFF);
relative_jmp_by_signed_immediate_if_flag(&mem, &mut pc, &freg, ZeroFlag);
assert!(pc.read() == 0x128B, "Should not jump if ZeroFlag is not set. PC should increment to go past immediate value");
}
#[test]
fn test_write_value_to_memory_at_address_and_increment_register() {
let mut mem = Memory::new(0xFFFF);
let mut val = 0x8;
let mut high_byte = Register::new(0x12);
let mut low_byte = Register::new(0x34);
write_value_to_memory_at_address_and_increment_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(low_byte.read() == 0x35, "Should increment register");
assert!(mem.read_byte(0x1234) == 0x8, "Should correctly write value");
low_byte.write(0xFF);
write_value_to_memory_at_address_and_increment_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(mem.read_byte(0x12FF) == 0x8);
assert!(high_byte.read() == 0x13);
assert!(low_byte.read() == 0x00);
}
#[test]
fn test_relative_jmp_by_signed_immediate_if_not_flag() {
let mut mem = Memory::new(0xFFFF);
let mut pc = Register::new(0x1234);
let mut freg = Register::new(Flags::empty());
// Forwards
mem.write_byte(0x1234, 0x55);
relative_jmp_by_signed_immediate_if_not_flag(&mem, &mut pc, &freg, ZeroFlag);
assert!(pc.read() == 0x128A, "Should jump forwards");
// Backwards
mem.write_byte(0x1289, 0x85);
relative_jmp_by_signed_immediate_if_not_flag(&mem, &mut pc, &freg, CarryFlag);
assert!(pc.read() == 0x128B, "Should jump back");
// No jump because ZeroFlag is set
freg.write(ZeroFlag);
mem.write_byte(0x128B, 0xFF);
relative_jmp_by_signed_immediate_if_not_flag(&mem, &mut pc, &freg, ZeroFlag);
println!("{}", pc.read());
assert!(pc.read() == 0x128C, "Should not jump if ZeroFlag is set");
}
#[test]
fn test_jump_by_signed_immediate() {
    let mut mem = Memory::new(0x10000);
    let mut pc = Register::new(0x0101);
    // 0x8A decodes as "back 0x0A" under the sign-magnitude scheme used by
    // jump_by_signed_immediate (as a two's-complement i8 it would be -118).
    mem.write_byte(0x0101, 0x8A);
    jump_by_signed_immediate(&mem, &mut pc);
    assert!(pc.read() == 0xF8, "Should jump backwards with negative number");
    mem.write_byte(0xF8, 0x37);
    jump_by_signed_immediate(&mem, &mut pc);
    assert!(pc.read() == 0x130);
}
#[test]
fn test_rotate_right_with_carry() {
let mut reg = Register::new(0x99);
let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
rotate_right_with_carry(&mut reg, &mut freg);
assert!(reg.read() == 0x4C);
assert!(freg.read() == CarryFlag);
reg.write(0x0);
rotate_right_with_carry(&mut reg, &mut freg);
assert!(reg.read() == 0x0);
assert!(freg.read() == ZeroFlag);
}
#[test]
fn test_decrement_register_pair() {
let mut reg1 = Register::new(0x70);
let mut reg2 = Register::new(0x00);
decrement_register_pair(&mut reg1, &mut reg2);
assert!(reg1.read() == 0x6F);
assert!(reg2.read() == 0xFF);
}
#[test]
fn test_ld_from_reg_pair_as_address() {
let mut mem = Memory::new(65000);
let mut rega = Register::new(0x00);
let mut reg1 = Register::new(0x12);
let mut reg2 = Register::new(0x34);
mem.write_byte(0x1234, 0xAA);
ld_from_reg_pair_as_address(&mem, &mut rega, reg1.read(), reg2.read());
assert!(rega.read() == 0xAA);
}
#[test]
fn test_add_register_pair_to_register_pair() {
let mut rega = Register::new(0x11);
let mut regb = Register::new(0x11);
let mut reg1 = Register::new(0x11);
let mut reg2 = Register::new(0x11);
let mut freg = Register::new(ZeroFlag | SubtractFlag | HalfCarryFlag | CarryFlag);
// Basic add make sure ZeroFlag isn't affected
add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
assert!(pack_u16(rega.read(), regb.read()) == 0x2222);
assert!(freg.read() == ZeroFlag);
rega.write(0xF1);
regb.write(0xAB);
reg1.write(0x12);
reg2.write(0x12);
// Carry from bit 15
add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
assert!(pack_u16(rega.read(), regb.read()) == 0x03BD);
assert!(freg.read() == ZeroFlag | CarryFlag);
rega.write(0x1E);
regb.write(0xAB);
reg1.write(0x12);
reg2.write(0x16);
freg.write(ZeroFlag);
// Carry from bit 11
add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
assert!(pack_u16(rega.read(), regb.read()) == 0x30C1);
println!("{}", freg.read().bits());
assert!(freg.read() == ZeroFlag | HalfCarryFlag);
}
#[test]
fn test_write_stack_pointer_to_address_immediate() {
let mut sp = Register::new(0xBEEF);
let mut pc = Register::new(0x111);
let mut mem = Memory::new(65647);
mem.write_byte(0x111, 0xAD);
mem.write_byte(0x112, 0xDE);
write_sp_to_address_immediate(&mut mem, &mut pc, &sp);
assert!(pc.read() == 0x113);
assert!(mem.read_word(0xDEAD) == 0xBEEF);
}
#[test]
fn test_rotate_left_with_carry() {
let mut reg = Register::new(0x0F);
let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
rotate_left_with_carry(&mut reg, &mut freg);
// Rotate should happen
assert!(reg.read() == 0x1E);
assert!(freg.read() == Flags::empty());
let mut regb = Register::new(0x00);
rotate_left_with_carry(&mut regb, &mut freg);
// Zero should return zero with ZeroFlag
assert!(regb.read() == 0x00);
assert!(freg.read() == ZeroFlag);
let mut regc = Register::new(0xFF);
rotate_left_with_carry(&mut regc, &mut freg);
// Carry should get set
assert!(regc.read() == 0xFE);
assert!(freg.read() == CarryFlag);
}
#[test]
fn test_decrement_register() {
let mut reg = Register::new(1);
let mut freg = Register::new(Flags::empty());
decrement_register(&mut reg, &mut freg);
assert!(reg.read() == 0);
assert!(freg.read() == ZeroFlag | SubtractFlag | HalfCarryFlag);
reg.write(0xF1);
freg.write(Flags::empty());
decrement_register(&mut reg, &mut freg);
assert!(reg.read() == 0xF0);
assert!(freg.read() == SubtractFlag | HalfCarryFlag);
reg.write(0xF0);
freg.write(Flags::empty());
decrement_register(&mut reg, &mut freg);
assert!(reg.read() == 0xEF);
assert!(freg.read() == SubtractFlag);
}
#[test]
fn test_increment_register() {
let mut reg = Register::new(1);
let mut freg = Register::new(ZeroFlag | HalfCarryFlag);
increment_register(&mut reg, &mut freg);
assert!(reg.read() == 2);
assert!(freg.read() == Flags::empty());
let mut regb = Register::new(0x0F);
increment_register(&mut regb, &mut freg);
assert!(regb.read() == 0x10);
assert!(freg.read() == HalfCarryFlag);
let mut regc = Register::new(0xFF);
freg.write(Flags::empty());
increment_register(&mut regc, &mut freg);
assert!(regc.read() == 0x00);
assert!(freg.read() == HalfCarryFlag | ZeroFlag);
}
#[test]
fn test_add_reg_with_reg() {
let mut first = Register::new(0x05);
let mut second = Register::new(0x0B);
let mut flags = Register::new(SubtractFlag | CarryFlag);
add(&mut first, second.read(), &mut flags);
assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
let mut a = Register::new(0xFA);
let mut b = Register::new(0x07);
add(&mut a, b.read(), &mut flags);
assert!(a.read() == 0x01);
assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
a.write(0);
b.write(0);
add(&mut a, b.read(), &mut flags);
assert!(a.read() == 0x0);
assert!(flags.read() == ZeroFlag);
}
#[test]
fn test_ld_immediate() {
let mut mem = Memory::new(65536);
let mut pc = Register::new(11);
let mut reg = Register::new(0);
mem.write_byte(11, 0xFA);
ld_immediate(&mem, &mut pc, &mut reg);
assert!(reg.read() == 0xFA);
assert!(pc.read() == 12);
}
#[test]
fn test_ld_reg_to_reg() {
let mut target = Register::new(5);
let mut source = Register::new(10);
ld_reg_to_reg(&mut target, &source);
assert!(target.read() == 10);
assert!(source.read() == 10);
}
#[test]
fn test_ld_next_two_byte_into_reg_pair() {
let mut mem = Memory::new(65536);
let mut pc = Register::new(11);
let mut reg = Register::new(0);
let mut reg2 = Register::new(0);
mem.write_word(11, 0xDEAB);
ld_next_two_byte_into_reg_pair(&mem, &mut pc, &mut reg, &mut reg2);
assert!(pc.read() == 13);
assert!(reg.read() == 0xDE);
assert!(reg2.read() == 0xAB);
}
#[test]
fn test_write_value_to_memory_at_address() {
let mut mem = Memory::new(65536);
let mut msb = 0xFF;
let mut lsb = 0x11;
let val = 100;
write_value_to_memory_at_address(&mut mem, val, msb, lsb);
assert!(mem.read_byte(0xFF11) == val, "Memory does match what was written");
}
#[test]
fn test_increment_register_pair() {
let mut msb = Register::new(0x11);
let mut lsb = Register::new(0x11);
increment_register_pair(&mut msb, &mut lsb);
assert!(msb.read() == 0x11);
assert!(lsb.read() == 0x12);
let mut msb_2 = Register::new(0x10);
let mut lsb_2 = Register::new(0xFF);
increment_register_pair(&mut msb_2, &mut lsb_2);
assert!(msb_2.read() == 0x11);
assert!(lsb_2.read() == 0x00);
}
Add adc method
use memory::{Memory, pack_u16, high_byte, low_byte, low_nibble, high_nibble};
use extensions::Incrementor;
use cpu::{Register, Flags, CarryFlag, HalfCarryFlag, ZeroFlag, SubtractFlag};
/// True when adding the low nibbles of `val1` and `val2` (plus an optional
/// carry-in) overflows past 0x0F, i.e. a carry out of bit 3.
fn half_carry(val1: u8, val2: u8, with_carry: bool) -> bool {
    let carry_in = if with_carry { 1 } else { 0 };
    low_nibble(val1) + low_nibble(val2) + carry_in > 0x0F
}
/// True when `val1 + val2` (plus an optional carry-in) exceeds 0xFF, i.e. a
/// carry out of bit 7. The sum is widened to u16 so it cannot overflow.
fn carry(val1: u8, val2: u8, with_carry: bool) -> bool {
    let carry_in: u16 = if with_carry { 1 } else { 0 };
    val1 as u16 + val2 as u16 + carry_in > 0xFF
}
/// Shared implementation for ADD/ADC: adds `second` (plus the incoming
/// CarryFlag when `include_carry` is set) into `first` and rebuilds the
/// flags register from scratch.
///
/// Flags: HalfCarryFlag on carry out of bit 3, CarryFlag on carry out of
/// bit 7, ZeroFlag when the (wrapped) result is 0. SubtractFlag is always
/// cleared since this is an addition.
///
/// Fixes vs. the original:
/// - The sum is computed in u16 and truncated, making the intended
///   wrap-around (e.g. 0xFA + 0x07 -> 0x01) explicit rather than relying on
///   silent u8 overflow.
/// - ZeroFlag is now set whenever the result is 0, even when a carry also
///   occurred (e.g. 0x80 + 0x80 -> 0x00 sets ZeroFlag | CarryFlag);
///   previously Zero was only reported when no other flag was set.
fn add_internal(first: &mut Register<u8>, second: u8, freg: &mut Register<Flags>, include_carry: bool) {
    let val1 = first.read();
    let val2 = second;
    // Carry-in is only honoured for ADC-style calls.
    let carry_in = include_carry && freg.read().contains(CarryFlag);
    let mut carry_val = 0u16;
    if carry_in { carry_val = 1; }
    let result = (val1 as u16 + val2 as u16 + carry_val) as u8;
    let mut flags = Flags::empty();
    if half_carry(val1, val2, carry_in) {
        flags = flags | HalfCarryFlag;
    }
    if carry(val1, val2, carry_in) {
        flags = flags | CarryFlag;
    }
    if result == 0 {
        flags = flags | ZeroFlag;
    }
    first.write(result);
    freg.write(flags);
}
/// Add the value of two registers and store it in the first register.
/// Plain ADD: the incoming CarryFlag is ignored (see `adc` for ADC).
pub fn add(first: &mut Register<u8>, second: u8, freg: &mut Register<Flags>) {
    add_internal(first, second, freg, false);
}
/// Load the value from one register into another
pub fn ld_reg_to_reg(target: &mut Register<u8>, source: &Register<u8>) {
debug!("ld_reg_to_reg: {}", source.read());
let val = source.read();
target.write(val);
}
/// Loads the single byte at the program counter (the immediate operand) into
/// `reg`, then advances the program counter past it.
pub fn ld_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>) {
    let val = mem.read_byte(pc.read());
    debug!("ld_next_byte_to_reg: {} {}", pc.read(), val);
    pc.increment();
    reg.write(val);
}
/// Loads the next two bytes into the passed in registers
pub fn ld_next_two_byte_into_reg_pair(mem: &Memory, pc: &mut Register<u16>,
hb: &mut Register<u8>, lb: &mut Register<u8>) {
let val = mem.read_word(pc.read());
pc.increment();
pc.increment();
debug!("ld_next_two_bytes_into_reg_pair: {}", val);
hb.write(high_byte(val));
lb.write(low_byte(val));
}
/// Writes the passed in value to memory at the address pointed to by combined address parameters
pub fn write_value_to_memory_at_address(mem: &mut Memory, val: u8, addr_msb: u8, addr_lsb: u8) {
let addr = ((addr_msb as uint) << 8) + addr_lsb as uint;
mem.write_byte(addr as u16, val);
}
/// Increments the pair of registers as if they represent a 16-bit value
pub fn increment_register_pair(msb: &mut Register<u8>,lsb: &mut Register<u8>) {
let incremented_val = ((msb.read() as uint) << 8) + lsb.read() as uint + 1;
msb.write(high_byte(incremented_val as u16));
lsb.write(low_byte(incremented_val as u16));
}
/// Increment register by 1
/// Set ZeroFlag if result is 0
/// Set HalfCarryFlag if there is a carry from bit 3
pub fn increment_register(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = reg.read();
let mut flags = freg.read();
flags.remove(SubtractFlag);
flags.remove(HalfCarryFlag);
flags.remove(ZeroFlag);
if low_nibble(val) == 0xF {
flags.insert(HalfCarryFlag);
}
reg.increment();
if reg.read() == 0 {
flags.insert(ZeroFlag);
}
freg.write(flags);
}
/// Decrement register by 1
/// Set ZeroFlag if result is 0
/// Set SubtractFlag
/// Set HalfCarryFlag if there is no borrow from bit 4
pub fn decrement_register(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = reg.read();
let mut flags = freg.read() | SubtractFlag;
flags.remove(ZeroFlag);
flags.remove(HalfCarryFlag);
if (val & 0x0F) > 0 {
flags = flags | HalfCarryFlag;
}
reg.decrement();
if reg.read() == 0x00 {
flags = flags | ZeroFlag;
}
freg.write(flags);
}
/// Rotate register left
/// Set ZeroFlag if result is zero
/// Set CarryFlag if bit 7 is 1
pub fn rotate_left_with_carry(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = reg.read();
freg.write(Flags::empty());
if val == 0 {
freg.write(ZeroFlag);
return;
}
if val & 0x80 != 0 {
freg.write(CarryFlag);
}
reg.write(val << 1);
}
/// Write sp to address with value of next two bytes
pub fn write_sp_to_address_immediate(mem: &mut Memory, pc: &mut Register<u16>, sp: &Register<u16>){
let addr = mem.read_word(pc.read());
pc.increment();
pc.increment();
debug!("Writing {} to {}", sp.read(), addr);
mem.write_word(addr, sp.read());
}
/// Performs no operation and consumes a cycle
pub fn nop() {
debug!("nop");
}
/// Adds two sets of registers as 16 bit numbers with carries counted on bit 11 and 15
pub fn add_register_pair_to_register_pair(rega: &mut Register<u8>, regb: &mut Register<u8>, reg1: u8, reg2: u8, freg: &mut Register<Flags>) {
let first = pack_u16(rega.read(), regb.read());
let second = pack_u16(reg1, reg2);
let sum = first + second;
// Reset subtract flag, leave ZeroFlag alone
let mut flags = freg.read();
flags.remove(SubtractFlag);
flags.remove(CarryFlag);
flags.remove(HalfCarryFlag);
if high_nibble(rega.read()) + high_nibble(reg1) > 15 {
flags.insert(CarryFlag)
}
if low_nibble(rega.read()) + low_nibble(reg1) > 15 {
flags.insert(HalfCarryFlag);
}
rega.write(high_byte(sum));
regb.write(low_byte(sum));
freg.write(flags);
}
pub fn ld_from_reg_pair_as_address(mem: &Memory, reg: &mut Register<u8>, reg1: u8, reg2: u8) {
let addr = pack_u16(reg1, reg2);
let val = mem.read_byte(addr);
reg.write(val);
}
pub fn decrement_register_pair(reg1: &mut Register<u8>, reg2: &mut Register<u8>) {
let val = pack_u16(reg1.read(), reg2.read());
let ans = val - 1;
reg1.write(high_byte(ans));
reg2.write(low_byte(ans));
}
pub fn rotate_right_with_carry(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = reg.read();
if val == 0 {
freg.write(ZeroFlag);
} else {
if val & 0x01 == 1 {
freg.write(CarryFlag);
reg.write(val >> 1);
}
}
}
/// Add n to current address and jump to it - n = one byte signed immediate value
/// NOTE(review): the offset is decoded as sign-and-magnitude (bit 7 = sign,
/// low 7 bits = distance), not as a two's-complement i8 — 0x8A jumps back by
/// 0x0A, whereas `0x8A as i8` would be -118. Confirm which encoding the
/// target CPU expects; the unit tests encode the sign-magnitude behaviour.
pub fn jump_by_signed_immediate(mem: &Memory, pc: &mut Register<u16>) {
    let offset = mem.read_byte(pc.read());
    pc.increment();
    // The jump is relative to the address *after* the operand byte.
    let current_pc = pc.read();
    let mut new_pc = 0;
    if (offset & 0x80) == 0 {
        new_pc = current_pc + offset as u16;
    } else {
        new_pc = current_pc - (offset & 0x7F) as u16;
    }
    pc.write(new_pc);
}
pub fn relative_jmp_by_signed_immediate_if_not_flag(mem: &Memory, pc: &mut Register<u16>, freg: &Register<Flags>, flag: Flags) {
if !freg.read().contains(flag) {
jump_by_signed_immediate(mem, pc);
} else {
pc.increment();
}
}
pub fn write_value_to_memory_at_address_and_increment_register(mem: &mut Memory, val: u8, high_reg: &mut Register<u8>, low_reg: &mut Register<u8>) {
let address = pack_u16(high_reg.read(), low_reg.read());
mem.write_byte(address, val);
let new_address = address + 1;
high_reg.write(high_byte(new_address));
low_reg.write(low_byte(new_address));
}
pub fn write_value_to_memory_at_address_and_decrement_register(mem: &mut Memory, val: u8, high_reg: &mut Register<u8>, low_reg: &mut Register<u8>) {
let address = pack_u16(high_reg.read(), low_reg.read());
mem.write_byte(address, val);
let new_address = address - 1;
high_reg.write(high_byte(new_address));
low_reg.write(low_byte(new_address));
}
pub fn relative_jmp_by_signed_immediate_if_flag(mem: &Memory, pc: &mut Register<u16>, freg: &Register<Flags>, flag: Flags) {
if freg.read().contains(flag) {
jump_by_signed_immediate(mem, pc);
} else {
pc.increment();
}
}
pub fn ld_from_address_pointed_to_by_register_pair_and_increment_register_pair(mem: &Memory, reg: &mut Register<u8>, high_byte: &mut Register<u8>, low_byte: &mut Register<u8>) {
let address = pack_u16(high_byte.read(), low_byte.read());
let val = mem.read_byte(address);
reg.write(val);
increment_register_pair(high_byte, low_byte);
}
pub fn complement(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = reg.read();
reg.write(!val);
let mut flags = freg.read();
flags.insert(HalfCarryFlag);
flags.insert(SubtractFlag);
freg.write(flags);
}
/// Loads the next two bytes into the passed in register (sp)
pub fn ld_next_two_bytes_into_reg(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u16>) {
let val = mem.read_word(pc.read());
pc.increment();
pc.increment();
debug!("ld_next_two_bytes_into_reg: {}", val);
reg.write(val);
}
pub fn increment_value_at_address(mem: &mut Memory, hb: u8, lb: u8, freg: &mut Register<Flags>) {
let addr = pack_u16(hb, lb);
let val = mem.read_byte(addr);
let mut reg = Register::new(val);
increment_register(&mut reg, freg);
mem.write_byte(addr, reg.read());
}
pub fn decrement_value_at_address(mem: &mut Memory, hb: u8, lb: u8, freg: &mut Register<Flags>) {
let addr = pack_u16(hb, lb);
let val = mem.read_byte(addr);
let mut reg = Register::new(val);
decrement_register(&mut reg, freg);
mem.write_byte(addr, reg.read());
}
pub fn ld_immediate_into_address(mem: &mut Memory, pc: &mut Register<u16>, hb: u8, lb: u8) {
let addr = pack_u16(hb, lb);
let val = mem.read_byte(pc.read());
pc.increment();
mem.write_byte(addr, val);
}
pub fn set_flag(freg: &mut Register<Flags>, flag: Flags) {
let mut flags = freg.read();
flags.insert(flag);
freg.write(flags);
}
pub fn ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair(mem: &Memory, reg: &mut Register<u8>, high_byte: &mut Register<u8>, low_byte: &mut Register<u8>) {
let address = pack_u16(high_byte.read(), low_byte.read());
let val = mem.read_byte(address);
reg.write(val);
decrement_register_pair(high_byte, low_byte);
}
pub fn reset_flag(freg: &mut Register<Flags>, flag: Flags) {
let mut f = freg.read();
f.remove(flag);
freg.write(f);
}
pub fn copy_value_into_register(reg: &mut Register<u8>, val: u8) {
reg.write(val);
}
pub fn add_value_at_address(mem: &Memory, reg: &mut Register<u8>, hb: u8, lb: u8, freg: &mut Register<Flags>) {
let val = mem.read_byte(pack_u16(hb, lb));
add(reg, val, freg);
}
/// ADC: adds `val` plus the current CarryFlag into `reg`, rebuilding flags
/// via `add_internal`.
pub fn adc(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
    add_internal(reg, val, freg, true);
}
#[test]
fn test_adc() {
    let mut first = Register::new(0x05);
    let mut flags = Register::new(SubtractFlag | CarryFlag);
    // 0x05 + 0x0A + carry(1) = 0x10 -> half-carry out of bit 3.
    adc(&mut first, 0x0A, &mut flags);
    assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
    assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
    flags.write(CarryFlag);
    let mut a = Register::new(0xFA);
    // 0xFA + 0x06 + carry(1) = 0x101 -> wraps to 0x01 with carry + half-carry.
    adc(&mut a, 0x06, &mut flags);
    assert!(a.read() == 0x01);
    assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
    flags.write(CarryFlag);
    a.write(0);
    // 0 + 0 + carry(1) = 1 -> no flags at all.
    adc(&mut a, 0, &mut flags);
    assert!(a.read() == 0x01);
    assert!(flags.read() == Flags::empty());
    a.write(0);
    // Carry is now clear, so 0 + 0 = 0 -> ZeroFlag.
    adc(&mut a, 0, &mut flags);
    assert!(flags.read() == ZeroFlag);
}
#[test]
fn test_add_value_at_address() {
let mut first = Register::new(0x05);
let mut mem = Memory::new(0xFFFF);
let mut flags = Register::new(SubtractFlag | CarryFlag);
mem.write_byte(0x8476, 0x0B);
add_value_at_address(&mut mem, &mut first, 0x84, 0x76, &mut flags);
assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
let mut a = Register::new(0xFA);
mem.write_byte(0xADCD, 0x07);
add_value_at_address(&mut mem, &mut a, 0xAD, 0xCD, &mut flags);
assert!(a.read() == 0x01);
assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
a.write(0);
add_value_at_address(&mut mem, &mut a, 0x11, 0x11, &mut flags);
assert!(a.read() == 0x0);
assert!(flags.read() == ZeroFlag);
}
#[test]
fn test_copy_value_into_register() {
let mut reg = Register::new(10);
copy_value_into_register(&mut reg, 0x18);
assert!(reg.read() == 0x18);
}
#[test]
fn test_reset_flag() {
let mut freg = Register::new(CarryFlag | ZeroFlag);
reset_flag(&mut freg, CarryFlag);
assert!(!freg.read().contains(CarryFlag));
assert!(freg.read().contains(ZeroFlag));
}
#[test]
fn test_ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair() {
let mut mem = Memory::new(0xFFFF);
let mut reg = Register::new(0x12);
let mut high_byte = Register::new(0xAB);
let mut low_byte = Register::new(0xCD);
mem.write_byte(0xABCD, 0x54);
ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair(&mem, &mut reg, &mut high_byte, &mut low_byte);
assert!(reg.read() == 0x54);
assert!(low_byte.read() == 0xCC);
}
#[test]
fn test_set_flag() {
let mut freg = Register::new(Flags::empty());
set_flag(&mut freg, CarryFlag);
assert!(freg.read().contains(CarryFlag));
}
#[test]
fn test_ld_immediate_into_address() {
let mut mem = Memory::new(0xFFFF);
let mut pc = Register::new(0xAD12);
mem.write_byte(0xAD12, 0xBB);
ld_immediate_into_address(&mut mem, &mut pc, 0x12, 0x34);
assert!(mem.read_byte(0x1234) == 0xBB);
assert!(pc.read() == 0xAD13);
}
#[test]
fn test_decrement_value_at_address() {
let mut mem = Memory::new(0xFFFF);
let mut freg = Register::new(CarryFlag);
mem.write_byte(0x1010, 1);
decrement_value_at_address(&mut mem, 0x10, 0x10, &mut freg);
assert!(mem.read_byte(0x1010) == 0);
assert!(freg.read().is_all());
mem.write_byte(0x01AB, 0x20);
decrement_value_at_address(&mut mem, 0x01, 0xAB, &mut freg);
assert!(mem.read_byte(0x01AB) == 0x1F);
assert!(freg.read() == CarryFlag | SubtractFlag);
freg.write(ZeroFlag);
mem.write_byte(0xABCD, 0xED);
decrement_value_at_address(&mut mem, 0xAB, 0xCD, &mut freg);
assert!(mem.read_byte(0xABCD) == 0xEC);
assert!(freg.read() == SubtractFlag | HalfCarryFlag);
}
#[test]
fn test_increment_value_at_address() {
let mut mem = Memory::new(0xFFFF);
let mut freg = Register::new(CarryFlag);
increment_value_at_address(&mut mem, 0x10, 0x10, &mut freg);
assert!(mem.read_byte(0x1010) == 1);
assert!(freg.read() == CarryFlag);
mem.write_byte(0x01AB, 0x1F);
increment_value_at_address(&mut mem, 0x01, 0xAB, &mut freg);
assert!(mem.read_byte(0x01AB) == 0x20);
assert!(freg.read().contains(CarryFlag));
assert!(freg.read().contains(HalfCarryFlag));
freg.write(SubtractFlag);
mem.write_byte(0xABCD, 0xED);
increment_value_at_address(&mut mem, 0xAB, 0xCD, &mut freg);
assert!(mem.read_byte(0xABCD) == 0xEE);
assert!(freg.read() == Flags::empty());
}
#[test]
fn test_write_value_to_memory_at_address_and_decrement_register() {
let mut mem = Memory::new(0xFFFF);
let mut val = 0x8;
let mut high_byte = Register::new(0x12);
let mut low_byte = Register::new(0x34);
write_value_to_memory_at_address_and_decrement_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(low_byte.read() == 0x33, "Should increment register");
assert!(mem.read_byte(0x1234) == 0x8, "Should correctly write value");
high_byte.write(0x11);
low_byte.write(0x00);
write_value_to_memory_at_address_and_decrement_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(mem.read_byte(0x1100) == 0x8);
assert!(high_byte.read() == 0x10);
assert!(low_byte.read() == 0xFF);
}
#[test]
fn test_ld_next_two_bytes_into_reg() {
let mut mem = Memory::new(65536);
let mut pc = Register::new(11);
let mut reg = Register::new(0);
mem.write_word(11, 0xDEAB);
ld_next_two_bytes_into_reg(&mem, &mut pc, &mut reg);
assert!(pc.read() == 13);
assert!(reg.read() == 0xDEAB);
}
#[test]
fn test_complement() {
let mut a = Register::new(0x11);
let mut freg = Register::new(ZeroFlag | CarryFlag);
complement(&mut a, &mut freg);
assert!(a.read() == !0x11);
assert!(freg.read().is_all());
freg.write(Flags::empty());
complement(&mut a, &mut freg);
assert!(a.read() == 0x11);
assert!(freg.read().contains(HalfCarryFlag));
assert!(freg.read().contains(SubtractFlag));
}
#[test]
fn test_ld_from_address_pointed_to_by_register_pair_and_increment_register_pair() {
let mut mem = Memory::new(0xFFFF);
let mut reg = Register::new(0x12);
let mut high_byte = Register::new(0xAB);
let mut low_byte = Register::new(0xCD);
mem.write_byte(0xABCD, 0x54);
ld_from_address_pointed_to_by_register_pair_and_increment_register_pair(&mem, &mut reg, &mut high_byte, &mut low_byte);
assert!(reg.read() == 0x54);
assert!(low_byte.read() == 0xCE);
}
#[test]
fn test_relative_jmp_by_signed_immediate_if_flag() {
let mut mem = Memory::new(0xFFFF);
let mut pc = Register::new(0x1234);
let mut freg = Register::new(Flags::empty());
// Forwards
freg.write(ZeroFlag);
mem.write_byte(0x1234, 0x55);
relative_jmp_by_signed_immediate_if_flag(&mem, &mut pc, &freg, ZeroFlag);
assert!(pc.read() == 0x128A, "Should jump forwards");
// Backwards
freg.write(ZeroFlag);
mem.write_byte(0x128A, 0x81);
relative_jmp_by_signed_immediate_if_flag(&mem, &mut pc, &freg, ZeroFlag);
assert!(pc.read() == 0x128A, "Should jump back");
// No jump because ZeroFlag is not set
freg.write(Flags::empty());
mem.write_byte(0x1288, 0xFF);
relative_jmp_by_signed_immediate_if_flag(&mem, &mut pc, &freg, ZeroFlag);
assert!(pc.read() == 0x128B, "Should not jump if ZeroFlag is not set. PC should increment to go past immediate value");
}
#[test]
fn test_write_value_to_memory_at_address_and_increment_register() {
let mut mem = Memory::new(0xFFFF);
let mut val = 0x8;
let mut high_byte = Register::new(0x12);
let mut low_byte = Register::new(0x34);
write_value_to_memory_at_address_and_increment_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(low_byte.read() == 0x35, "Should increment register");
assert!(mem.read_byte(0x1234) == 0x8, "Should correctly write value");
low_byte.write(0xFF);
write_value_to_memory_at_address_and_increment_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(mem.read_byte(0x12FF) == 0x8);
assert!(high_byte.read() == 0x13);
assert!(low_byte.read() == 0x00);
}
#[test]
fn test_relative_jmp_by_signed_immediate_if_not_flag() {
let mut mem = Memory::new(0xFFFF);
let mut pc = Register::new(0x1234);
let mut freg = Register::new(Flags::empty());
// Forwards
mem.write_byte(0x1234, 0x55);
relative_jmp_by_signed_immediate_if_not_flag(&mem, &mut pc, &freg, ZeroFlag);
assert!(pc.read() == 0x128A, "Should jump forwards");
// Backwards
mem.write_byte(0x1289, 0x85);
relative_jmp_by_signed_immediate_if_not_flag(&mem, &mut pc, &freg, CarryFlag);
assert!(pc.read() == 0x128B, "Should jump back");
// No jump because ZeroFlag is set
freg.write(ZeroFlag);
mem.write_byte(0x128B, 0xFF);
relative_jmp_by_signed_immediate_if_not_flag(&mem, &mut pc, &freg, ZeroFlag);
println!("{}", pc.read());
assert!(pc.read() == 0x128C, "Should not jump if ZeroFlag is set");
}
#[test]
fn test_jump_by_signed_immediate() {
let mut mem = Memory::new(0x10000);
let mut pc = Register::new(0x0101);
// 0x8A = -10 as i8
mem.write_byte(0x0101, 0x8A);
jump_by_signed_immediate(&mem, &mut pc);
assert!(pc.read() == 0xF8, "Should jump backwards with negative number");
mem.write_byte(0xF8, 0x37);
jump_by_signed_immediate(&mem, &mut pc);
assert!(pc.read() == 0x130);
}
#[test]
fn test_rotate_right_with_carry() {
let mut reg = Register::new(0x99);
let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
rotate_right_with_carry(&mut reg, &mut freg);
assert!(reg.read() == 0x4C);
assert!(freg.read() == CarryFlag);
reg.write(0x0);
rotate_right_with_carry(&mut reg, &mut freg);
assert!(reg.read() == 0x0);
assert!(freg.read() == ZeroFlag);
}
#[test]
fn test_decrement_register_pair() {
let mut reg1 = Register::new(0x70);
let mut reg2 = Register::new(0x00);
decrement_register_pair(&mut reg1, &mut reg2);
assert!(reg1.read() == 0x6F);
assert!(reg2.read() == 0xFF);
}
#[test]
fn test_ld_from_reg_pair_as_address() {
let mut mem = Memory::new(65000);
let mut rega = Register::new(0x00);
let mut reg1 = Register::new(0x12);
let mut reg2 = Register::new(0x34);
mem.write_byte(0x1234, 0xAA);
ld_from_reg_pair_as_address(&mem, &mut rega, reg1.read(), reg2.read());
assert!(rega.read() == 0xAA);
}
#[test]
fn test_add_register_pair_to_register_pair() {
let mut rega = Register::new(0x11);
let mut regb = Register::new(0x11);
let mut reg1 = Register::new(0x11);
let mut reg2 = Register::new(0x11);
let mut freg = Register::new(ZeroFlag | SubtractFlag | HalfCarryFlag | CarryFlag);
// Basic add make sure ZeroFlag isn't affected
add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
assert!(pack_u16(rega.read(), regb.read()) == 0x2222);
assert!(freg.read() == ZeroFlag);
rega.write(0xF1);
regb.write(0xAB);
reg1.write(0x12);
reg2.write(0x12);
// Carry from bit 15
add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
assert!(pack_u16(rega.read(), regb.read()) == 0x03BD);
assert!(freg.read() == ZeroFlag | CarryFlag);
rega.write(0x1E);
regb.write(0xAB);
reg1.write(0x12);
reg2.write(0x16);
freg.write(ZeroFlag);
// Carry from bit 11
add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
assert!(pack_u16(rega.read(), regb.read()) == 0x30C1);
println!("{}", freg.read().bits());
assert!(freg.read() == ZeroFlag | HalfCarryFlag);
}
#[test]
fn test_write_stack_pointer_to_address_immediate() {
let mut sp = Register::new(0xBEEF);
let mut pc = Register::new(0x111);
let mut mem = Memory::new(65647);
mem.write_byte(0x111, 0xAD);
mem.write_byte(0x112, 0xDE);
write_sp_to_address_immediate(&mut mem, &mut pc, &sp);
assert!(pc.read() == 0x113);
assert!(mem.read_word(0xDEAD) == 0xBEEF);
}
#[test]
fn test_rotate_left_with_carry() {
let mut reg = Register::new(0x0F);
let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
rotate_left_with_carry(&mut reg, &mut freg);
// Rotate should happen
assert!(reg.read() == 0x1E);
assert!(freg.read() == Flags::empty());
let mut regb = Register::new(0x00);
rotate_left_with_carry(&mut regb, &mut freg);
// Zero should return zero with ZeroFlag
assert!(regb.read() == 0x00);
assert!(freg.read() == ZeroFlag);
let mut regc = Register::new(0xFF);
rotate_left_with_carry(&mut regc, &mut freg);
// Carry should get set
assert!(regc.read() == 0xFE);
assert!(freg.read() == CarryFlag);
}
#[test]
fn test_decrement_register() {
let mut reg = Register::new(1);
let mut freg = Register::new(Flags::empty());
decrement_register(&mut reg, &mut freg);
assert!(reg.read() == 0);
assert!(freg.read() == ZeroFlag | SubtractFlag | HalfCarryFlag);
reg.write(0xF1);
freg.write(Flags::empty());
decrement_register(&mut reg, &mut freg);
assert!(reg.read() == 0xF0);
assert!(freg.read() == SubtractFlag | HalfCarryFlag);
reg.write(0xF0);
freg.write(Flags::empty());
decrement_register(&mut reg, &mut freg);
assert!(reg.read() == 0xEF);
assert!(freg.read() == SubtractFlag);
}
#[test]
fn test_increment_register() {
let mut reg = Register::new(1);
let mut freg = Register::new(ZeroFlag | HalfCarryFlag);
increment_register(&mut reg, &mut freg);
assert!(reg.read() == 2);
assert!(freg.read() == Flags::empty());
let mut regb = Register::new(0x0F);
increment_register(&mut regb, &mut freg);
assert!(regb.read() == 0x10);
assert!(freg.read() == HalfCarryFlag);
let mut regc = Register::new(0xFF);
freg.write(Flags::empty());
increment_register(&mut regc, &mut freg);
assert!(regc.read() == 0x00);
assert!(freg.read() == HalfCarryFlag | ZeroFlag);
}
#[test]
fn test_add_reg_with_reg() {
let mut first = Register::new(0x05);
let mut second = Register::new(0x0B);
let mut flags = Register::new(SubtractFlag | CarryFlag);
add(&mut first, second.read(), &mut flags);
assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
let mut a = Register::new(0xFA);
let mut b = Register::new(0x07);
add(&mut a, b.read(), &mut flags);
assert!(a.read() == 0x01);
assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
a.write(0);
b.write(0);
add(&mut a, b.read(), &mut flags);
assert!(a.read() == 0x0);
assert!(flags.read() == ZeroFlag);
}
#[test]
fn test_ld_immediate() {
let mut mem = Memory::new(65536);
let mut pc = Register::new(11);
let mut reg = Register::new(0);
mem.write_byte(11, 0xFA);
ld_immediate(&mem, &mut pc, &mut reg);
assert!(reg.read() == 0xFA);
assert!(pc.read() == 12);
}
#[test]
fn test_ld_reg_to_reg() {
let mut target = Register::new(5);
let mut source = Register::new(10);
ld_reg_to_reg(&mut target, &source);
assert!(target.read() == 10);
assert!(source.read() == 10);
}
#[test]
fn test_ld_next_two_byte_into_reg_pair() {
let mut mem = Memory::new(65536);
let mut pc = Register::new(11);
let mut reg = Register::new(0);
let mut reg2 = Register::new(0);
mem.write_word(11, 0xDEAB);
ld_next_two_byte_into_reg_pair(&mem, &mut pc, &mut reg, &mut reg2);
assert!(pc.read() == 13);
assert!(reg.read() == 0xDE);
assert!(reg2.read() == 0xAB);
}
#[test]
fn test_write_value_to_memory_at_address() {
let mut mem = Memory::new(65536);
let mut msb = 0xFF;
let mut lsb = 0x11;
let val = 100;
write_value_to_memory_at_address(&mut mem, val, msb, lsb);
assert!(mem.read_byte(0xFF11) == val, "Memory does match what was written");
}
#[test]
fn test_increment_register_pair() {
let mut msb = Register::new(0x11);
let mut lsb = Register::new(0x11);
increment_register_pair(&mut msb, &mut lsb);
assert!(msb.read() == 0x11);
assert!(lsb.read() == 0x12);
let mut msb_2 = Register::new(0x10);
let mut lsb_2 = Register::new(0xFF);
increment_register_pair(&mut msb_2, &mut lsb_2);
assert!(msb_2.read() == 0x11);
assert!(lsb_2.read() == 0x00);
}
|
use memory::{EmptyMemory, Memory, pack_u16, high_byte, low_byte, low_nibble, high_nibble};
use extensions::Incrementor;
use cpu::{Register, Flags, CarryFlag, HalfCarryFlag, ZeroFlag, SubtractFlag};
/// True when adding the low nibbles of `val1` and `val2` (plus an optional
/// carry-in) carries out of bit 3.
fn half_carry_for_add(val1: u8, val2: u8, with_carry: bool) -> bool {
    let carry_in = if with_carry { 1 } else { 0 };
    // Max operand is 15 + 15 + 1 = 31, so the u8 sum cannot overflow.
    return low_nibble(val1) + low_nibble(val2) + carry_in > 0x0F;
}
/// True when `val1 + val2` (plus an optional carry-in) carries out of bit 7.
fn carry_for_add(val1: u8, val2: u8, with_carry: bool) -> bool {
    let carry_in: u16 = if with_carry { 1 } else { 0 };
    // Widen to u16 so the full sum is representable before comparing.
    return val1 as u16 + val2 as u16 + carry_in > 0xFF;
}
/// Shared implementation for ADD/ADC: adds `second` (plus the carry bit when
/// `with_carry` is set and CarryFlag is currently held) into `first`.
/// Flags are rebuilt from scratch: ZeroFlag if the result is 0, HalfCarryFlag
/// on carry out of bit 3, CarryFlag on carry out of bit 7.
fn add_internal(first: &mut Register<u8>, second: u8, freg: &mut Register<Flags>, with_carry: bool) {
    let val1 = first.read();
    let val2 = second;
    // Only ADC-style calls consume the incoming carry bit.
    let do_carry = with_carry && freg.read().contains(CarryFlag);
    // 8-bit adds are expected to roll over (e.g. 0xFA + 0x07 -> 0x01),
    // so wrap explicitly instead of overflow-panicking.
    let mut result = val1.wrapping_add(val2);
    if do_carry {
        result = result.wrapping_add(1);
    }
    let mut flags = Flags::empty();
    if half_carry_for_add(val1, val2, do_carry) {
        flags = flags | HalfCarryFlag;
    }
    if carry_for_add(val1, val2, do_carry) {
        flags = flags | CarryFlag;
    }
    if result == 0 {
        // Insert rather than overwrite: a wrap to zero (e.g. 0x80 + 0x80)
        // must keep the Carry/HalfCarry bits computed above.
        flags = flags | ZeroFlag;
    }
    first.write(result);
    freg.write(flags);
}
/// Add the value of two registers and store it in the first register
pub fn add(first: &mut Register<u8>, second: u8, freg: &mut Register<Flags>) {
add_internal(first, second, freg, false);
}
/// Load the value from one register into another
pub fn ld_reg_to_reg(target: &mut Register<u8>, source: &Register<u8>) {
debug!("ld_reg_to_reg: {:X}", source.read());
let val = source.read();
target.write(val);
}
/// Loads the memory pointed to by the next two bytes into a register
pub fn ld_u8_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>) {
let val = u8_immediate(mem, pc);
debug!("ld_next_byte_to_reg: {:X}", val);
reg.write(val);
}
/// Loads the next two bytes into the passed in registers
pub fn ld_u16_immediate(mem: &Memory, pc: &mut Register<u16>,
hb: &mut Register<u8>, lb: &mut Register<u8>) {
let val = u16_immediate(mem, pc);
debug!("ld_next_two_bytes_into_reg_pair: {:X}", val);
hb.write(high_byte(val));
lb.write(low_byte(val));
}
/// Writes `val` into memory at the address formed by combining the two
/// address bytes (`addr_msb` high, `addr_lsb` low).
pub fn write_value_to_memory_at_address(mem: &mut Memory, val: u8, addr_msb: u8, addr_lsb: u8) {
    let addr = pack_u16(addr_msb, addr_lsb);
    debug!("write val to mem addr: {:X} val: {:X}", addr, val);
    // pack_u16 already yields a u16; no cast required.
    mem.write_byte(addr, val);
}
/// Increments the pair of registers as if they represent a 16-bit value
pub fn increment_register_pair(msb: &mut Register<u8>,lsb: &mut Register<u8>) {
debug!("increment register pair");
let incremented_val = ((msb.read() as uint) << 8) + lsb.read() as uint + 1;
msb.write(high_byte(incremented_val as u16));
lsb.write(low_byte(incremented_val as u16));
}
/// Increment register by 1
/// Set ZeroFlag if result is 0
/// Set HalfCarryFlag if there is a carry from bit 3
pub fn increment_register(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = reg.read();
let mut flags = freg.read();
flags.remove(SubtractFlag);
flags.remove(HalfCarryFlag);
flags.remove(ZeroFlag);
if low_nibble(val) == 0xF {
flags.insert(HalfCarryFlag);
}
reg.increment();
if reg.read() == 0 {
flags.insert(ZeroFlag);
}
debug!("increment reg new_val: {:X}", reg.read());
freg.write(flags);
}
/// Decrement register by 1
/// Set ZeroFlag if result is 0
/// Set SubtractFlag
/// Set HalfCarryFlag if there is no borrow from bit 4
pub fn decrement_register(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = reg.read();
let mut flags = freg.read() | SubtractFlag;
flags.remove(ZeroFlag);
flags.remove(HalfCarryFlag);
if (val & 0x0F) > 0 {
flags = flags | HalfCarryFlag;
}
reg.decrement();
if reg.read() == 0x00 {
flags = flags | ZeroFlag;
}
freg.write(flags);
}
fn internal_rotate_left_with_carry(val: u8, freg: &mut Register<Flags>) -> u8 {
freg.write(Flags::empty());
let bit_7 = (val & 0x80) >> 7;
if val == 0 {
freg.write(ZeroFlag);
return val;
}
if val & 0x80 != 0 {
freg.write(CarryFlag);
}
return (val << 1) | bit_7;
}
/// Rotate register left
/// Set ZeroFlag if result is zero
/// Set CarryFlag if bit 7 is 1
pub fn rotate_left_with_carry(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = internal_rotate_left_with_carry(reg.read(), freg);
reg.write(val);
}
pub fn rotate_left_with_carry_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
let val = internal_rotate_left_with_carry(mem.read_byte(addr), freg);
mem.write_byte(addr, val);
}
/// Write sp to address with value of next two bytes
pub fn write_u16_immediate_address(mem: &mut Memory, pc: &mut Register<u16>, val: u16){
let addr = u16_immediate(mem, pc);
debug!("Writing {} to {}", val, addr);
mem.write_word(addr, val);
}
/// Performs no operation and consumes a cycle
pub fn nop() {
}
/// Adds two register pairs as 16-bit values, storing the sum in rega/regb.
/// SubtractFlag is cleared; CarryFlag is set on carry out of bit 15 and
/// HalfCarryFlag on carry out of bit 11; ZeroFlag is left untouched.
pub fn add_register_pair_to_register_pair(rega: &mut Register<u8>, regb: &mut Register<u8>, reg1: u8, reg2: u8, freg: &mut Register<Flags>) {
    let first = pack_u16(rega.read(), regb.read());
    let second = pack_u16(reg1, reg2);
    // Widen to 32 bits so a carry out of bit 15 is observable (and the add
    // cannot overflow-panic); the stored result still wraps to 16 bits.
    let sum = first as u32 + second as u32;
    // Reset subtract flag, leave ZeroFlag alone
    let mut flags = freg.read();
    flags.remove(SubtractFlag);
    flags.remove(CarryFlag);
    flags.remove(HalfCarryFlag);
    if sum > 0xFFFF {
        flags.insert(CarryFlag);
    }
    // Carry out of bit 11 must account for carries propagating up from the
    // low bytes, so compare the full low-12-bit sums — checking the high
    // bytes' nibbles in isolation misses propagated carries.
    if (first & 0x0FFF) + (second & 0x0FFF) > 0x0FFF {
        flags.insert(HalfCarryFlag);
    }
    rega.write(high_byte(sum as u16));
    regb.write(low_byte(sum as u16));
    freg.write(flags);
}
pub fn ld_from_address(mem: &Memory, reg: &mut Register<u8>, addr: u16) {
let val = mem.read_byte(addr);
reg.write(val);
}
/// Decrements the register pair as one 16-bit value (reg1 high byte,
/// reg2 low byte), wrapping 0x0000 back to 0xFFFF. No flags are affected.
pub fn decrement_register_pair(reg1: &mut Register<u8>, reg2: &mut Register<u8>) {
    let val = pack_u16(reg1.read(), reg2.read());
    // wrapping_sub: 0x0000 must roll over to 0xFFFF instead of panicking.
    let ans = val.wrapping_sub(1);
    reg1.write(high_byte(ans));
    reg2.write(low_byte(ans));
}
fn internal_rotate_right_with_carry(val: u8, freg: &mut Register<Flags>) -> u8 {
let bit_1 = (val & 0x01) << 7;
if val == 0 {
freg.write(ZeroFlag);
return 0;
} else {
if val & 0x01 == 1 {
freg.write(CarryFlag);
}
return ((val >> 1) | bit_1);
}
}
pub fn rotate_right_with_carry(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = internal_rotate_right_with_carry(reg.read(), freg);
reg.write(val);
}
pub fn rotate_right_with_carry_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
let val = internal_rotate_right_with_carry(mem.read_byte(addr), freg);
mem.write_byte(addr, val);
}
/// JR: reads a one-byte signed immediate and jumps relative to the PC
/// position *after* the immediate has been consumed.
pub fn jump_by_signed_immediate(mem: &Memory, pc: &mut Register<u16>) {
    let offset = u8_immediate(mem, pc);
    // Sign-extend the offset to 16 bits and add with two's-complement
    // wrap-around; one expression covers both the forward (< 0x80) and
    // backward (>= 0x80) branches the original spelled out separately.
    let new_pc = pc.read().wrapping_add((offset as i8) as i16 as u16);
    debug!("jmp signed immediate new_pc: {:X}", new_pc);
    pc.write(new_pc);
}
pub fn write_value_to_memory_at_address_and_increment_register(mem: &mut Memory, val: u8, high_reg: &mut Register<u8>, low_reg: &mut Register<u8>) {
let address = pack_u16(high_reg.read(), low_reg.read());
mem.write_byte(address, val);
let new_address = address + 1;
high_reg.write(high_byte(new_address));
low_reg.write(low_byte(new_address));
}
pub fn write_value_to_memory_at_address_and_decrement_register(mem: &mut Memory, val: u8, high_reg: &mut Register<u8>, low_reg: &mut Register<u8>) {
let address = pack_u16(high_reg.read(), low_reg.read());
mem.write_byte(address, val);
let new_address = address - 1;
high_reg.write(high_byte(new_address));
low_reg.write(low_byte(new_address));
}
pub fn relative_jmp_by_signed_immediate_if_true(mem: &Memory, pc: &mut Register<u16>, should_jump: bool) {
if should_jump {
jump_by_signed_immediate(mem, pc);
} else {
pc.increment();
}
}
pub fn ld_from_address_pointed_to_by_register_pair_and_increment_register_pair(mem: &Memory, reg: &mut Register<u8>, high_byte: &mut Register<u8>, low_byte: &mut Register<u8>) {
let address = pack_u16(high_byte.read(), low_byte.read());
let val = mem.read_byte(address);
reg.write(val);
increment_register_pair(high_byte, low_byte);
debug!("ld from address and inc reg pair - addr: {:X} val: {:X} reg_pair_val: {:X}", address, val, pack_u16(high_byte.read(), low_byte.read()));
}
/// CPL: bitwise-complements the register. SubtractFlag and HalfCarryFlag
/// are set; ZeroFlag and CarryFlag are left untouched.
pub fn complement(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let inverted = !reg.read();
    reg.write(inverted);
    let mut flags = freg.read();
    flags.insert(SubtractFlag);
    flags.insert(HalfCarryFlag);
    freg.write(flags);
}
/// Loads the next two bytes into the passed in register (sp)
pub fn ld_next_two_bytes_into_reg(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u16>) {
let val = u16_immediate(mem, pc);
debug!("ld_next_two_bytes_into_reg: {:X}", val);
reg.write(val);
}
pub fn increment_value_at_address(mem: &mut Memory, hb: u8, lb: u8, freg: &mut Register<Flags>) {
let addr = pack_u16(hb, lb);
let val = mem.read_byte(addr);
let mut reg = Register::new(val);
increment_register(&mut reg, freg);
mem.write_byte(addr, reg.read());
}
pub fn decrement_value_at_address(mem: &mut Memory, hb: u8, lb: u8, freg: &mut Register<Flags>) {
let addr = pack_u16(hb, lb);
let val = mem.read_byte(addr);
let mut reg = Register::new(val);
decrement_register(&mut reg, freg);
mem.write_byte(addr, reg.read());
}
pub fn ld_u8_immediate_into_address(mem: &mut Memory, pc: &mut Register<u16>, hb: u8, lb: u8) {
let addr = pack_u16(hb, lb);
let val = u8_immediate(mem, pc);
mem.write_byte(addr, val);
}
/// Sets the given flag bit(s) in the flag register, preserving all others.
pub fn set_flag(freg: &mut Register<Flags>, flag: Flags) {
    let mut current = freg.read();
    current.insert(flag);
    freg.write(current);
}
pub fn ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair(mem: &Memory, reg: &mut Register<u8>, high_byte: &mut Register<u8>, low_byte: &mut Register<u8>) {
let address = pack_u16(high_byte.read(), low_byte.read());
let val = mem.read_byte(address);
reg.write(val);
decrement_register_pair(high_byte, low_byte);
}
/// CCF: complements (toggles) the carry flag.
/// SubtractFlag and HalfCarryFlag are cleared; ZeroFlag is preserved.
pub fn ccf(freg: &mut Register<Flags>) {
    let mut f = freg.read();
    f.remove(SubtractFlag);
    f.remove(HalfCarryFlag);
    // toggle, not set: a second CCF must restore the original carry state.
    f.toggle(CarryFlag);
    freg.write(f);
}
pub fn ld_u8(reg: &mut Register<u8>, val: u8) {
debug!("load val: {:X} into register", val);
reg.write(val);
}
pub fn add_value_at_address(mem: &Memory, reg: &mut Register<u8>, hb: u8, lb: u8, freg: &mut Register<Flags>) {
let val = mem.read_byte(pack_u16(hb, lb));
add(reg, val, freg);
}
pub fn adc(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
add_internal(reg, val, freg, true);
}
pub fn adc_value_at_address(mem: &Memory, reg: &mut Register<u8>, address: u16, freg: &mut Register<Flags>) {
let val = mem.read_byte(address);
adc(reg, val, freg);
}
/// True when subtracting the low nibble of `val2` (plus the borrow-in
/// `carry`) from `val1` does *not* borrow out of bit 4 — this codebase uses
/// the no-borrow convention (see decrement_register's doc comment).
fn half_carry_for_subtract(val1: u8, val2: u8, carry: u8) -> bool {
    let needed = low_nibble(val2) + carry;
    // A full low-nibble demand of 0x10 is still coverable when val1 has
    // bit 4 set.
    if needed == 0x10 && val1 >= 0x10 {
        return true;
    }
    return low_nibble(val1) >= needed;
}
/// True when no borrow occurs subtracting `val2 + carry` from `val1`
/// (no-borrow convention, matching half_carry_for_subtract).
/// Widened to u16 so `val2 + carry` cannot overflow when val2 == 0xFF and a
/// borrow-in is present — in that case the answer is always "borrow".
fn carry_for_subtract(val1: u8, val2: u8, carry: u8) -> bool {
    return val1 as u16 >= val2 as u16 + carry as u16;
}
/// Shared implementation for SUB/SBC-style subtraction: subtracts `val`
/// (plus the carry bit when `with_carry` is set and CarryFlag is held)
/// from `reg`, storing the result back in `reg`.
/// Flags: SubtractFlag always set; HalfCarryFlag/CarryFlag follow the
/// no-borrow convention of half_carry_for_subtract / carry_for_subtract;
/// ZeroFlag when the result is 0.
pub fn internal_sub(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>, with_carry: bool) {
    let reg_val = reg.read();
    let mut flags = SubtractFlag;
    let mut carry = 0;
    // SBC-style calls fold the incoming carry bit into the subtrahend.
    if freg.read().contains(CarryFlag) && with_carry {
        carry = 1;
    }
    if half_carry_for_subtract(reg_val, val, carry) {
        flags.insert(HalfCarryFlag);
    }
    if carry_for_subtract(reg_val, val, carry) {
        flags.insert(CarryFlag);
    }
    // NOTE(review): relies on wrapping underflow when val + carry > reg_val
    // (pre-1.0 Rust semantics); a modern debug build would panic here — confirm.
    let result = reg_val - val - carry;
    if result == 0 {
        flags.insert(ZeroFlag);
    }
    debug!("internal sub result {:X}", result);
    reg.write(result);
    freg.write(flags);
}
pub fn sub(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
internal_sub(reg, val, freg, false);
}
pub fn sbc(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
internal_sub(reg, val, freg, true);
}
pub fn sub_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
let val = mem.read_byte(addr);
internal_sub(reg, val, freg, false);
}
pub fn sbc_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
let val = mem.read_byte(addr);
internal_sub(reg, val, freg, true);
}
pub fn and(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
let mut flags = HalfCarryFlag;
let result = reg.read() & val;
if result == 0 {
flags.insert(ZeroFlag);
}
reg.write(result);
freg.write(flags);
}
pub fn xor(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
let mut flags = Flags::empty();
let result = reg.read() ^ val;
if result == 0 {
flags.insert(ZeroFlag);
}
reg.write(result);
freg.write(flags);
}
pub fn and_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
let val = mem.read_byte(addr);
and(reg, val, freg);
}
pub fn xor_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
let val = mem.read_byte(addr);
xor(reg, val, freg);
}
pub fn or(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
let mut flags = Flags::empty();
let result = reg.read() | val;
if result == 0 {
flags.insert(ZeroFlag);
}
reg.write(result);
freg.write(flags);
}
pub fn compare(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
let reg_val = reg.read();
let mut flags = SubtractFlag;
let mut carry = 0;
if half_carry_for_subtract(reg_val, val, carry) {
flags.insert(HalfCarryFlag);
}
if carry_for_subtract(reg_val, val, carry) {
flags.insert(CarryFlag);
}
if (reg_val == val) {
flags.insert(ZeroFlag);
}
freg.write(flags);
}
pub fn or_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
let val = mem.read_byte(addr);
or(reg, val, freg);
}
pub fn compare_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
let val = mem.read_byte(addr);
compare(reg, val, freg);
}
/// PUSH: places a 16-bit value on the stack. SP is pre-decremented by two,
/// then the value is written at the new SP (stack grows downward).
pub fn push(mem: &mut Memory, sp: &mut Register<u16>, val: u16) {
    sp.decrement();
    sp.decrement();
    debug!("push sp:{:X} val:{:X}", sp.read(), val);
    mem.write_word(sp.read(), val);
}
fn pop_internal(mem: &Memory, sp: &mut Register<u16>) -> u16 {
let val = mem.read_word(sp.read());
sp.increment();
sp.increment();
return val;
}
pub fn pop(mem: &Memory, sp: &mut Register<u16>, hb: &mut Register<u8>, lb: &mut Register<u8>) {
let val = pop_internal(mem, sp);
debug!("pop val: {:X}", val);
hb.write(high_byte(val));
lb.write(low_byte(val));
}
/// RET: when `should_execute` is true, pops the return address off the
/// stack into PC (SP advances by two). Conditional returns pass their flag
/// test as `should_execute`; when false, PC and SP are left untouched.
pub fn ret(mem: &Memory, pc: &mut Register<u16>, sp: &mut Register<u16>, should_execute: bool) {
    if should_execute {
        let addr = pop_internal(mem, sp);
        debug!("ret to {:X}", addr);
        pc.write(addr);
    }
}
pub fn jp_u16_immediate(mem: &Memory, pc: &mut Register<u16>) {
let addr = u16_immediate(mem, pc);
debug!("jmp {:X}", addr);
pc.write(addr);
}
pub fn jp_u16_immediate_if_true(mem: &Memory,pc: &mut Register<u16>, should_jump: bool) {
if should_jump {
jp_u16_immediate(mem, pc);
} else {
pc.increment();
pc.increment();
}
}
pub fn call_immediate_if_true(mem: &mut Memory, pc: &mut Register<u16>, sp: &mut Register<u16>, should_jump: bool) {
if should_jump {
let new_addr = u16_immediate(mem, pc);
call(mem, pc, sp, new_addr);
} else {
pc.increment();
pc.increment();
}
}
pub fn add_u8_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>, freg: &mut Register<Flags>, with_carry: bool) {
let val = u8_immediate(mem, pc);
add_internal(reg, val, freg, with_carry);
}
pub fn call(mem: &mut Memory, pc: &mut Register<u16>, sp: &mut Register<u16>, addr: u16) {
push(mem, sp, pc.read());
pc.write(addr);
}
pub fn sub_u8_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>, freg: &mut Register<Flags>, with_carry: bool) {
let val = u8_immediate(mem, pc);
internal_sub(reg, val, freg, with_carry);
}
pub fn reti(mem: &Memory, pc: &mut Register<u16>, sp: &mut Register<u16>, ime: &mut bool) {
ret(mem, pc, sp, true);
*ime = true;
}
/// Fetches the byte at PC and advances PC past it.
pub fn u8_immediate(mem: &Memory, pc: &mut Register<u16>) -> u8 {
    let byte = mem.read_byte(pc.read());
    pc.increment();
    return byte;
}
/// Fetches the 16-bit word at PC and advances PC past both of its bytes.
pub fn u16_immediate(mem: &Memory, pc: &mut Register<u16>) -> u16 {
    let word = mem.read_word(pc.read());
    pc.increment();
    pc.increment();
    return word;
}
pub fn jp(pc: &mut Register<u16>, addr: u16) {
pc.write(addr);
}
/// BIT: tests bit `pos` (0-7) of `reg_val`. ZeroFlag is set when the bit is
/// 0 and cleared when it is 1; HalfCarryFlag is set, SubtractFlag cleared,
/// CarryFlag preserved. Panics if `pos` is out of range.
pub fn bit(reg_val: u8, pos: u8, freg: &mut Register<Flags>) {
    // Validate the precondition before doing any flag work.
    assert!(pos < 8, "Bit positions are 0-7");
    let mut flags = freg.read();
    flags.remove(SubtractFlag);
    flags.insert(HalfCarryFlag);
    // u8 shift amounts are fine; this drops the obsolete `uint` cast.
    if reg_val & (1u8 << pos) == 0 {
        flags.insert(ZeroFlag);
    } else {
        flags.remove(ZeroFlag);
    }
    freg.write(flags);
}
pub fn byte_at_address(mem: &Memory, addr: u16) -> u8 {
return mem.read_byte(addr);
}
pub fn res(reg: &mut Register<u8>, pos: u8) {
let mask = 0x01 << pos as uint;
let val = reg.read();
let reset_val = val & (!mask);
reg.write(reset_val);
}
pub fn res_at_addr(mem: &mut Memory, address: u16, pos: u8) {
let mask = 0x01 << pos as uint;
let val = mem.read_byte(address);
let reset_val = val & (!mask);
mem.write_byte(address, reset_val);
}
pub fn set(reg: &mut Register<u8>, pos: u8) {
let mask = 0x01 << pos as uint;
let val = reg.read();
let reset_val = val | mask;
reg.write(reset_val);
}
pub fn set_at_addr(mem: &mut Memory, address: u16, pos: u8) {
let mask = 0x01 << pos as uint;
let val = mem.read_byte(address);
let reset_val = val | mask;
mem.write_byte(address, reset_val);
}
pub fn rotate_left(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = internal_rotate_left(reg.read(), freg);
reg.write(val);
}
pub fn rotate_left_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
let val = internal_rotate_left(mem.read_byte(addr), freg);
mem.write_byte(addr, val);
}
fn internal_rotate_left(val: u8, freg: &mut Register<Flags>) -> u8 {
let mut carry = 0;
if freg.read().contains(CarryFlag) {
carry = 1;
}
freg.write(Flags::empty());
if val == 0 {
freg.write(ZeroFlag);
return val;
}
if val & 0x80 != 0 {
freg.write(CarryFlag);
}
return (val << 1) | carry;
}
/// Rotate `val` right through the carry flag (RR semantics): the previous
/// carry becomes bit 7 and the old bit 0 becomes the new carry.
/// NOTE(review): like internal_rotate_left, Zero is decided from the input
/// being zero rather than the result — see test_rotate_right, which expects
/// 0x01 -> 0x00 with only Carry set.
fn internal_rotate_right(val: u8, freg: &mut Register<Flags>) -> u8 {
    let mut carry = 0;
    if freg.read().contains(CarryFlag) {
        // Incoming carry re-enters at the top bit.
        carry = 0x80;
    }
    freg.write(Flags::empty());
    if val == 0 {
        freg.write(ZeroFlag);
        return val;
    }
    if val & 0x01 != 0 {
        freg.write(CarryFlag);
    }
    return (val >> 1) | carry;
}
/// RR: rotate the register right through the carry flag.
pub fn rotate_right(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let rotated = internal_rotate_right(reg.read(), freg);
    reg.write(rotated);
}
/// RR on memory: rotate the byte at `addr` right through the carry flag.
pub fn rotate_right_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let rotated = internal_rotate_right(mem.read_byte(addr), freg);
    mem.write_byte(addr, rotated);
}
/// SLA: shift left arithmetic; bit 7 moves into the carry flag, bit 0
/// becomes 0.
/// NOTE(review): Zero is set only when the *input* is zero — shifting 0x80
/// produces 0x00 with Carry set but Zero clear. Verify against the
/// hardware spec (which sets Z from the result) before relying on this.
fn internal_sla(val: u8, freg: &mut Register<Flags>) -> u8 {
    freg.write(Flags::empty());
    if val == 0 {
        freg.write(ZeroFlag);
        return 0;
    }
    if val & 0x80 != 0 {
        freg.write(CarryFlag);
    }
    return val << 1;
}
/// SLA: shift the register left arithmetically (bit 7 -> carry).
pub fn sla(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let shifted = internal_sla(reg.read(), freg);
    reg.write(shifted);
}
/// SLA on memory: shift the byte at `addr` left arithmetically.
pub fn sla_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let shifted = internal_sla(mem.read_byte(addr), freg);
    mem.write_byte(addr, shifted);
}
/// SRA: shift right arithmetic; bit 0 moves into the carry flag and bit 7
/// is preserved (sign extension).
/// NOTE(review): as with the other shift helpers, Zero is decided from the
/// input rather than the result.
fn internal_sra(val: u8, freg: &mut Register<Flags>) -> u8 {
    freg.write(Flags::empty());
    if val == 0 {
        freg.write(ZeroFlag);
        return 0;
    }
    if val & 0x01 == 1 {
        freg.write(CarryFlag);
    }
    // Keep the original top bit so the sign is preserved.
    let bit_7 = val & 0x80;
    return (val >> 1) | bit_7;
}
/// SRA: shift the register right arithmetically (bit 7 preserved).
pub fn sra(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let shifted = internal_sra(reg.read(), freg);
    reg.write(shifted);
}
/// SRA on memory: shift the byte at `addr` right arithmetically.
pub fn sra_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let shifted = internal_sra(mem.read_byte(addr), freg);
    mem.write_byte(addr, shifted);
}
/// SWAP: exchange the high and low nibbles of `val`. All flags are cleared
/// except Zero, which is set when the input is zero (the swap of zero is
/// zero, so this also matches the result).
fn internal_swap(val: u8, freg: &mut Register<Flags>) -> u8 {
    freg.write(Flags::empty());
    if val == 0 {
        freg.write(ZeroFlag);
    }
    return (low_nibble(val) << 4) + high_nibble(val);
}
/// SWAP: exchange the high and low nibbles of the register.
pub fn swap(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let swapped = internal_swap(reg.read(), freg);
    reg.write(swapped);
}
/// SWAP on memory: exchange the nibbles of the byte at `addr`.
pub fn swap_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let swapped = internal_swap(mem.read_byte(addr), freg);
    mem.write_byte(addr, swapped);
}
/// SRL: shift right logical; bit 0 moves into the carry flag, bit 7
/// becomes 0.
/// NOTE(review): Zero is decided from the input — shifting 0x01 yields
/// 0x00 with Carry set but Zero clear; verify against the hardware spec.
/// Unlike the sibling helpers this does not early-return on a zero input,
/// but the flag outcome is the same (0 & 0x01 == 0, 0 >> 1 == 0).
fn internal_srl(val: u8, freg: &mut Register<Flags>) -> u8 {
    freg.write(Flags::empty());
    if val == 0 {
        freg.write(ZeroFlag);
    }
    if val & 0x01 == 1 {
        freg.write(CarryFlag);
    }
    return val >> 1;
}
pub fn srl(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = internal_sra(reg.read(), freg);
reg.write(val);
}
pub fn srl_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
let val = internal_sra(mem.read_byte(addr), freg);
mem.write_byte(addr, val);
}
/// DI: clear the interrupt-master-enable flag.
pub fn disable_interrupts(ime: &mut bool) {
    debug!("disable interrupts");
    *ime = false;
}
/// Write `val` to the address given by the 16-bit immediate at PC
/// (e.g. LD (nn),A). Advances PC past the two operand bytes.
pub fn write_value_to_u16_immediate(mem: &mut Memory, pc: &mut Register<u16>, val: u8) {
    let addr = u16_immediate(mem, pc);
    debug!("write value to u16 immediate val: {:X} addr: {:X}", val, addr);
    mem.write_byte(addr, val);
}
/// LDH (n),A-style store: write `val` to 0xFF00 + the 8-bit immediate at
/// PC (the high-RAM / I/O page). Advances PC past the operand byte.
pub fn write_val_FF00_plus_immediate(mem: &mut Memory, pc: &mut Register<u16>, val: u8) {
    let lb = u8_immediate(mem, pc);
    let addr = 0xFF00 + lb as u16;
    debug!("write val: {:X} to addr: {:X}", val, addr);
    mem.write_byte(addr, val);
}
/// POP AF: pop a word off the stack; the high byte goes to A and the low
/// byte to the flag register. `from_bits_truncate` silently drops any bits
/// that are not defined flag bits.
pub fn pop_flags(mem: &Memory, sp: &mut Register<u16>, a: &mut Register<u8>, f: &mut Register<Flags>) {
    let val = pop_internal(mem, sp);
    debug!("POP AF val: {:X}", val);
    a.write(high_byte(val));
    f.write(Flags::from_bits_truncate(low_byte(val)));
}
/// LDH A,(n)-style load: read the byte at 0xFF00 + the 8-bit immediate at
/// PC into `reg`. Advances PC past the operand byte.
pub fn load_val_FF00_plus_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>) {
    let lb = u8_immediate(mem, pc);
    let addr = 0xFF00 + lb as u16;
    let val = mem.read_byte(addr);
    debug!("load val: {:X} into a", val);
    reg.write(val);
}
#[test]
fn test_load_val_FF00_plus_immediate() {
    // Previously an empty stub; exercise the load: immediate 0x42 at PC
    // selects address 0xFF42, whose contents must land in the register
    // and PC must advance past the operand.
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x0200);
    let mut reg = Register::new(0x00);
    mem.write_byte(pc.read(), 0x42);
    mem.write_byte(0xFF42, 0xAB);
    load_val_FF00_plus_immediate(&mem, &mut pc, &mut reg);
    assert!(reg.read() == 0xAB);
    assert!(pc.read() == 0x0201);
}
// POP AF: popping 0xBCDF puts 0xBC in A, truncates the low byte 0xDF to
// the defined flag bits, and moves SP up by two.
#[test]
fn test_pop_flags() {
    let mut a = Register::new(0x00);
    let mut f = Register::new(HalfCarryFlag);
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut sp = Register::new(0xFFFC);
    mem.write_word(sp.read(), 0xBCDF);
    pop_flags(&mem, &mut sp, &mut a, &mut f);
    assert!(sp.read() == 0xFFFE);
    assert!(a.read() == 0xBC);
    assert!(f.read() == ZeroFlag | SubtractFlag | CarryFlag);
}
#[test]
fn test_write_val_FF00_plus_immediate() {
    // Previously an empty stub; exercise the store: immediate 0x13 at PC
    // selects address 0xFF13, which must receive the value, and PC must
    // advance past the operand.
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x0300);
    mem.write_byte(pc.read(), 0x13);
    write_val_FF00_plus_immediate(&mut mem, &mut pc, 0x77);
    assert!(mem.read_byte(0xFF13) == 0x77);
    assert!(pc.read() == 0x0301);
}
// The 16-bit immediate at PC names the destination address; PC advances by
// two and the value lands at that address.
#[test]
fn test_write_value_to_u16_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x8284;
    let val = 0x92;
    let mut pc = Register::new(0x1234);
    mem.write_word(0x1234, addr);
    write_value_to_u16_immediate(&mut mem, &mut pc, val);
    assert!(pc.read() == 0x1236);
    assert!(mem.read_byte(addr) == val);
}
// DI must clear the interrupt-master-enable flag.
#[test]
fn test_disable_interrupts() {
    let mut a = true;
    disable_interrupts(&mut a);
    assert!(a == false);
}
// SRL on memory: zero input sets only Zero; otherwise bit 0 goes to carry.
// NOTE(review): every input here has bit 7 clear, so this test cannot tell
// a logical shift (SRL) apart from an arithmetic one (SRA) — add a case
// with bit 7 set (e.g. 0x80 -> 0x40) to cover that distinction.
#[test]
fn test_srl_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut freg = Register::new(HalfCarryFlag);
    let mut val = 0x00;
    let addr = 0x3727;
    mem.write_byte(addr, val);
    srl_at_address(&mut mem, addr, &mut freg);
    assert!(freg.read() == ZeroFlag);
    assert!(mem.read_byte(addr) == 0x00);
    val = 0b00110011;
    mem.write_byte(addr, val);
    srl_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b00011001);
    assert!(freg.read() == CarryFlag);
    srl_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b00001100);
    assert!(freg.read() == CarryFlag);
}
// SRL on a register: same cases as above.
// NOTE(review): same bit-7 coverage gap as test_srl_at_address.
#[test]
fn test_srl() {
    let mut reg = Register::new(0x00);
    let mut freg = Register::new(HalfCarryFlag);
    srl(&mut reg, &mut freg);
    assert!(freg.read() == ZeroFlag);
    assert!(reg.read() == 0x00);
    reg.write(0b00110011);
    srl(&mut reg, &mut freg);
    assert!(reg.read() == 0b00011001);
    assert!(freg.read() == CarryFlag);
    srl(&mut reg, &mut freg);
    assert!(reg.read() == 0b00001100);
    assert!(freg.read() == CarryFlag);
}
// SWAP on memory exchanges the nibbles of the byte in place.
#[test]
fn test_swap_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x2321;
    let mut freg = Register::new(Flags::empty());
    mem.write_byte(addr, 0xCB);
    swap_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0xBC);
}
// SWAP on a register: zero input sets Zero; otherwise all flags clear.
#[test]
fn test_swap() {
    let mut reg = Register::new(0x00);
    let mut freg = Register::new(Flags::empty());
    swap(&mut reg, &mut freg);
    assert!(reg.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    reg.write(0xAC);
    swap(&mut reg, &mut freg);
    assert!(reg.read() == 0xCA);
    assert!(freg.read() == Flags::empty());
}
// SRA on memory: bit 7 is preserved (sign extension), bit 0 goes to carry,
// zero input sets only Zero.
#[test]
fn test_sra_at_address() {
    let mut val = 0b10010001;
    let mut freg = Register::new(Flags::empty());
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x2372;
    mem.write_byte(addr, val);
    sra_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b11001000);
    assert!(freg.read() == CarryFlag);
    sra_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b11100100);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0x00);
    sra_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b00001111);
    sra_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b00000111);
    assert!(freg.read() == CarryFlag);
}
// SRA on a register: mirrors the memory variant above.
#[test]
fn test_sra() {
    let mut reg = Register::new(0b10010001);
    let mut freg = Register::new(Flags::empty());
    sra(&mut reg, &mut freg);
    assert!(reg.read() == 0b11001000);
    assert!(freg.read() == CarryFlag);
    sra(&mut reg, &mut freg);
    assert!(reg.read() == 0b11100100);
    assert!(freg.read() == Flags::empty());
    reg.write(0x00);
    sra(&mut reg, &mut freg);
    assert!(reg.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    reg.write(0b00001111);
    sra(&mut reg, &mut freg);
    assert!(reg.read() == 0b00000111);
    assert!(freg.read() == CarryFlag);
}
// SLA on memory: bit 7 goes to carry, bit 0 becomes 0; zero input sets
// only Zero; an incoming carry does not feed into the result (unlike RL).
#[test]
fn test_sla_at_address() {
    let mut val = 0b00001111;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1423;
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    mem.write_byte(addr, val);
    sla_at_address(&mut mem, addr, &mut freg);
    // Rotate should happen
    assert!(mem.read_byte(addr) == 0b00011110);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0x00);
    sla_at_address(&mut mem, addr, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b11001100);
    sla_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b10011000);
    assert!(freg.read() == CarryFlag);
    freg.write(CarryFlag);
    mem.write_byte(addr, 0b11001100);
    sla_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b10011000);
    assert!(freg.read() == CarryFlag);
}
// SLA on a register: mirrors the memory variant above.
#[test]
fn test_sla() {
    let mut reg = Register::new(0b00001111);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    sla(&mut reg, &mut freg);
    // Rotate should happen
    assert!(reg.read() == 0b00011110);
    assert!(freg.read() == Flags::empty());
    let mut regb = Register::new(0x00);
    sla(&mut regb, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(regb.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    let mut regc = Register::new(0b11001100);
    freg.write(CarryFlag);
    sla(&mut regc, &mut freg);
    assert!(regc.read() == 0b10011000);
    assert!(freg.read() == CarryFlag);
    regc.write(0b00110011);
    sla(&mut regc, &mut freg);
    assert!(regc.read() == 0b01100110);
    assert!(freg.read() == Flags::empty());
}
// RR on memory: incoming carry re-enters at bit 7, bit 0 goes out to carry.
#[test]
fn test_rotate_right_at_address() {
    let mut val = 0b00001111;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1423;
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    mem.write_byte(addr, val);
    rotate_right_at_address(&mut mem, addr, &mut freg);
    // Rotate should happen
    assert!(mem.read_byte(addr) == 0b00000111);
    assert!(freg.read() == CarryFlag);
    mem.write_byte(addr, 0x00);
    freg.write(Flags::empty());
    rotate_right_at_address(&mut mem, addr, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b11001101);
    rotate_right_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b01100110);
    assert!(freg.read() == CarryFlag);
    freg.write(CarryFlag);
    mem.write_byte(addr, 0b11001100);
    rotate_right_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b11100110);
    assert!(freg.read() == Flags::empty());
}
// RR on a register, including the input-zero Zero-flag convention and the
// 0x01 -> 0x00 case that leaves Zero clear (result-zero is NOT flagged).
#[test]
fn test_rotate_right() {
    let mut reg = Register::new(0b10011001);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_right(&mut reg, &mut freg);
    assert!(reg.read() == 0b01001100);
    assert!(freg.read() == CarryFlag);
    reg.write(0x0);
    freg.write(Flags::empty());
    rotate_right(&mut reg, &mut freg);
    assert!(reg.read() == 0x0);
    assert!(freg.read() == ZeroFlag);
    reg.write(0x01);
    freg.write(CarryFlag);
    rotate_right(&mut reg, &mut freg);
    assert!(reg.read() == 0x80);
    assert!(freg.read() == CarryFlag);
    reg.write(0x01);
    freg.write(Flags::empty());
    rotate_right(&mut reg, &mut freg);
    assert!(reg.read() == 0x00);
    assert!(freg.read() == CarryFlag);
}
// RL on memory: incoming carry re-enters at bit 0, bit 7 goes out to carry.
#[test]
fn test_rotate_left_at_address() {
    let mut val = 0b00001111;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1423;
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    mem.write_byte(addr, val);
    rotate_left_at_address(&mut mem, addr, &mut freg);
    // Rotate should happen
    assert!(mem.read_byte(addr) == 0b00011110);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0x00);
    rotate_left_at_address(&mut mem, addr, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b11001100);
    rotate_left_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b10011000);
    assert!(freg.read() == CarryFlag);
    freg.write(CarryFlag);
    mem.write_byte(addr, 0b11001100);
    rotate_left_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b10011001);
    assert!(freg.read() == CarryFlag);
}
// RL on a register: mirrors the memory variant above.
#[test]
fn test_rotate_left() {
    let mut reg = Register::new(0b00001111);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_left(&mut reg, &mut freg);
    // Rotate should happen
    assert!(reg.read() == 0b00011110);
    assert!(freg.read() == Flags::empty());
    let mut regb = Register::new(0x00);
    rotate_left(&mut regb, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(regb.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    let mut regc = Register::new(0b11001100);
    freg.write(CarryFlag);
    rotate_left(&mut regc, &mut freg);
    assert!(regc.read() == 0b10011001);
    assert!(freg.read() == CarryFlag);
    regc.write(0b00110011);
    freg.write(Flags::empty());
    rotate_left(&mut regc, &mut freg);
    assert!(regc.read() == 0b01100110);
    assert!(freg.read() == Flags::empty());
}
// RRC on memory (rotate_right_with_carry_at_address is defined elsewhere in
// this file): bit 0 wraps around to bit 7 and also lands in the carry flag.
#[test]
fn test_rotate_right_with_carry_at_address() {
    let mut val = 0b10011001;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1237;
    mem.write_byte(addr, val);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_right_with_carry_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b11001100);
    assert!(freg.read() == CarryFlag);
    mem.write_byte(addr, 0x0);
    rotate_right_with_carry_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x0);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0x01);
    freg.write(CarryFlag);
    rotate_right_with_carry_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x80);
    assert!(freg.read() == CarryFlag);
    mem.write_byte(addr, 0x01);
    rotate_right_with_carry_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x80);
    assert!(freg.read() == CarryFlag);
}
// RLC on memory: bit 7 wraps around to bit 0 and also lands in carry.
#[test]
fn test_rotate_left_with_carry_at_address() {
    let mut val = 0b00001111;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1423;
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    mem.write_byte(addr, val);
    rotate_left_with_carry_at_address(&mut mem, addr, &mut freg);
    // Rotate should happen
    assert!(mem.read_byte(addr) == 0b00011110);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0x00);
    rotate_left_with_carry_at_address(&mut mem, addr, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b11001100);
    rotate_left_with_carry_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b10011001);
    assert!(freg.read() == CarryFlag);
}
// SET on memory: only the requested bit is turned on.
#[test]
fn test_set_at_addr() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1235;
    mem.write_byte(addr, 0b11001100);
    set_at_addr(&mut mem, addr, 0);
    assert!(mem.read_byte(addr) == 0b11001101);
}
// SET on a register: setting an already-set bit is a no-op.
#[test]
fn test_set() {
    let mut reg = Register::new(0b00110011);
    set(&mut reg, 7);
    assert!(reg.read() == 0b10110011);
    set(&mut reg, 1);
    assert!(reg.read() == 0b10110011);
    set(&mut reg, 4);
    assert!(reg.read() == 0b10110011);
}
// RES on memory: only the requested bit is cleared.
#[test]
fn test_res_at_addr() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1235;
    mem.write_byte(addr, 0b11001100);
    res_at_addr(&mut mem, addr, 2);
    assert!(mem.read_byte(addr) == 0b11001000);
}
// RES on a register: clearing an already-clear bit is a no-op.
#[test]
fn test_res() {
    let mut reg = Register::new(0b00110011);
    res(&mut reg, 0);
    assert!(reg.read() == 0b00110010);
    res(&mut reg, 5);
    assert!(reg.read() == 0b00010010);
    res(&mut reg, 7);
    assert!(reg.read() == 0b00010010);
}
// Plain memory read helper round-trips the written byte.
#[test]
fn test_byte_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1234;
    mem.write_byte(addr, 0xFE);
    let val = byte_at_address(&mem, addr);
    assert!(val == 0xFE);
}
// BIT: Zero reflects the complement of the tested bit; Subtract cleared,
// HalfCarry set, Carry preserved.
#[test]
fn test_bit() {
    let reg = 0b0001111;
    let mut freg = Register::new(SubtractFlag | CarryFlag);
    bit(reg, 3, &mut freg);
    assert!(freg.read() == HalfCarryFlag | CarryFlag);
    bit(reg, 7, &mut freg);
    assert!(freg.read() == HalfCarryFlag | CarryFlag | ZeroFlag);
}
// JP loads the target address straight into PC.
#[test]
fn test_jp() {
    let mut pc = Register::new(0x2311);
    let addr = 0x1223;
    jp(&mut pc, addr);
    assert!(pc.read() == addr);
}
// The 16-bit immediate fetch returns the word at PC and advances PC by 2.
#[test]
fn test_u16_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x123);
    mem.write_word(pc.read(), 0xBEEF);
    let val = u16_immediate(&mem, &mut pc);
    assert!(val == 0xBEEF);
    assert!(pc.read() == 0x125);
}
// The 8-bit immediate fetch returns the byte at PC and advances PC by 1.
#[test]
fn test_u8_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x123);
    mem.write_byte(pc.read(), 0x43);
    let val = u8_immediate(&mem, &mut pc);
    assert!(val == 0x43);
    assert!(pc.read() == 0x124);
}
// RETI pops the return address into PC and re-enables interrupts.
#[test]
fn test_reti() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut ime = false;
    let mut pc = Register::new(0x123);
    let mut sp = Register::new(0xFFFC);
    mem.write_word(sp.read(), 0xADCD);
    reti(&mem, &mut pc, &mut sp, &mut ime);
    assert!(pc.read() == 0xADCD);
    assert!(ime == true);
    assert!(sp.read() == 0xFFFE);
}
// SUB n / SBC n: subtract the immediate (plus carry-in when the last
// argument is true) from the register, checking the resulting flags.
#[test]
fn test_sub_u8_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x2736);
    let mut reg = Register::new(0x35);
    let mut freg = Register::new(Flags::empty());
    mem.write_byte(pc.read(), 0x11);
    sub_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(pc.read() == 0x2737);
    assert!(reg.read() == 0x24);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    mem.write_byte(pc.read(), 0x0C);
    sub_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(pc.read() == 0x2738);
    assert!(reg.read() == 0x18);
    assert!(freg.read() == SubtractFlag | CarryFlag);
    mem.write_byte(pc.read(), 0x0F);
    freg.write(CarryFlag);
    sub_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, true);
    assert!(pc.read() == 0x2739);
    assert!(reg.read() == 0x08);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag);
}
// CALL: pushes the current PC onto the stack and jumps to the target.
#[test]
fn test_call() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x4324);
    let mut sp = Register::new(0xFFFE);
    let addr = 0x10;
    call(&mut mem, &mut pc, &mut sp, addr);
    assert!(pc.read() == 0x0010);
    assert!(sp.read() == 0xFFFC);
    assert!(mem.read_word(sp.read()) == 0x4324);
}
// ADD n / ADC n: add the immediate (plus carry-in when true), including
// the wrap-to-zero case that must raise ZeroFlag.
#[test]
fn test_add_u8_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x2736);
    let mut reg = Register::new(0x05);
    let mut freg = Register::new(Flags::empty());
    mem.write_byte(pc.read(), 0x11);
    add_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(pc.read() == 0x2737);
    assert!(reg.read() == 0x16);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(pc.read(), 0x0C);
    add_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(pc.read() == 0x2738);
    assert!(reg.read() == 0x22);
    assert!(freg.read() == HalfCarryFlag);
    mem.write_byte(pc.read(), 0x0F);
    freg.write(CarryFlag);
    add_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, true);
    assert!(pc.read() == 0x2739);
    assert!(reg.read() == 0x32);
    assert!(freg.read() == HalfCarryFlag);
    mem.write_byte(pc.read(), 0x01);
    reg.write(0xFF);
    add_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(reg.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
}
// PUSH: SP moves down by two and the word is stored at the new SP.
#[test]
fn test_push() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut sp = Register::new(0xFFAB);
    push(&mut mem, &mut sp, 0x8735);
    assert!(sp.read() == 0xFFA9);
    assert!(mem.read_word(sp.read()) == 0x8735);
}
// Conditional CALL: when false, PC just skips the 2-byte operand; when
// true, the post-operand PC is pushed and execution jumps to the operand.
#[test]
fn test_call_immediate_if_true() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x6542);
    let mut sp = Register::new(0xFFAB);
    mem.write_word(pc.read(), 0x1234);
    call_immediate_if_true(&mut mem, &mut pc, &mut sp, false);
    assert!(pc.read() == 0x6544);
    pc.write(0x6542);
    call_immediate_if_true(&mut mem, &mut pc, &mut sp, true);
    assert!(pc.read() == 0x1234);
    assert!(sp.read() == 0xFFA9);
    assert!(mem.read_word(sp.read()) == 0x6544);
}
// JP nn: unconditionally loads the immediate word into PC.
#[test]
fn test_jp_u16_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x6542);
    mem.write_word(pc.read(), 0x1234);
    pc.write(0x6542);
    jp_u16_immediate(&mem, &mut pc);
    assert!(pc.read() == 0x1234);
}
// Conditional JP nn: false skips the operand, true jumps to it.
#[test]
fn test_jp_u16_immediate_if_true() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x6542);
    mem.write_word(pc.read(), 0x1234);
    jp_u16_immediate_if_true(&mem, &mut pc, false);
    assert!(pc.read() == 0x6544);
    pc.write(0x6542);
    jp_u16_immediate_if_true(&mem, &mut pc, true);
    assert!(pc.read() == 0x1234);
}
// POP into a register pair: high byte then low byte, SP moves up by two.
#[test]
fn test_pop() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut sp = Register::new(0xABCD);
    let mut hb = Register::new(0x00);
    let mut lb = Register::new(0x00);
    mem.write_word(sp.read(), 0x1234);
    pop(&mem, &mut sp, &mut hb, &mut lb);
    assert!(hb.read() == 0x12);
    assert!(lb.read() == 0x34);
    assert!(sp.read() == 0xABCF);
}
// Conditional RET: false leaves PC/SP untouched; true pops into PC.
#[test]
fn test_ret() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0xAB);
    let mut sp = Register::new(0xABCD);
    mem.write_word(sp.read(), 0x1234);
    ret(&mem, &mut pc, &mut sp, false);
    assert!(pc.read() == 0xAB);
    assert!(sp.read() == 0xABCD);
    ret(&mem, &mut pc, &mut sp, true);
    assert!(pc.read() == 0x1234);
    assert!(sp.read() == 0xABCF);
}
// OR: bitwise or into the register; Zero only when the result is zero.
#[test]
fn test_or() {
    let mut reg = Register::new(0b01010101);
    let mut val = 0b11110000;
    let mut freg = Register::new(Flags::empty());
    or(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b11110101);
    assert!(freg.read() == Flags::empty());
    reg.write(0b00000000);
    val = 0x00;
    or(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == ZeroFlag);
}
// CP: compare leaves the register unchanged and only sets flags; equal
// operands yield Zero (plus the other flags this implementation raises).
#[test]
fn test_cp() {
    let mut reg = Register::new(0xAA);
    let mut val = 0xBB;
    let mut freg = Register::new(Flags::empty());
    compare(&mut reg, val, &mut freg);
    assert!(reg.read() == 0xAA);
    assert!(freg.read() == SubtractFlag);
    reg.write(0xF0);
    val = 0xF0;
    compare(&mut reg, val, &mut freg);
    assert!(reg.read() == 0xF0);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag | ZeroFlag);
}
// OR (HL)-style: or the byte at the address into the register.
#[test]
fn test_or_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0b01000110);
    let mut freg = Register::new(Flags::empty());
    let addr = 0x1239;
    let mut val = 0b11000011;
    mem.write_byte(addr, val);
    or_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b11000111);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0b00000000);
    reg.write(0x00);
    or_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == ZeroFlag);
}
// CP (HL)-style: compare against the byte at the address.
#[test]
fn test_compare_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0xBA);
    let mut freg = Register::new(Flags::empty());
    let addr = 0x1239;
    let mut val = 0xAB;
    mem.write_byte(addr, val);
    compare_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0xBA);
    assert!(freg.read() == SubtractFlag | CarryFlag);
    mem.write_byte(addr, 0xCD);
    reg.write(0xCD);
    compare_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0xCD);
    assert!(freg.read() == SubtractFlag | ZeroFlag | HalfCarryFlag | CarryFlag);
}
// AND (HL)-style: and the byte at the address; HalfCarry always set.
#[test]
fn test_and_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0b01000110);
    let mut freg = Register::new(Flags::empty());
    let addr = 0x1239;
    let mut val = 0b11000011;
    mem.write_byte(addr, val);
    and_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b01000010);
    assert!(freg.read() == HalfCarryFlag);
    mem.write_byte(addr, 0b00000000);
    and_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == HalfCarryFlag | ZeroFlag);
}
// AND: HalfCarry always set; Zero when the result is zero.
#[test]
fn test_and() {
    let mut reg = Register::new(0b01010101);
    let mut val = 0b11110000;
    let mut freg = Register::new(Flags::empty());
    and(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b01010000);
    assert!(freg.read() == HalfCarryFlag);
    reg.write(0b00001111);
    and(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == HalfCarryFlag | ZeroFlag);
}
// XOR (HL)-style: exclusive-or the byte at the address into the register.
#[test]
fn test_xor_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0b00111100);
    let mut freg = Register::new(Flags::empty());
    let addr = 0x1239;
    let mut val = 0b11000011;
    mem.write_byte(addr, val);
    xor_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b11111111);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0b11111111);
    xor_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == ZeroFlag);
}
// XOR: Zero when the result is zero, all other flags cleared.
#[test]
fn test_xor() {
    let mut reg = Register::new(0b01010101);
    let mut freg = Register::new(Flags::empty());
    let mut val = 0b11111111;
    xor(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b10101010);
    assert!(freg.read() == Flags::empty());
    val = 0b10101010;
    xor(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == ZeroFlag);
}
// SBC (HL)-style: subtract the byte at the address plus the carry-in.
#[test]
fn test_sbc_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut freg = Register::new(CarryFlag);
    let mut reg = Register::new(0xFF);
    let addr = 0x1234;
    mem.write_byte(addr, 0xCD);
    sbc_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0x31);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag);
}
// SUB (HL)-style: subtract the byte at the address (no carry-in).
#[test]
fn test_sub_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut freg = Register::new(CarryFlag);
    let mut reg = Register::new(0xFF);
    let addr = 0x1234;
    mem.write_byte(addr, 0xCD);
    sub_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0x32);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag);
}
// SBC: subtract with carry-in, covering carry-set and carry-clear cases.
#[test]
fn test_sbc() {
    let mut reg = Register::new(0xFF);
    let mut freg = Register::new(Flags::empty());
    sbc(&mut reg, 0x0f, &mut freg);
    assert!(reg.read() == 0xF0);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    reg.write(0xFF);
    freg.write(CarryFlag);
    sbc(&mut reg, 0x0f, &mut freg);
    assert!(reg.read() == 0xEF);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    reg.write(0xFF);
    freg.write(CarryFlag);
    sbc(&mut reg, 0xFF, &mut freg);
    assert!(reg.read() == 0xFF);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    reg.write(0xAB);
    freg.write(CarryFlag);
    sbc(&mut reg, 0x12, &mut freg);
    assert!(reg.read() == 0x98);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag);
}
// SUB: plain subtraction, including borrow/half-borrow and wraparound.
#[test]
fn test_sub() {
    let mut reg = Register::new(0xFF);
    let mut freg = Register::new(Flags::empty());
    sub(&mut reg, 0x0f, &mut freg);
    assert!(reg.read() == 0xF0);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    reg.write(0x11);
    sub(&mut reg, 0x11, &mut freg);
    assert!(reg.read() == 0);
    assert!(freg.read().contains(SubtractFlag));
    assert!(freg.read().contains(ZeroFlag));
    assert!(freg.read().contains(CarryFlag));
    assert!(freg.read().contains(HalfCarryFlag));
    reg.write(0xA0);
    sub(&mut reg, 0xB0, &mut freg);
    assert!(reg.read() == 0xF0);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag);
    reg.write(0x80);
    sub(&mut reg, 0x0f, &mut freg);
    assert!(reg.read() == 0x71);
    assert!(freg.read() == SubtractFlag | CarryFlag);
    reg.write(0x05);
    sub(&mut reg, 0xAB, &mut freg);
    assert!(reg.read() == 0x5A);
    assert!(freg.read() == SubtractFlag);
}
// ADC (HL)-style: add the byte at the address plus the carry-in.
#[test]
fn test_adc_value_at_address() {
    let address = 0x1038;
    let mut mem = EmptyMemory::new(0xFFFF);
    let val = 0x10;
    let mut reg = Register::new(0x00);
    let mut freg = Register::new(CarryFlag);
    mem.write_byte(address, val);
    adc_value_at_address(&mem, &mut reg, address, &mut freg);
    assert!(reg.read() == val + 1);
}
// ADC: add with carry-in, covering half-carry, full carry, and zero cases.
#[test]
fn test_adc() {
    let mut first = Register::new(0x05);
    let mut flags = Register::new(SubtractFlag | CarryFlag);
    adc(&mut first, 0x0A, &mut flags);
    assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
    assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
    flags.write(CarryFlag);
    let mut a = Register::new(0xFA);
    adc(&mut a, 0x06, &mut flags);
    assert!(a.read() == 0x01);
    assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
    flags.write(CarryFlag);
    a.write(0);
    adc(&mut a, 0, &mut flags);
    assert!(a.read() == 0x01);
    assert!(flags.read() == Flags::empty());
    a.write(0);
    adc(&mut a, 0, &mut flags);
    assert!(flags.read() == ZeroFlag);
}
// ADD (HL)-style with a split high/low address: add the byte at the
// address formed from the two halves.
#[test]
fn test_add_value_at_address() {
    let mut first = Register::new(0x05);
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut flags = Register::new(SubtractFlag | CarryFlag);
    mem.write_byte(0x8476, 0x0B);
    add_value_at_address(&mut mem, &mut first, 0x84, 0x76, &mut flags);
    assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
    assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
    let mut a = Register::new(0xFA);
    mem.write_byte(0xADCD, 0x07);
    add_value_at_address(&mut mem, &mut a, 0xAD, 0xCD, &mut flags);
    assert!(a.read() == 0x01);
    assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
    a.write(0);
    add_value_at_address(&mut mem, &mut a, 0x11, 0x11, &mut flags);
    assert!(a.read() == 0x0);
    assert!(flags.read() == ZeroFlag);
}
#[test]
fn test_ld_u8() {
let mut reg = Register::new(10);
ld_u8(&mut reg, 0x18);
assert!(reg.read() == 0x18);
}
#[test]
fn test_reset_flag() {
let mut freg = Register::new(CarryFlag | ZeroFlag);
ccf(&mut freg);
assert!(!freg.read().contains(CarryFlag));
assert!(freg.read().contains(ZeroFlag));
freg.write(SubtractFlag);
ccf(&mut freg);
assert!(freg.read() == CarryFlag);
}
#[test]
fn test_ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair() {
let mut mem = EmptyMemory::new(0xFFFF);
let mut reg = Register::new(0x12);
let mut high_byte = Register::new(0xAB);
let mut low_byte = Register::new(0xCD);
mem.write_byte(0xABCD, 0x54);
ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair(&mem, &mut reg, &mut high_byte, &mut low_byte);
assert!(reg.read() == 0x54);
assert!(low_byte.read() == 0xCC);
}
#[test]
fn test_set_flag() {
let mut freg = Register::new(Flags::empty());
set_flag(&mut freg, CarryFlag);
assert!(freg.read().contains(CarryFlag));
}
#[test]
fn test_ld_u8_immediate_into_address() {
let mut mem = EmptyMemory::new(0xFFFF);
let mut pc = Register::new(0xAD12);
mem.write_byte(0xAD12, 0xBB);
ld_u8_immediate_into_address(&mut mem, &mut pc, 0x12, 0x34);
assert!(mem.read_byte(0x1234) == 0xBB);
assert!(pc.read() == 0xAD13);
}
#[test]
fn test_decrement_value_at_address() {
let mut mem = EmptyMemory::new(0xFFFF);
let mut freg = Register::new(CarryFlag);
mem.write_byte(0x1010, 1);
decrement_value_at_address(&mut mem, 0x10, 0x10, &mut freg);
assert!(mem.read_byte(0x1010) == 0);
assert!(freg.read().is_all());
mem.write_byte(0x01AB, 0x20);
decrement_value_at_address(&mut mem, 0x01, 0xAB, &mut freg);
assert!(mem.read_byte(0x01AB) == 0x1F);
assert!(freg.read() == CarryFlag | SubtractFlag);
freg.write(ZeroFlag);
mem.write_byte(0xABCD, 0xED);
decrement_value_at_address(&mut mem, 0xAB, 0xCD, &mut freg);
assert!(mem.read_byte(0xABCD) == 0xEC);
assert!(freg.read() == SubtractFlag | HalfCarryFlag);
}
#[test]
fn test_increment_value_at_address() {
let mut mem = EmptyMemory::new(0xFFFF);
let mut freg = Register::new(CarryFlag);
increment_value_at_address(&mut mem, 0x10, 0x10, &mut freg);
assert!(mem.read_byte(0x1010) == 1);
assert!(freg.read() == CarryFlag);
mem.write_byte(0x01AB, 0x1F);
increment_value_at_address(&mut mem, 0x01, 0xAB, &mut freg);
assert!(mem.read_byte(0x01AB) == 0x20);
assert!(freg.read().contains(CarryFlag));
assert!(freg.read().contains(HalfCarryFlag));
freg.write(SubtractFlag);
mem.write_byte(0xABCD, 0xED);
increment_value_at_address(&mut mem, 0xAB, 0xCD, &mut freg);
assert!(mem.read_byte(0xABCD) == 0xEE);
assert!(freg.read() == Flags::empty());
}
#[test]
fn test_write_value_to_memory_at_address_and_decrement_register() {
let mut mem = EmptyMemory::new(0xFFFF);
let mut val = 0x8;
let mut high_byte = Register::new(0x12);
let mut low_byte = Register::new(0x34);
write_value_to_memory_at_address_and_decrement_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(low_byte.read() == 0x33, "Should increment register");
assert!(mem.read_byte(0x1234) == 0x8, "Should correctly write value");
high_byte.write(0x11);
low_byte.write(0x00);
write_value_to_memory_at_address_and_decrement_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(mem.read_byte(0x1100) == 0x8);
assert!(high_byte.read() == 0x10);
assert!(low_byte.read() == 0xFF);
}
#[test]
fn test_ld_next_two_bytes_into_reg() {
let mut mem = EmptyMemory::new(65536);
let mut pc = Register::new(11);
let mut reg = Register::new(0);
mem.write_word(11, 0xDEAB);
ld_next_two_bytes_into_reg(&mem, &mut pc, &mut reg);
assert!(pc.read() == 13);
assert!(reg.read() == 0xDEAB);
}
// Verifies complement (CPL): A is bitwise inverted, HalfCarry and Subtract
// are set, and any existing flags are preserved.
#[test]
fn test_complement() {
    let mut a = Register::new(0x11);
    let mut freg = Register::new(ZeroFlag | CarryFlag);
    complement(&mut a, &mut freg);
    assert!(a.read() == !0x11);
    // Zero|Carry preserved plus Half|Subtract added => all four flags set
    assert!(freg.read().is_all());
    freg.write(Flags::empty());
    complement(&mut a, &mut freg);
    assert!(a.read() == 0x11);
    assert!(freg.read().contains(HalfCarryFlag));
    assert!(freg.read().contains(SubtractFlag));
}
// Verifies the LD A,(pair+) style helper: loads from the pair's address and
// then increments the register pair.
#[test]
fn test_ld_from_address_pointed_to_by_register_pair_and_increment_register_pair() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0x12);
    let mut high_byte = Register::new(0xAB);
    let mut low_byte = Register::new(0xCD);
    mem.write_byte(0xABCD, 0x54);
    ld_from_address_pointed_to_by_register_pair_and_increment_register_pair(&mem, &mut reg, &mut high_byte, &mut low_byte);
    assert!(reg.read() == 0x54);
    // 0xABCD + 1 => low byte becomes 0xCE
    assert!(low_byte.read() == 0xCE);
}
// Verifies relative_jmp_by_signed_immediate_if_true: jumps by the signed
// immediate when the condition holds, otherwise just skips the operand byte.
#[test]
fn test_relative_jmp_by_signed_immediate_if_true() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x1234);
    // Forwards
    mem.write_byte(0x1234, 0x55);
    relative_jmp_by_signed_immediate_if_true(&mem, &mut pc, true);
    // 0x1234 + 1 (operand fetch) + 0x55 = 0x128A
    assert!(pc.read() == 0x128A, "Should jump forwards");
    // Backwards
    mem.write_byte(0x128A, -10 as u8);
    relative_jmp_by_signed_immediate_if_true(&mem, &mut pc, true);
    assert!(pc.read() == 0x1281, "Should jump back");
    // no jump
    mem.write_byte(0x1281, 0xFF);
    relative_jmp_by_signed_immediate_if_true(&mem, &mut pc, false);
    assert!(pc.read() == 0x1282, "Should not jump if ZeroFlag is not set. PC should increment to go past immediate value");
}
// Verifies write_value_to_memory_at_address_and_increment_register:
// writes val to the (high, low) address, then increments the pair as u16.
#[test]
fn test_write_value_to_memory_at_address_and_increment_register() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut val = 0x8;
    let mut high_byte = Register::new(0x12);
    let mut low_byte = Register::new(0x34);
    write_value_to_memory_at_address_and_increment_register(&mut mem, val, &mut high_byte, &mut low_byte);
    assert!(low_byte.read() == 0x35, "Should increment register");
    assert!(mem.read_byte(0x1234) == 0x8, "Should correctly write value");
    low_byte.write(0xFF);
    write_value_to_memory_at_address_and_increment_register(&mut mem, val, &mut high_byte, &mut low_byte);
    assert!(mem.read_byte(0x12FF) == 0x8);
    // 0x12FF + 1 = 0x1300: carry propagates into the high byte
    assert!(high_byte.read() == 0x13);
    assert!(low_byte.read() == 0x00);
}
// Verifies jump_by_signed_immediate (JR): the operand is fetched (PC += 1)
// and then the signed offset is applied to the post-fetch PC.
#[test]
fn test_jump_by_signed_immediate() {
    let mut mem = EmptyMemory::new(0x10000);
    let mut pc = Register::new(0x0101);
    // 0x8A = -10 as i8
    mem.write_byte(0x0101, -10 as u8);
    jump_by_signed_immediate(&mem, &mut pc);
    // 0x0102 - 10 = 0xF8
    assert!(pc.read() == 0xF8, "Should jump backwards with negative number");
    mem.write_byte(0xF8, 0x37);
    jump_by_signed_immediate(&mem, &mut pc);
    // 0xF9 + 0x37 = 0x130
    assert!(pc.read() == 0x130);
}
// Verifies rotate_right_with_carry (RRC): bit 0 rotates into bit 7 and into
// CarryFlag; a zero result sets ZeroFlag; prior flags are replaced.
#[test]
fn test_rotate_right_with_carry() {
    let mut reg = Register::new(0b10011001);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_right_with_carry(&mut reg, &mut freg);
    assert!(reg.read() == 0b11001100);
    assert!(freg.read() == CarryFlag);
    reg.write(0x0);
    rotate_right_with_carry(&mut reg, &mut freg);
    assert!(reg.read() == 0x0);
    assert!(freg.read() == ZeroFlag);
    reg.write(0x01);
    freg.write(CarryFlag);
    rotate_right_with_carry(&mut reg, &mut freg);
    // 0x01 rotates to 0x80 with carry out of bit 0
    assert!(reg.read() == 0x80);
    assert!(freg.read() == CarryFlag);
}
// Verifies decrement_register_pair borrows across the byte boundary:
// 0x7000 - 1 = 0x6FFF.
#[test]
fn test_decrement_register_pair() {
    let mut reg1 = Register::new(0x70);
    let mut reg2 = Register::new(0x00);
    decrement_register_pair(&mut reg1, &mut reg2);
    assert!(reg1.read() == 0x6F);
    assert!(reg2.read() == 0xFF);
}
// Verifies ld_from_address when the address comes from a packed register pair.
#[test]
fn test_ld_from_reg_pair_as_address() {
    let mut mem = EmptyMemory::new(65000);
    let mut rega = Register::new(0x00);
    let mut reg1 = Register::new(0x12);
    let mut reg2 = Register::new(0x34);
    mem.write_byte(0x1234, 0xAA);
    ld_from_address(&mem, &mut rega, pack_u16(reg1.read(), reg2.read()));
    assert!(rega.read() == 0xAA);
}
// Verifies 16-bit ADD (ADD HL,rr style): ZeroFlag is preserved, Subtract
// cleared, CarryFlag from bit 15 and HalfCarryFlag from bit 11.
#[test]
fn test_add_register_pair_to_register_pair() {
    let mut rega = Register::new(0x11);
    let mut regb = Register::new(0x11);
    let mut reg1 = Register::new(0x11);
    let mut reg2 = Register::new(0x11);
    let mut freg = Register::new(ZeroFlag | SubtractFlag | HalfCarryFlag | CarryFlag);
    // Basic add make sure ZeroFlag isn't affected
    add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
    assert!(pack_u16(rega.read(), regb.read()) == 0x2222);
    assert!(freg.read() == ZeroFlag);
    rega.write(0xF1);
    regb.write(0xAB);
    reg1.write(0x12);
    reg2.write(0x12);
    // Carry from bit 15
    add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
    // 0xF1AB + 0x1212 = 0x103BD, truncated to 0x03BD with carry out
    assert!(pack_u16(rega.read(), regb.read()) == 0x03BD);
    assert!(freg.read() == ZeroFlag | CarryFlag);
    rega.write(0x1E);
    regb.write(0xAB);
    reg1.write(0x12);
    reg2.write(0x16);
    freg.write(ZeroFlag);
    // Carry from bit 11
    add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
    assert!(pack_u16(rega.read(), regb.read()) == 0x30C1);
    println!("{}", freg.read().bits());
    assert!(freg.read() == ZeroFlag | HalfCarryFlag);
}
// Verifies write_u16_immediate_address (LD (nn),SP style): the target
// address is read from the two bytes at PC, then SP is written there.
#[test]
fn test_write_stack_pointer_to_address_immediate() {
    let mut sp = Register::new(0xBEEF);
    let mut pc = Register::new(0x111);
    let mut mem = EmptyMemory::new(65647);
    // Immediate operand 0xDEAD stored little-endian at PC
    mem.write_byte(0x111, 0xAD);
    mem.write_byte(0x112, 0xDE);
    write_u16_immediate_address(&mut mem, &mut pc, sp.read());
    assert!(pc.read() == 0x113);
    assert!(mem.read_word(0xDEAD) == 0xBEEF);
}
// Verifies rotate_left_with_carry (RLC): bit 7 rotates into bit 0 and into
// CarryFlag; zero input yields ZeroFlag; prior flags are replaced.
#[test]
fn test_rotate_left_with_carry() {
    let mut reg = Register::new(0b00001111);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_left_with_carry(&mut reg, &mut freg);
    // Rotate should happen
    assert!(reg.read() == 0b00011110);
    assert!(freg.read() == Flags::empty());
    let mut regb = Register::new(0x00);
    rotate_left_with_carry(&mut regb, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(regb.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    let mut regc = Register::new(0b11001100);
    rotate_left_with_carry(&mut regc, &mut freg);
    // Carry should get set
    assert!(regc.read() == 0b10011001);
    assert!(freg.read() == CarryFlag);
}
// Verifies decrement_register (DEC r): Subtract always set, Zero on 0 result,
// and this codebase's "HalfCarry set when there is NO borrow" convention.
#[test]
fn test_decrement_register() {
    let mut reg = Register::new(1);
    let mut freg = Register::new(Flags::empty());
    decrement_register(&mut reg, &mut freg);
    assert!(reg.read() == 0);
    assert!(freg.read() == ZeroFlag | SubtractFlag | HalfCarryFlag);
    reg.write(0xF1);
    freg.write(Flags::empty());
    decrement_register(&mut reg, &mut freg);
    assert!(reg.read() == 0xF0);
    // low nibble nonzero before decrement => HalfCarry set
    assert!(freg.read() == SubtractFlag | HalfCarryFlag);
    reg.write(0xF0);
    freg.write(Flags::empty());
    decrement_register(&mut reg, &mut freg);
    assert!(reg.read() == 0xEF);
    // low nibble zero before decrement => borrow => HalfCarry clear
    assert!(freg.read() == SubtractFlag);
}
// Verifies increment_register (INC r): Zero/HalfCarry recomputed, Subtract
// cleared; CarryFlag is untouched (not covered here, see INC at address test).
#[test]
fn test_increment_register() {
    let mut reg = Register::new(1);
    let mut freg = Register::new(ZeroFlag | HalfCarryFlag);
    increment_register(&mut reg, &mut freg);
    assert!(reg.read() == 2);
    assert!(freg.read() == Flags::empty());
    let mut regb = Register::new(0x0F);
    increment_register(&mut regb, &mut freg);
    // carry out of bit 3
    assert!(regb.read() == 0x10);
    assert!(freg.read() == HalfCarryFlag);
    let mut regc = Register::new(0xFF);
    freg.write(Flags::empty());
    increment_register(&mut regc, &mut freg);
    // wraps to zero
    assert!(regc.read() == 0x00);
    assert!(freg.read() == HalfCarryFlag | ZeroFlag);
}
// Verifies 8-bit add: result stored in first register, HalfCarry from bit 3,
// Carry from bit 7, Zero on zero result.
#[test]
fn test_add_reg_with_reg() {
    let mut first = Register::new(0x05);
    let mut second = Register::new(0x0B);
    let mut flags = Register::new(SubtractFlag | CarryFlag);
    add(&mut first, second.read(), &mut flags);
    assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
    assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
    let mut a = Register::new(0xFA);
    let mut b = Register::new(0x07);
    add(&mut a, b.read(), &mut flags);
    // 0xFA + 0x07 wraps to 0x01
    assert!(a.read() == 0x01);
    assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
    a.write(0);
    b.write(0);
    add(&mut a, b.read(), &mut flags);
    assert!(a.read() == 0x0);
    assert!(flags.read() == ZeroFlag);
}
// Verifies ld_u8_immediate: reads the byte at PC into the register and
// advances PC by one.
#[test]
fn test_ld_u8_immediate() {
    let mut mem = EmptyMemory::new(65536);
    let mut pc = Register::new(11);
    let mut reg = Register::new(0);
    mem.write_byte(11, 0xFA);
    ld_u8_immediate(&mem, &mut pc, &mut reg);
    assert!(reg.read() == 0xFA);
    assert!(pc.read() == 12);
}
// Verifies ld_reg_to_reg copies source into target and leaves source intact.
#[test]
fn test_ld_reg_to_reg() {
    let mut target = Register::new(5);
    let mut source = Register::new(10);
    ld_reg_to_reg(&mut target, &source);
    assert!(target.read() == 10);
    assert!(source.read() == 10);
}
// Verifies ld_u16_immediate: reads the word at PC, splits it into the high
// and low registers, and advances PC by two.
#[test]
fn test_ld_u16_immediate() {
    let mut mem = EmptyMemory::new(65536);
    let mut pc = Register::new(11);
    let mut reg = Register::new(0);
    let mut reg2 = Register::new(0);
    mem.write_word(11, 0xDEAB);
    ld_u16_immediate(&mem, &mut pc, &mut reg, &mut reg2);
    assert!(pc.read() == 13);
    assert!(reg.read() == 0xDE);
    assert!(reg2.read() == 0xAB);
}
// Verifies write_value_to_memory_at_address packs (msb, lsb) into the
// target address.
#[test]
fn test_write_value_to_memory_at_address() {
    let mut mem = EmptyMemory::new(65536);
    let mut msb = 0xFF;
    let mut lsb = 0x11;
    let val = 100;
    write_value_to_memory_at_address(&mut mem, val, msb, lsb);
    assert!(mem.read_byte(0xFF11) == val, "Memory does match what was written");
}
// Verifies increment_register_pair, including the carry from the low byte
// into the high byte (0x10FF -> 0x1100).
#[test]
fn test_increment_register_pair() {
    let mut msb = Register::new(0x11);
    let mut lsb = Register::new(0x11);
    increment_register_pair(&mut msb, &mut lsb);
    assert!(msb.read() == 0x11);
    assert!(lsb.read() == 0x12);
    let mut msb_2 = Register::new(0x10);
    let mut lsb_2 = Register::new(0xFF);
    increment_register_pair(&mut msb_2, &mut lsb_2);
    assert!(msb_2.read() == 0x11);
    assert!(lsb_2.read() == 0x00);
}
// Change zeroflag in shift and rotate instructions
use memory::{EmptyMemory, Memory, pack_u16, high_byte, low_byte, low_nibble, high_nibble};
use extensions::Incrementor;
use cpu::{Register, Flags, CarryFlag, HalfCarryFlag, ZeroFlag, SubtractFlag};
/// True when adding the low nibbles (plus an optional carry-in) overflows
/// bit 3 — i.e. the addition produces a half-carry.
fn half_carry_for_add(val1: u8, val2: u8, with_carry: bool) -> bool {
    let carry_in = if with_carry { 1 } else { 0 };
    // Nibble sums max out at 15 + 15 + 1, so this cannot overflow a u8.
    low_nibble(val1) + low_nibble(val2) + carry_in > 0x0F
}
/// True when the full 8-bit addition (plus an optional carry-in) overflows
/// bit 7 — i.e. the addition produces a carry.
fn carry_for_add(val1: u8, val2: u8, with_carry: bool) -> bool {
    let carry_in: u16 = if with_carry { 1 } else { 0 };
    (val1 as u16) + (val2 as u16) + carry_in > 0xFF
}
/// Shared implementation of ADD / ADC: adds `second` (plus carry-in for ADC)
/// into `first` and rewrites the flag register with Zero/Half/Carry.
fn add_internal(first: &mut Register<u8>, second: u8, freg: &mut Register<Flags>, with_carry: bool) {
    let val1 = first.read();
    let val2 = second;
    // Explicit wrapping: 8-bit adds are expected to wrap (see 0xFA + 0x07 test).
    let mut result = val1.wrapping_add(val2);
    let mut do_carry = false;
    // Carry-in only applies for ADC and only when CarryFlag is currently set.
    if freg.read().contains(CarryFlag) && with_carry {
        do_carry = true;
        result = result.wrapping_add(1);
    }
    let mut flags = Flags::empty();
    if half_carry_for_add(val1, val2, do_carry) {
        flags = flags | HalfCarryFlag;
    }
    if carry_for_add(val1, val2, do_carry) {
        flags = flags | CarryFlag;
    }
    // Fix: OR ZeroFlag in rather than overwriting, so a wrap to zero
    // (e.g. 0xFF + 0x01) still reports HalfCarry/Carry alongside Zero.
    if result == 0 {
        flags = flags | ZeroFlag;
    }
    first.write(result);
    freg.write(flags);
}
/// Add the value of two registers and store it in the first register
/// (plain ADD: no carry-in; flags are rewritten by add_internal).
pub fn add(first: &mut Register<u8>, second: u8, freg: &mut Register<Flags>) {
    add_internal(first, second, freg, false);
}
/// Load the value from one register into another; the source is unchanged.
pub fn ld_reg_to_reg(target: &mut Register<u8>, source: &Register<u8>) {
    let copied = source.read();
    debug!("ld_reg_to_reg: {:X}", copied);
    target.write(copied);
}
/// Loads the single u8 immediate at PC into a register and advances PC by
/// one. (The old comment wrongly said "next two bytes".)
pub fn ld_u8_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>) {
    let val = u8_immediate(mem, pc);
    debug!("ld_next_byte_to_reg: {:X}", val);
    reg.write(val);
}
/// Loads the next two bytes into the passed in registers: the u16 immediate
/// at PC is split into its high/low bytes and PC advances by two.
pub fn ld_u16_immediate(mem: &Memory, pc: &mut Register<u16>,
                        hb: &mut Register<u8>, lb: &mut Register<u8>) {
    let val = u16_immediate(mem, pc);
    debug!("ld_next_two_bytes_into_reg_pair: {:X}", val);
    hb.write(high_byte(val));
    lb.write(low_byte(val));
}
/// Writes the passed in value to memory at the address pointed to by combined
/// address parameters (msb:lsb packed into a 16-bit address).
pub fn write_value_to_memory_at_address(mem: &mut Memory, val: u8, addr_msb: u8, addr_lsb: u8) {
    let addr = pack_u16(addr_msb, addr_lsb);
    debug!("write val to mem addr: {:X} val: {:X}", addr, val);
    mem.write_byte(addr as u16, val);
}
/// Increments the pair of registers as if they represent a 16-bit value
pub fn increment_register_pair(msb: &mut Register<u8>,lsb: &mut Register<u8>) {
debug!("increment register pair");
let incremented_val = ((msb.read() as uint) << 8) + lsb.read() as uint + 1;
msb.write(high_byte(incremented_val as u16));
lsb.write(low_byte(incremented_val as u16));
}
/// Increment register by 1
/// Set ZeroFlag if result is 0
/// Set HalfCarryFlag if there is a carry from bit 3
/// CarryFlag is left untouched; SubtractFlag is cleared.
pub fn increment_register(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = reg.read();
    let mut flags = freg.read();
    flags.remove(SubtractFlag);
    flags.remove(HalfCarryFlag);
    flags.remove(ZeroFlag);
    // Half-carry is decided from the pre-increment value: 0x?F -> 0x?0
    if low_nibble(val) == 0xF {
        flags.insert(HalfCarryFlag);
    }
    reg.increment();
    if reg.read() == 0 {
        flags.insert(ZeroFlag);
    }
    debug!("increment reg new_val: {:X}", reg.read());
    freg.write(flags);
}
/// Decrement register by 1
/// Set ZeroFlag if result is 0
/// Set SubtractFlag
/// Set HalfCarryFlag if there is no borrow from bit 4
/// (this codebase sets the flag on NO borrow; CarryFlag is untouched)
pub fn decrement_register(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = reg.read();
    let mut flags = freg.read() | SubtractFlag;
    flags.remove(ZeroFlag);
    flags.remove(HalfCarryFlag);
    // Nonzero low nibble before the decrement means no borrow from bit 4
    if (val & 0x0F) > 0 {
        flags = flags | HalfCarryFlag;
    }
    reg.decrement();
    if reg.read() == 0x00 {
        flags = flags | ZeroFlag;
    }
    freg.write(flags);
}
/// RLC core: rotate `val` left by one (bit 7 wraps to bit 0), replacing the
/// flags with ZeroFlag on a zero result or CarryFlag when bit 7 was set.
fn internal_rotate_left_with_carry(val: u8, freg: &mut Register<Flags>) -> u8 {
    let rotated = (val << 1) | (val >> 7);
    if rotated == 0 {
        // Only zero rotates to zero, and then bit 7 was clear too.
        freg.write(ZeroFlag);
    } else if val & 0x80 != 0 {
        freg.write(CarryFlag);
    } else {
        freg.write(Flags::empty());
    }
    return rotated;
}
/// Rotate register left
/// Set ZeroFlag if result is zero
/// Set CarryFlag if bit 7 is 1
pub fn rotate_left_with_carry(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = internal_rotate_left_with_carry(reg.read(), freg);
    reg.write(val);
}
/// Same as rotate_left_with_carry but operating on the byte in memory at
/// `addr` (RLC (HL) style): read, rotate, write back.
pub fn rotate_left_with_carry_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let val = internal_rotate_left_with_carry(mem.read_byte(addr), freg);
    mem.write_byte(addr, val);
}
/// Write sp to address with value of next two bytes: the target address is
/// the u16 immediate at PC (PC advances by two), then `val` is stored there.
pub fn write_u16_immediate_address(mem: &mut Memory, pc: &mut Register<u16>, val: u16){
    let addr = u16_immediate(mem, pc);
    debug!("Writing {} to {}", val, addr);
    mem.write_word(addr, val);
}
/// Performs no operation and consumes a cycle
/// (cycle accounting is presumably handled by the caller — nothing here).
pub fn nop() {
}
/// Adds two sets of registers as 16 bit numbers with carries counted on bit 11 and 15.
/// SubtractFlag is cleared, ZeroFlag is preserved, Carry/HalfCarry recomputed.
pub fn add_register_pair_to_register_pair(rega: &mut Register<u8>, regb: &mut Register<u8>, reg1: u8, reg2: u8, freg: &mut Register<Flags>) {
    let first = pack_u16(rega.read(), regb.read());
    let second = pack_u16(reg1, reg2);
    // Fix: widen to 32 bits. The old `first + second` could overflow u16, and
    // the old nibble-only checks missed carries propagating in from lower
    // bits (e.g. 0x8FFF + 0x7001 produced no CarryFlag).
    let total = first as u32 + second as u32;
    let sum = total as u16;
    // Reset subtract flag, leave ZeroFlag alone
    let mut flags = freg.read();
    flags.remove(SubtractFlag);
    flags.remove(CarryFlag);
    flags.remove(HalfCarryFlag);
    // CarryFlag: carry out of bit 15
    if total > 0xFFFF {
        flags.insert(CarryFlag)
    }
    // HalfCarryFlag: carry out of bit 11 (max 0xFFF + 0xFFF fits in u16)
    if (first & 0x0FFF) + (second & 0x0FFF) > 0x0FFF {
        flags.insert(HalfCarryFlag);
    }
    rega.write(high_byte(sum));
    regb.write(low_byte(sum));
    freg.write(flags);
}
/// Load the byte at `addr` into `reg`.
pub fn ld_from_address(mem: &Memory, reg: &mut Register<u8>, addr: u16) {
    reg.write(mem.read_byte(addr));
}
/// Decrement the register pair as a 16-bit value, borrowing across the byte
/// boundary. Fix: wrap 0x0000 -> 0xFFFF explicitly — the plain `val - 1`
/// underflows on a zero pair instead of wrapping.
pub fn decrement_register_pair(reg1: &mut Register<u8>, reg2: &mut Register<u8>) {
    let val = pack_u16(reg1.read(), reg2.read());
    let ans = val.wrapping_sub(1);
    reg1.write(high_byte(ans));
    reg2.write(low_byte(ans));
}
fn internal_rotate_right_with_carry(val: u8, freg: &mut Register<Flags>) -> u8 {
let bit_1 = (val & 0x01) << 7;
let result = ((val >> 1) | bit_1);
if result == 0 {
freg.write(ZeroFlag);
}
if val & 0x01 == 1 {
freg.write(CarryFlag);
}
return result;
}
/// Rotate register right (RRC): bit 0 wraps to bit 7; flags come from
/// internal_rotate_right_with_carry.
pub fn rotate_right_with_carry(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = internal_rotate_right_with_carry(reg.read(), freg);
    reg.write(val);
}
/// Same as rotate_right_with_carry but operating on the byte in memory at
/// `addr`: read, rotate, write back.
pub fn rotate_right_with_carry_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let val = internal_rotate_right_with_carry(mem.read_byte(addr), freg);
    mem.write_byte(addr, val);
}
/// Add n to current address and jump to it - n = one byte signed immediate value.
/// The immediate is fetched first (PC += 1), then the signed offset is applied
/// to the post-fetch PC.
pub fn jump_by_signed_immediate(mem: &Memory, pc: &mut Register<u16>) {
    let offset = u8_immediate(mem, pc);
    let current_pc = pc.read();
    let mut new_pc = 0;
    // Bit 7 of the raw byte decides the sign; positive offsets add directly
    if (offset & 0x80) == 0 {
        new_pc = current_pc + offset as u16;
        debug!("jmp signed immediate new_pc: {:X}", new_pc);
    } else {
        // Negative: sign-extend through i8 -> i16 before adding
        new_pc = (current_pc as i16 + (offset as i8) as i16) as u16;
        debug!("jmp signed immediate new_pc: {:X}", new_pc);
    }
    debug!("jmp {:X}", new_pc);
    pc.write(new_pc);
}
/// LD (pair+),val style helper: write `val` to the address held in the
/// register pair, then increment the pair as a 16-bit value.
pub fn write_value_to_memory_at_address_and_increment_register(mem: &mut Memory, val: u8, high_reg: &mut Register<u8>, low_reg: &mut Register<u8>) {
    let address = pack_u16(high_reg.read(), low_reg.read());
    mem.write_byte(address, val);
    // NOTE(review): `address + 1` does not wrap at 0xFFFF — confirm intended
    let new_address = address + 1;
    high_reg.write(high_byte(new_address));
    low_reg.write(low_byte(new_address));
}
/// LD (pair-),val style helper: write `val` to the address held in the
/// register pair, then decrement the pair as a 16-bit value.
pub fn write_value_to_memory_at_address_and_decrement_register(mem: &mut Memory, val: u8, high_reg: &mut Register<u8>, low_reg: &mut Register<u8>) {
    let address = pack_u16(high_reg.read(), low_reg.read());
    mem.write_byte(address, val);
    // NOTE(review): `address - 1` does not wrap at 0x0000 — confirm intended
    let new_address = address - 1;
    high_reg.write(high_byte(new_address));
    low_reg.write(low_byte(new_address));
}
/// Conditional relative jump (JR cc,n style): take the signed-immediate jump
/// when the condition holds, otherwise just step past the operand byte.
pub fn relative_jmp_by_signed_immediate_if_true(mem: &Memory, pc: &mut Register<u16>, should_jump: bool) {
    if !should_jump {
        // Not taken: skip over the one-byte immediate operand.
        pc.increment();
        return;
    }
    jump_by_signed_immediate(mem, pc);
}
/// LD r,(pair+) style helper: load the byte at the pair's address into `reg`,
/// then increment the register pair.
pub fn ld_from_address_pointed_to_by_register_pair_and_increment_register_pair(mem: &Memory, reg: &mut Register<u8>, high_byte: &mut Register<u8>, low_byte: &mut Register<u8>) {
    let address = pack_u16(high_byte.read(), low_byte.read());
    let val = mem.read_byte(address);
    reg.write(val);
    increment_register_pair(high_byte, low_byte);
    debug!("ld from address and inc reg pair - addr: {:X} val: {:X} reg_pair_val: {:X}", address, val, pack_u16(high_byte.read(), low_byte.read()));
}
/// CPL: bitwise-invert the register; set HalfCarry and Subtract, keeping any
/// flags that were already set.
pub fn complement(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let inverted = !reg.read();
    reg.write(inverted);
    let new_flags = freg.read() | HalfCarryFlag | SubtractFlag;
    freg.write(new_flags);
}
/// Loads the next two bytes into the passed in register (sp):
/// the u16 immediate at PC is written whole, and PC advances by two.
pub fn ld_next_two_bytes_into_reg(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u16>) {
    let val = u16_immediate(mem, pc);
    debug!("ld_next_two_bytes_into_reg: {:X}", val);
    reg.write(val);
}
/// INC (addr): increment the byte at the (hb, lb) address in place, applying
/// increment_register's flag rules via a temporary register.
pub fn increment_value_at_address(mem: &mut Memory, hb: u8, lb: u8, freg: &mut Register<Flags>) {
    let addr = pack_u16(hb, lb);
    let val = mem.read_byte(addr);
    let mut reg = Register::new(val);
    increment_register(&mut reg, freg);
    mem.write_byte(addr, reg.read());
}
/// DEC (addr): decrement the byte at the (hb, lb) address in place, applying
/// decrement_register's flag rules via a temporary register.
pub fn decrement_value_at_address(mem: &mut Memory, hb: u8, lb: u8, freg: &mut Register<Flags>) {
    let addr = pack_u16(hb, lb);
    let val = mem.read_byte(addr);
    let mut reg = Register::new(val);
    decrement_register(&mut reg, freg);
    mem.write_byte(addr, reg.read());
}
/// LD (addr),n: store the u8 immediate at PC (PC advances by one) into the
/// address packed from (hb, lb).
pub fn ld_u8_immediate_into_address(mem: &mut Memory, pc: &mut Register<u16>, hb: u8, lb: u8) {
    let addr = pack_u16(hb, lb);
    let val = u8_immediate(mem, pc);
    mem.write_byte(addr, val);
}
/// Set `flag` in the flag register, preserving every other flag.
pub fn set_flag(freg: &mut Register<Flags>, flag: Flags) {
    let updated = freg.read() | flag;
    freg.write(updated);
}
/// LD r,(pair-) style helper: load the byte at the pair's address into `reg`,
/// then decrement the register pair.
pub fn ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair(mem: &Memory, reg: &mut Register<u8>, high_byte: &mut Register<u8>, low_byte: &mut Register<u8>) {
    let address = pack_u16(high_byte.read(), low_byte.read());
    let val = mem.read_byte(address);
    reg.write(val);
    decrement_register_pair(high_byte, low_byte);
}
/// CCF: complement the carry flag; Subtract and HalfCarry are cleared,
/// ZeroFlag is left as-is.
pub fn ccf(freg: &mut Register<Flags>) {
    let mut f = freg.read();
    f.remove(SubtractFlag);
    f.remove(HalfCarryFlag);
    f.toggle(CarryFlag);
    freg.write(f);
}
/// Load an explicit value into a register (no flags affected).
pub fn ld_u8(reg: &mut Register<u8>, val: u8) {
    debug!("load val: {:X} into register", val);
    reg.write(val);
}
/// ADD r,(addr): add the byte at the (hb, lb) address into `reg` with the
/// usual ADD flag behaviour.
pub fn add_value_at_address(mem: &Memory, reg: &mut Register<u8>, hb: u8, lb: u8, freg: &mut Register<Flags>) {
    let val = mem.read_byte(pack_u16(hb, lb));
    add(reg, val, freg);
}
/// ADC: add with carry-in — delegates to add_internal with with_carry = true.
pub fn adc(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
    add_internal(reg, val, freg, true);
}
/// ADC r,(addr): add the byte at `address` into `reg` with carry-in.
pub fn adc_value_at_address(mem: &Memory, reg: &mut Register<u8>, address: u16, freg: &mut Register<Flags>) {
    let val = mem.read_byte(address);
    adc(reg, val, freg);
}
/// Half-carry predicate for subtraction. NOTE: this codebase uses the
/// inverted convention — true means NO borrow from bit 4 (see the
/// decrement_register doc and its tests), not "borrow occurred".
fn half_carry_for_subtract(val1: u8, val2: u8, carry: u8) -> bool {
    // Special case: subtrahend nibble plus carry is exactly 0x10
    // NOTE(review): `low_nibble(val2) + carry` relies on u8 arithmetic that
    // cannot overflow here (max 15 + 1); the val1 >= 0x10 guard's intent is
    // unclear from this file alone — confirm against the CPU tests.
    if low_nibble(val2) + carry == 0x10 && val1 >= 0x10 {
        return true;
    } else {
        return low_nibble(val1) >= (low_nibble(val2) + carry);
    }
}
/// Carry predicate for subtraction — same inverted convention as the
/// half-carry helper: true means NO borrow (val1 can cover val2 + carry).
/// NOTE(review): `val2 + carry` can overflow u8 when val2 == 0xFF and
/// carry == 1 — confirm whether that combination is reachable.
fn carry_for_subtract(val1: u8, val2: u8, carry: u8) -> bool {
    return val1 >= (val2 + carry);
}
/// Shared implementation of SUB / SBC: subtract `val` (plus carry-in for SBC)
/// from `reg` and rewrite the flags. SubtractFlag is always set; Half/Carry
/// use this codebase's "set on NO borrow" convention; Zero on a zero result.
pub fn internal_sub(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>, with_carry: bool) {
    let reg_val = reg.read();
    let mut flags = SubtractFlag;
    let mut carry = 0;
    // Carry-in only applies for SBC and only when CarryFlag is currently set
    if freg.read().contains(CarryFlag) && with_carry {
        carry = 1;
    }
    if half_carry_for_subtract(reg_val, val, carry) {
        flags.insert(HalfCarryFlag);
    }
    if carry_for_subtract(reg_val, val, carry) {
        flags.insert(CarryFlag);
    }
    // NOTE(review): this subtraction underflows when val + carry > reg_val;
    // wrapping appears to be the intent — confirm against the CPU tests.
    let result = reg_val - val - carry;
    if result == 0 {
        flags.insert(ZeroFlag);
    }
    debug!("internal sub result {:X}", result);
    reg.write(result);
    freg.write(flags);
}
/// SUB: subtract without carry-in.
pub fn sub(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
    internal_sub(reg, val, freg, false);
}
/// SBC: subtract with carry-in.
pub fn sbc(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
    internal_sub(reg, val, freg, true);
}
/// SUB r,(addr): subtract the byte at `addr` from `reg` without carry-in.
pub fn sub_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
    let val = mem.read_byte(addr);
    internal_sub(reg, val, freg, false);
}
/// SBC r,(addr): subtract the byte at `addr` from `reg` with carry-in.
pub fn sbc_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
    let val = mem.read_byte(addr);
    internal_sub(reg, val, freg, true);
}
/// AND: bitwise-and `val` into `reg`. HalfCarryFlag is always set; ZeroFlag
/// is added when the result is zero; Subtract and Carry are cleared.
pub fn and(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
    let result = reg.read() & val;
    reg.write(result);
    if result == 0 {
        freg.write(HalfCarryFlag | ZeroFlag);
    } else {
        freg.write(HalfCarryFlag);
    }
}
/// XOR: bitwise-xor `val` into `reg`. All flags cleared except ZeroFlag,
/// which is set when the result is zero.
pub fn xor(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
    let result = reg.read() ^ val;
    reg.write(result);
    if result == 0 {
        freg.write(ZeroFlag);
    } else {
        freg.write(Flags::empty());
    }
}
/// AND r,(addr): and the byte at `addr` into `reg`.
pub fn and_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
    let val = mem.read_byte(addr);
    and(reg, val, freg);
}
/// XOR r,(addr): xor the byte at `addr` into `reg`.
pub fn xor_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
    let val = mem.read_byte(addr);
    xor(reg, val, freg);
}
/// OR: bitwise-or `val` into `reg`. All flags cleared except ZeroFlag, which
/// is set when the result is zero.
pub fn or(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
    let result = reg.read() | val;
    reg.write(result);
    if result == 0 {
        freg.write(ZeroFlag);
    } else {
        freg.write(Flags::empty());
    }
}
pub fn compare(reg: &mut Register<u8>, val: u8, freg: &mut Register<Flags>) {
let reg_val = reg.read();
let mut flags = SubtractFlag;
let mut carry = 0;
if half_carry_for_subtract(reg_val, val, carry) {
flags.insert(HalfCarryFlag);
}
if carry_for_subtract(reg_val, val, carry) {
flags.insert(CarryFlag);
}
if (reg_val == val) {
flags.insert(ZeroFlag);
}
freg.write(flags);
}
/// OR r,(addr): or the byte at `addr` into `reg`.
pub fn or_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
    let val = mem.read_byte(addr);
    or(reg, val, freg);
}
/// CP r,(addr): compare `reg` with the byte at `addr` (flags only).
pub fn compare_value_at_address(mem: &Memory, reg: &mut Register<u8>, addr: u16, freg: &mut Register<Flags>) {
    let val = mem.read_byte(addr);
    compare(reg, val, freg);
}
/// PUSH: pre-decrement SP by two, then store `val` as a word at the new SP.
pub fn push(mem: &mut Memory, sp: &mut Register<u16>, val: u16) {
    sp.decrement();
    sp.decrement();
    debug!("push sp:{:X} val:{:X}", sp.read(), val);
    mem.write_word(sp.read(), val);
}
/// Read the word at SP and post-increment SP by two (shared by POP/RET).
fn pop_internal(mem: &Memory, sp: &mut Register<u16>) -> u16 {
    let val = mem.read_word(sp.read());
    sp.increment();
    sp.increment();
    return val;
}
/// POP rr: pop a word from the stack into a register pair (high/low split).
pub fn pop(mem: &Memory, sp: &mut Register<u16>, hb: &mut Register<u8>, lb: &mut Register<u8>) {
    let val = pop_internal(mem, sp);
    debug!("pop val: {:X}", val);
    hb.write(high_byte(val));
    lb.write(low_byte(val));
}
/// RET (optionally conditional): when `should_execute`, pop the return
/// address from the stack into PC; otherwise do nothing.
pub fn ret(mem: &Memory, pc: &mut Register<u16>, sp: &mut Register<u16>, should_execute: bool) {
    if should_execute {
        let addr = pop_internal(mem, sp);
        debug!("ret to {:X}", addr);
        pc.write(addr);
    }
}
/// JP nn: jump to the u16 immediate at PC.
pub fn jp_u16_immediate(mem: &Memory, pc: &mut Register<u16>) {
    let addr = u16_immediate(mem, pc);
    debug!("jmp {:X}", addr);
    pc.write(addr);
}
/// JP cc,nn: jump to the u16 immediate when the condition holds; otherwise
/// step PC past the two operand bytes.
pub fn jp_u16_immediate_if_true(mem: &Memory,pc: &mut Register<u16>, should_jump: bool) {
    if !should_jump {
        // Not taken: skip the two-byte jump target.
        pc.increment();
        pc.increment();
        return;
    }
    jp_u16_immediate(mem, pc);
}
/// CALL cc,nn: when the condition holds, fetch the u16 immediate target
/// (PC advances past it first, so the pushed return address is correct) and
/// call it; otherwise just skip the two operand bytes.
pub fn call_immediate_if_true(mem: &mut Memory, pc: &mut Register<u16>, sp: &mut Register<u16>, should_jump: bool) {
    if should_jump {
        let new_addr = u16_immediate(mem, pc);
        call(mem, pc, sp, new_addr);
    } else {
        pc.increment();
        pc.increment();
    }
}
/// ADD/ADC A,n: add the u8 immediate at PC into `reg` (with carry-in when
/// `with_carry` is set).
pub fn add_u8_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>, freg: &mut Register<Flags>, with_carry: bool) {
    let val = u8_immediate(mem, pc);
    add_internal(reg, val, freg, with_carry);
}
/// CALL: push the current PC (return address) onto the stack, then jump.
pub fn call(mem: &mut Memory, pc: &mut Register<u16>, sp: &mut Register<u16>, addr: u16) {
    push(mem, sp, pc.read());
    pc.write(addr);
}
/// SUB/SBC A,n: subtract the u8 immediate at PC from `reg` (with carry-in
/// when `with_carry` is set).
pub fn sub_u8_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>, freg: &mut Register<Flags>, with_carry: bool) {
    let val = u8_immediate(mem, pc);
    internal_sub(reg, val, freg, with_carry);
}
/// RETI: return from interrupt — unconditional RET, then re-enable the
/// interrupt master enable flag.
pub fn reti(mem: &Memory, pc: &mut Register<u16>, sp: &mut Register<u16>, ime: &mut bool) {
    ret(mem, pc, sp, true);
    *ime = true;
}
/// Fetch the u8 operand at PC and advance PC by one.
pub fn u8_immediate(mem: &Memory, pc: &mut Register<u16>) -> u8 {
    let val = mem.read_byte(pc.read());
    pc.increment();
    return val;
}
/// Fetch the u16 operand at PC and advance PC by two.
pub fn u16_immediate(mem: &Memory, pc: &mut Register<u16>) -> u16 {
    let val = mem.read_word(pc.read());
    pc.increment();
    pc.increment();
    return val;
}
/// JP: unconditional jump to `addr`.
pub fn jp(pc: &mut Register<u16>, addr: u16) {
    pc.write(addr);
}
/// BIT pos,r: test bit `pos` of `reg_val`. ZeroFlag reflects whether the bit
/// is clear; HalfCarry is set, Subtract cleared, CarryFlag untouched.
/// Panics if `pos` is not in 0-7.
pub fn bit(reg_val: u8, pos: u8, freg: &mut Register<Flags>) {
    let mut flags = freg.read();
    flags.remove(SubtractFlag);
    flags.insert(HalfCarryFlag);
    assert!(pos < 8, "Bit positions are 0-7");
    let mask = 0x01 << pos as uint;
    let val = reg_val & mask;
    if val == 0 {
        flags.insert(ZeroFlag);
    } else {
        flags.remove(ZeroFlag);
    }
    freg.write(flags);
}
/// Convenience wrapper: read one byte from memory at `addr`.
pub fn byte_at_address(mem: &Memory, addr: u16) -> u8 {
    mem.read_byte(addr)
}
/// RES pos,r: clear bit `pos` of the register.
pub fn res(reg: &mut Register<u8>, pos: u8) {
    let mask = 0x01 << pos as uint;
    let val = reg.read();
    let reset_val = val & (!mask);
    reg.write(reset_val);
}
/// RES pos,(addr): clear bit `pos` of the byte in memory at `address`.
pub fn res_at_addr(mem: &mut Memory, address: u16, pos: u8) {
    let mask = 0x01 << pos as uint;
    let val = mem.read_byte(address);
    let reset_val = val & (!mask);
    mem.write_byte(address, reset_val);
}
/// SET pos,r: set bit `pos` of the register.
pub fn set(reg: &mut Register<u8>, pos: u8) {
    let mask = 0x01 << pos as uint;
    let updated = reg.read() | mask;
    reg.write(updated);
}
/// SET pos,(addr): set bit `pos` of the byte in memory at `address`.
pub fn set_at_addr(mem: &mut Memory, address: u16, pos: u8) {
    let mask = 0x01 << pos as uint;
    let updated = mem.read_byte(address) | mask;
    mem.write_byte(address, updated);
}
/// RL r: rotate left through the carry flag (old CarryFlag enters bit 0).
pub fn rotate_left(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = internal_rotate_left(reg.read(), freg);
    reg.write(val);
}
/// RL (addr): rotate the byte in memory left through the carry flag.
pub fn rotate_left_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let val = internal_rotate_left(mem.read_byte(addr), freg);
    mem.write_byte(addr, val);
}
/// RL core: rotate left through the carry flag — the incoming CarryFlag
/// becomes bit 0, the old bit 7 becomes the new CarryFlag.
fn internal_rotate_left(val: u8, freg: &mut Register<Flags>) -> u8 {
    let mut carry = 0;
    if freg.read().contains(CarryFlag) {
        carry = 1;
    }
    let result = (val << 1) | carry;
    // Fix: build the flags with insert instead of successive write() calls,
    // which let CarryFlag overwrite ZeroFlag (e.g. val 0x80 with carry clear
    // yields result 0 and must report Zero AND Carry).
    let mut flags = Flags::empty();
    if result == 0 {
        flags.insert(ZeroFlag);
    }
    if val & 0x80 != 0 {
        flags.insert(CarryFlag);
    }
    freg.write(flags);
    return result;
}
/// RR core: rotate right through the carry flag — the incoming CarryFlag
/// becomes bit 7, the old bit 0 becomes the new CarryFlag.
fn internal_rotate_right(val: u8, freg: &mut Register<Flags>) -> u8 {
    let mut carry = 0;
    if freg.read().contains(CarryFlag) {
        carry = 0x80;
    }
    let result = (val >> 1) | carry;
    // Fix: build the flags with insert instead of successive write() calls,
    // which let CarryFlag overwrite ZeroFlag (e.g. val 0x01 with carry clear
    // yields result 0 and must report Zero AND Carry).
    let mut flags = Flags::empty();
    if result == 0 {
        flags.insert(ZeroFlag);
    }
    if val & 0x01 != 0 {
        flags.insert(CarryFlag);
    }
    freg.write(flags);
    return result;
}
/// RR r: rotate right through the carry flag (old CarryFlag enters bit 7).
pub fn rotate_right(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = internal_rotate_right(reg.read(), freg);
    reg.write(val);
}
/// RR (addr): rotate the byte in memory right through the carry flag.
pub fn rotate_right_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let val = internal_rotate_right(mem.read_byte(addr), freg);
    mem.write_byte(addr, val);
}
/// SLA core: shift left arithmetic — bit 7 goes to CarryFlag, bit 0 becomes 0.
fn internal_sla(val: u8, freg: &mut Register<Flags>) -> u8 {
    let result = val << 1;
    // Fix: build the flags with insert instead of successive write() calls,
    // which let CarryFlag overwrite ZeroFlag (SLA 0x80 -> 0x00 must report
    // Zero AND Carry).
    let mut flags = Flags::empty();
    if result == 0 {
        flags.insert(ZeroFlag);
    }
    if val & 0x80 != 0 {
        flags.insert(CarryFlag);
    }
    freg.write(flags);
    return result;
}
/// SLA r: shift the register left arithmetically.
pub fn sla(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = internal_sla(reg.read(), freg);
    reg.write(val);
}
/// SLA (addr): shift the byte in memory left arithmetically.
pub fn sla_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let val = internal_sla(mem.read_byte(addr), freg);
    mem.write_byte(addr, val);
}
/// SRA core: shift right arithmetic — bit 7 is preserved (sign kept) and
/// the old bit 0 goes to CarryFlag.
fn internal_sra(val: u8, freg: &mut Register<Flags>) -> u8 {
    let bit_7 = val & 0x80;
    let result = (val >> 1) | bit_7;
    // Fix: removed the early `return 0` on a zero result, which skipped the
    // carry-out check (SRA 0x01 -> 0x00 must report Zero AND Carry).
    let mut flags = Flags::empty();
    if result == 0 {
        flags.insert(ZeroFlag);
    }
    if val & 0x01 == 1 {
        flags.insert(CarryFlag);
    }
    freg.write(flags);
    return result;
}
/// SRA r: shift the register right arithmetically (bit 7 preserved).
pub fn sra(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = internal_sra(reg.read(), freg);
    reg.write(val);
}
/// SRA (addr): shift the byte in memory right arithmetically.
pub fn sra_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let val = internal_sra(mem.read_byte(addr), freg);
    mem.write_byte(addr, val);
}
/// SWAP core: exchange the high and low nibbles; ZeroFlag on a zero result,
/// all other flags cleared.
fn internal_swap(val: u8, freg: &mut Register<Flags>) -> u8 {
    let swapped = (low_nibble(val) << 4) + high_nibble(val);
    if swapped == 0 {
        freg.write(ZeroFlag);
    } else {
        freg.write(Flags::empty());
    }
    return swapped;
}
/// SWAP r: exchange the register's high and low nibbles.
pub fn swap(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
    let val = internal_swap(reg.read(), freg);
    reg.write(val);
}
/// SWAP (addr): exchange the nibbles of the byte in memory.
pub fn swap_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
    let val = internal_swap(mem.read_byte(addr), freg);
    mem.write_byte(addr, val);
}
/// SRL core: shift right logical — zero shifted into bit 7, old bit 0 goes
/// to CarryFlag.
fn internal_srl(val: u8, freg: &mut Register<Flags>) -> u8 {
    let result = val >> 1;
    // Fix: build the flags with insert instead of successive write() calls,
    // which let CarryFlag overwrite ZeroFlag (SRL 0x01 -> 0x00 must report
    // Zero AND Carry).
    let mut flags = Flags::empty();
    if result == 0 {
        flags.insert(ZeroFlag);
    }
    if val & 0x01 == 1 {
        flags.insert(CarryFlag);
    }
    freg.write(flags);
    return result;
}
pub fn srl(reg: &mut Register<u8>, freg: &mut Register<Flags>) {
let val = internal_sra(reg.read(), freg);
reg.write(val);
}
pub fn srl_at_address(mem: &mut Memory, addr: u16, freg: &mut Register<Flags>) {
let val = internal_sra(mem.read_byte(addr), freg);
mem.write_byte(addr, val);
}
/// DI: clear the interrupt master enable flag.
pub fn disable_interrupts(ime: &mut bool) {
    debug!("disable interrupts");
    *ime = false;
}
/// LD (nn),r style: the target address is the u16 immediate at PC (PC
/// advances by two), then `val` is written there as a single byte.
pub fn write_value_to_u16_immediate(mem: &mut Memory, pc: &mut Register<u16>, val: u8) {
    let addr = u16_immediate(mem, pc);
    debug!("write value to u16 immediate val: {:X} addr: {:X}", val, addr);
    mem.write_byte(addr, val);
}
/// LDH (n),r style: write `val` to 0xFF00 + the u8 immediate at PC
/// (the high-RAM / I/O page).
pub fn write_val_FF00_plus_immediate(mem: &mut Memory, pc: &mut Register<u16>, val: u8) {
    let lb = u8_immediate(mem, pc);
    let addr = 0xFF00 + lb as u16;
    debug!("write val: {:X} to addr: {:X}", val, addr);
    mem.write_byte(addr, val);
}
/// POP AF: pop a word from the stack; the high byte goes to A and the low
/// byte is converted to Flags (undefined bits dropped via from_bits_truncate).
pub fn pop_flags(mem: &Memory, sp: &mut Register<u16>, a: &mut Register<u8>, f: &mut Register<Flags>) {
    let val = pop_internal(mem, sp);
    debug!("POP AF val: {:X}", val);
    a.write(high_byte(val));
    f.write(Flags::from_bits_truncate(low_byte(val)));
}
/// LDH r,(n) style: load `reg` from 0xFF00 + the u8 immediate at PC
/// (the high-RAM / I/O page).
pub fn load_val_FF00_plus_immediate(mem: &Memory, pc: &mut Register<u16>, reg: &mut Register<u8>) {
    let lb = u8_immediate(mem, pc);
    let addr = 0xFF00 + lb as u16;
    let val = mem.read_byte(addr);
    debug!("load val: {:X} into a", val);
    reg.write(val);
}
// Fix: the test body was empty and asserted nothing. Verifies that
// load_val_FF00_plus_immediate reads from 0xFF00 + the u8 immediate and
// advances PC past the operand.
#[test]
fn test_load_val_FF00_plus_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x0100);
    let mut reg = Register::new(0x00);
    mem.write_byte(0x0100, 0x42);
    mem.write_byte(0xFF42, 0xAB);
    load_val_FF00_plus_immediate(&mem, &mut pc, &mut reg);
    assert!(pc.read() == 0x0101);
    assert!(reg.read() == 0xAB);
}
// Verifies pop_flags (POP AF): SP advances by two, A gets the high byte,
// and the low byte is truncated to the defined flag bits.
#[test]
fn test_pop_flags() {
    let mut a = Register::new(0x00);
    let mut f = Register::new(HalfCarryFlag);
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut sp = Register::new(0xFFFC);
    mem.write_word(sp.read(), 0xBCDF);
    pop_flags(&mem, &mut sp, &mut a, &mut f);
    assert!(sp.read() == 0xFFFE);
    assert!(a.read() == 0xBC);
    // 0xDF truncated to the defined flag bits
    assert!(f.read() == ZeroFlag | SubtractFlag | CarryFlag);
}
// Fix: the test body was empty and asserted nothing. Verifies that
// write_val_FF00_plus_immediate writes to 0xFF00 + the u8 immediate and
// advances PC past the operand.
#[test]
fn test_write_val_FF00_plus_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x0100);
    mem.write_byte(0x0100, 0x42);
    write_val_FF00_plus_immediate(&mut mem, &mut pc, 0x99);
    assert!(pc.read() == 0x0101);
    assert!(mem.read_byte(0xFF42) == 0x99);
}
// Verifies write_value_to_u16_immediate: target address comes from the word
// at PC, PC advances by two, and the byte lands at that address.
#[test]
fn test_write_value_to_u16_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x8284;
    let val = 0x92;
    let mut pc = Register::new(0x1234);
    mem.write_word(0x1234, addr);
    write_value_to_u16_immediate(&mut mem, &mut pc, val);
    assert!(pc.read() == 0x1236);
    assert!(mem.read_byte(addr) == val);
}
// Verifies disable_interrupts clears the IME flag.
#[test]
fn test_disable_interrupts() {
    let mut a = true;
    disable_interrupts(&mut a);
    assert!(a == false);
}
// Verifies srl_at_address: logical right shift of the byte in memory, with
// CarryFlag from the old bit 0 and ZeroFlag on a zero result.
// NOTE(review): all inputs here have bit 7 clear, so this does not
// distinguish SRL from SRA.
#[test]
fn test_srl_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut freg = Register::new(HalfCarryFlag);
    let mut val = 0x00;
    let addr = 0x3727;
    mem.write_byte(addr, val);
    srl_at_address(&mut mem, addr, &mut freg);
    assert!(freg.read() == ZeroFlag);
    assert!(mem.read_byte(addr) == 0x00);
    val = 0b00110011;
    mem.write_byte(addr, val);
    srl_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b00011001);
    assert!(freg.read() == CarryFlag);
    srl_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b00001100);
    assert!(freg.read() == CarryFlag);
}
#[test]
fn test_srl() {
let mut reg = Register::new(0x00);
let mut freg = Register::new(HalfCarryFlag);
srl(&mut reg, &mut freg);
assert!(freg.read() == ZeroFlag);
assert!(reg.read() == 0x00);
reg.write(0b00110011);
srl(&mut reg, &mut freg);
assert!(reg.read() == 0b00011001);
assert!(freg.read() == CarryFlag);
srl(&mut reg, &mut freg);
assert!(reg.read() == 0b00001100);
assert!(freg.read() == CarryFlag);
}
// SWAP (HL): nibble swap of the byte in memory. The original test created
// `freg` but never asserted it; flag checks added (mirroring test_swap).
#[test]
fn test_swap_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x2321;
    let mut freg = Register::new(Flags::empty());
    mem.write_byte(addr, 0xCB);
    swap_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0xBC);
    // Non-zero result clears all flags, same as the register variant.
    assert!(freg.read() == Flags::empty());
    // Zero input stays zero and sets only ZeroFlag.
    mem.write_byte(addr, 0x00);
    swap_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
}
// SWAP r: exchanges high and low nibbles; ZeroFlag only for zero input.
#[test]
fn test_swap() {
    let mut reg = Register::new(0x00);
    let mut freg = Register::new(Flags::empty());
    swap(&mut reg, &mut freg);
    assert!(reg.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    reg.write(0xAC);
    swap(&mut reg, &mut freg);
    assert!(reg.read() == 0xCA);
    assert!(freg.read() == Flags::empty());
}
// SRA (HL): arithmetic shift right in memory — bit 7 is preserved,
// bit 0 goes to carry.
#[test]
fn test_sra_at_address() {
    let mut val = 0b10010001;
    let mut freg = Register::new(Flags::empty());
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x2372;
    mem.write_byte(addr, val);
    sra_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b11001000);
    assert!(freg.read() == CarryFlag);
    sra_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b11100100);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0x00);
    sra_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b00001111);
    sra_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b00000111);
    assert!(freg.read() == CarryFlag);
}
// SRA r: register variant, same sign-preserving shift semantics.
#[test]
fn test_sra() {
    let mut reg = Register::new(0b10010001);
    let mut freg = Register::new(Flags::empty());
    sra(&mut reg, &mut freg);
    assert!(reg.read() == 0b11001000);
    assert!(freg.read() == CarryFlag);
    sra(&mut reg, &mut freg);
    assert!(reg.read() == 0b11100100);
    assert!(freg.read() == Flags::empty());
    reg.write(0x00);
    sra(&mut reg, &mut freg);
    assert!(reg.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    reg.write(0b00001111);
    sra(&mut reg, &mut freg);
    assert!(reg.read() == 0b00000111);
    assert!(freg.read() == CarryFlag);
}
// SLA (HL): arithmetic shift left in memory; bit 7 goes to carry.
#[test]
fn test_sla_at_address() {
    let mut val = 0b00001111;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1423;
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    mem.write_byte(addr, val);
    sla_at_address(&mut mem, addr, &mut freg);
    // Rotate should happen
    assert!(mem.read_byte(addr) == 0b00011110);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0x00);
    sla_at_address(&mut mem, addr, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b11001100);
    sla_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b10011000);
    assert!(freg.read() == CarryFlag);
    freg.write(CarryFlag);
    mem.write_byte(addr, 0b11001100);
    sla_at_address(&mut mem, addr, &mut freg);
    // A pre-set carry must not leak into the shifted result (SLA ignores carry-in)
    assert!(mem.read_byte(addr) == 0b10011000);
    assert!(freg.read() == CarryFlag);
}
// SLA r: register variant of the arithmetic shift left.
#[test]
fn test_sla() {
    let mut reg = Register::new(0b00001111);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    sla(&mut reg, &mut freg);
    // Rotate should happen
    assert!(reg.read() == 0b00011110);
    assert!(freg.read() == Flags::empty());
    let mut regb = Register::new(0x00);
    sla(&mut regb, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(regb.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    let mut regc = Register::new(0b11001100);
    freg.write(CarryFlag);
    sla(&mut regc, &mut freg);
    assert!(regc.read() == 0b10011000);
    assert!(freg.read() == CarryFlag);
    regc.write(0b00110011);
    sla(&mut regc, &mut freg);
    assert!(regc.read() == 0b01100110);
    assert!(freg.read() == Flags::empty());
}
// RR (HL): rotate right THROUGH carry — old carry enters bit 7,
// old bit 0 becomes the new carry.
#[test]
fn test_rotate_right_at_address() {
    let mut val = 0b00001111;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1423;
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    mem.write_byte(addr, val);
    rotate_right_at_address(&mut mem, addr, &mut freg);
    // Rotate should happen
    assert!(mem.read_byte(addr) == 0b00000111);
    assert!(freg.read() == CarryFlag);
    mem.write_byte(addr, 0x00);
    freg.write(Flags::empty());
    rotate_right_at_address(&mut mem, addr, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b11001101);
    rotate_right_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b01100110);
    assert!(freg.read() == CarryFlag);
    freg.write(CarryFlag);
    mem.write_byte(addr, 0b11001100);
    rotate_right_at_address(&mut mem, addr, &mut freg);
    // Carry-in rotates into bit 7; bit 0 was clear, so carry ends up cleared
    assert!(mem.read_byte(addr) == 0b11100110);
    assert!(freg.read() == Flags::empty());
}
// RR r: register variant of the rotate right through carry.
#[test]
fn test_rotate_right() {
    let mut reg = Register::new(0b10011001);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_right(&mut reg, &mut freg);
    assert!(reg.read() == 0b01001100);
    assert!(freg.read() == CarryFlag);
    reg.write(0x0);
    freg.write(Flags::empty());
    rotate_right(&mut reg, &mut freg);
    assert!(reg.read() == 0x0);
    assert!(freg.read() == ZeroFlag);
    reg.write(0x01);
    freg.write(CarryFlag);
    rotate_right(&mut reg, &mut freg);
    assert!(reg.read() == 0x80);
    assert!(freg.read() == CarryFlag);
    reg.write(0x01);
    freg.write(Flags::empty());
    rotate_right(&mut reg, &mut freg);
    assert!(reg.read() == 0x00);
    assert!(freg.read() == CarryFlag);
}
// RL (HL): rotate left THROUGH carry — old carry enters bit 0,
// old bit 7 becomes the new carry.
#[test]
fn test_rotate_left_at_address() {
    let mut val = 0b00001111;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1423;
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    mem.write_byte(addr, val);
    rotate_left_at_address(&mut mem, addr, &mut freg);
    // Rotate should happen
    assert!(mem.read_byte(addr) == 0b00011110);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0x00);
    rotate_left_at_address(&mut mem, addr, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b11001100);
    rotate_left_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b10011000);
    assert!(freg.read() == CarryFlag);
    freg.write(CarryFlag);
    mem.write_byte(addr, 0b11001100);
    rotate_left_at_address(&mut mem, addr, &mut freg);
    // Carry-in rotates into bit 0; bit 7 keeps carry set
    assert!(mem.read_byte(addr) == 0b10011001);
    assert!(freg.read() == CarryFlag);
}
// RL r: register variant of the rotate left through carry.
#[test]
fn test_rotate_left() {
    let mut reg = Register::new(0b00001111);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_left(&mut reg, &mut freg);
    // Rotate should happen
    assert!(reg.read() == 0b00011110);
    assert!(freg.read() == Flags::empty());
    let mut regb = Register::new(0x00);
    rotate_left(&mut regb, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(regb.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
    let mut regc = Register::new(0b11001100);
    freg.write(CarryFlag);
    rotate_left(&mut regc, &mut freg);
    assert!(regc.read() == 0b10011001);
    assert!(freg.read() == CarryFlag);
    regc.write(0b00110011);
    freg.write(Flags::empty());
    rotate_left(&mut regc, &mut freg);
    assert!(regc.read() == 0b01100110);
    assert!(freg.read() == Flags::empty());
}
// RRC (HL): rotate right, old bit 0 copied to BOTH bit 7 and carry;
// the incoming carry value does not affect the result.
#[test]
fn test_rotate_right_with_carry_at_address() {
    let mut val = 0b10011001;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1237;
    mem.write_byte(addr, val);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_right_with_carry_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0b11001100);
    assert!(freg.read() == CarryFlag);
    mem.write_byte(addr, 0x0);
    rotate_right_with_carry_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x0);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0x01);
    freg.write(CarryFlag);
    rotate_right_with_carry_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x80);
    assert!(freg.read() == CarryFlag);
    mem.write_byte(addr, 0x01);
    rotate_right_with_carry_at_address(&mut mem, addr, &mut freg);
    assert!(mem.read_byte(addr) == 0x80);
    assert!(freg.read() == CarryFlag);
}
// RLC (HL): rotate left, old bit 7 copied to both bit 0 and carry.
#[test]
fn test_rotate_left_with_carry_at_address() {
    let mut val = 0b00001111;
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1423;
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    mem.write_byte(addr, val);
    rotate_left_with_carry_at_address(&mut mem, addr, &mut freg);
    // Rotate should happen
    assert!(mem.read_byte(addr) == 0b00011110);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0x00);
    rotate_left_with_carry_at_address(&mut mem, addr, &mut freg);
    // Zero should return zero with ZeroFlag
    assert!(mem.read_byte(addr) == 0x00);
    assert!(freg.read() == ZeroFlag);
    mem.write_byte(addr, 0b11001100);
    rotate_left_with_carry_at_address(&mut mem, addr, &mut freg);
    // Carry should get set
    assert!(mem.read_byte(addr) == 0b10011001);
    assert!(freg.read() == CarryFlag);
}
// SET b,(HL): setting bit 0 turns ...1100 into ...1101.
#[test]
fn test_set_at_addr() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let target = 0x1235;
    mem.write_byte(target, 0b11001100);
    set_at_addr(&mut mem, target, 0);
    assert!(mem.read_byte(target) == 0b11001101);
}
// SET b,r: sets bit 7, then confirms setting already-set bits 1 and 4
// leaves the value unchanged.
#[test]
fn test_set() {
    let mut reg = Register::new(0b00110011);
    set(&mut reg, 7);
    assert!(reg.read() == 0b10110011);
    set(&mut reg, 1);
    assert!(reg.read() == 0b10110011);
    set(&mut reg, 4);
    assert!(reg.read() == 0b10110011);
}
// RES b,(HL): clearing bit 2 turns ...1100 into ...1000.
#[test]
fn test_res_at_addr() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let target = 0x1235;
    mem.write_byte(target, 0b11001100);
    res_at_addr(&mut mem, target, 2);
    assert!(mem.read_byte(target) == 0b11001000);
}
// RES b,r: clears bits 0 and 5; clearing already-clear bit 7 is a no-op.
#[test]
fn test_res() {
    let mut reg = Register::new(0b00110011);
    res(&mut reg, 0);
    assert!(reg.read() == 0b00110010);
    res(&mut reg, 5);
    assert!(reg.read() == 0b00010010);
    res(&mut reg, 7);
    assert!(reg.read() == 0b00010010);
}
// Reads a single byte back from memory.
#[test]
fn test_byte_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let addr = 0x1234;
    mem.write_byte(addr, 0xFE);
    let val = byte_at_address(&mem, addr);
    assert!(val == 0xFE);
}
// BIT b,r: HalfCarry always set, Carry preserved, ZeroFlag set only when
// the tested bit is 0. (Literal is 7 binary digits; same value as 0b00001111.)
#[test]
fn test_bit() {
    let reg = 0b0001111;
    let mut freg = Register::new(SubtractFlag | CarryFlag);
    bit(reg, 3, &mut freg);
    assert!(freg.read() == HalfCarryFlag | CarryFlag);
    bit(reg, 7, &mut freg);
    assert!(freg.read() == HalfCarryFlag | CarryFlag | ZeroFlag);
}
// JP nn: the program counter should be replaced by the target address.
#[test]
fn test_jp() {
    let target = 0x1223;
    let mut pc = Register::new(0x2311);
    jp(&mut pc, target);
    assert!(pc.read() == target);
}
// Fetches a 16-bit immediate operand and advances PC by two.
#[test]
fn test_u16_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x123);
    mem.write_word(pc.read(), 0xBEEF);
    let val = u16_immediate(&mem, &mut pc);
    assert!(val == 0xBEEF);
    assert!(pc.read() == 0x125);
}
// Fetches an 8-bit immediate operand and advances PC by one.
#[test]
fn test_u8_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x123);
    mem.write_byte(pc.read(), 0x43);
    let val = u8_immediate(&mem, &mut pc);
    assert!(val == 0x43);
    assert!(pc.read() == 0x124);
}
// RETI: pops the return address into PC and re-enables interrupts (IME).
#[test]
fn test_reti() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut ime = false;
    let mut pc = Register::new(0x123);
    let mut sp = Register::new(0xFFFC);
    mem.write_word(sp.read(), 0xADCD);
    reti(&mem, &mut pc, &mut sp, &mut ime);
    assert!(pc.read() == 0xADCD);
    assert!(ime == true);
    assert!(sp.read() == 0xFFFE);
}
// SUB/SBC n: subtracts the immediate from the register; the final bool
// selects the with-carry (SBC) variant.
#[test]
fn test_sub_u8_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x2736);
    let mut reg = Register::new(0x35);
    let mut freg = Register::new(Flags::empty());
    mem.write_byte(pc.read(), 0x11);
    sub_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(pc.read() == 0x2737);
    assert!(reg.read() == 0x24);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    mem.write_byte(pc.read(), 0x0C);
    sub_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(pc.read() == 0x2738);
    assert!(reg.read() == 0x18);
    assert!(freg.read() == SubtractFlag | CarryFlag);
    mem.write_byte(pc.read(), 0x0F);
    freg.write(CarryFlag);
    // SBC form: carry-in contributes an extra 1 (0x18 - 0x0F - 1 = 0x08).
    sub_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, true);
    assert!(pc.read() == 0x2739);
    assert!(reg.read() == 0x08);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag);
}
// CALL nn: pushes the address after the call onto the stack and jumps.
#[test]
fn test_call() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x4324);
    let mut sp = Register::new(0xFFFE);
    let addr = 0x10;
    call(&mut mem, &mut pc, &mut sp, addr);
    assert!(pc.read() == 0x0010);
    assert!(sp.read() == 0xFFFC);
    assert!(mem.read_word(sp.read()) == 0x4324);
}
// ADD/ADC n: adds the immediate to the register; the final bool selects
// the with-carry (ADC) variant.
#[test]
fn test_add_u8_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x2736);
    let mut reg = Register::new(0x05);
    let mut freg = Register::new(Flags::empty());
    mem.write_byte(pc.read(), 0x11);
    add_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(pc.read() == 0x2737);
    assert!(reg.read() == 0x16);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(pc.read(), 0x0C);
    add_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(pc.read() == 0x2738);
    assert!(reg.read() == 0x22);
    assert!(freg.read() == HalfCarryFlag);
    mem.write_byte(pc.read(), 0x0F);
    freg.write(CarryFlag);
    // ADC form: carry-in adds an extra 1 (0x22 + 0x0F + 1 = 0x32).
    add_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, true);
    assert!(pc.read() == 0x2739);
    assert!(reg.read() == 0x32);
    assert!(freg.read() == HalfCarryFlag);
    mem.write_byte(pc.read(), 0x01);
    reg.write(0xFF);
    // Wraparound to zero sets ZeroFlag.
    add_u8_immediate(&mem, &mut pc, &mut reg, &mut freg, false);
    assert!(reg.read() == 0x00);
    assert!(freg.read() == ZeroFlag);
}
// PUSH: SP decrements by two, value lands at the new SP.
#[test]
fn test_push() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut sp = Register::new(0xFFAB);
    push(&mut mem, &mut sp, 0x8735);
    assert!(sp.read() == 0xFFA9);
    assert!(mem.read_word(sp.read()) == 0x8735);
}
// CALL cc,nn: condition false just skips the operand; condition true
// pushes the return address (past the operand) and jumps.
#[test]
fn test_call_immediate_if_true() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x6542);
    let mut sp = Register::new(0xFFAB);
    mem.write_word(pc.read(), 0x1234);
    call_immediate_if_true(&mut mem, &mut pc, &mut sp, false);
    assert!(pc.read() == 0x6544);
    pc.write(0x6542);
    call_immediate_if_true(&mut mem, &mut pc, &mut sp, true);
    assert!(pc.read() == 0x1234);
    assert!(sp.read() == 0xFFA9);
    assert!(mem.read_word(sp.read()) == 0x6544);
}
// JP nn: unconditional jump to the 16-bit immediate.
#[test]
fn test_jp_u16_immediate() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x6542);
    mem.write_word(pc.read(), 0x1234);
    pc.write(0x6542); // redundant: pc already holds 0x6542
    jp_u16_immediate(&mem, &mut pc);
    assert!(pc.read() == 0x1234);
}
// JP cc,nn: condition false skips the operand; condition true jumps.
#[test]
fn test_jp_u16_immediate_if_true() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x6542);
    mem.write_word(pc.read(), 0x1234);
    jp_u16_immediate_if_true(&mem, &mut pc, false);
    assert!(pc.read() == 0x6544);
    pc.write(0x6542);
    jp_u16_immediate_if_true(&mem, &mut pc, true);
    assert!(pc.read() == 0x1234);
}
// POP rr: high byte from SP+1, low byte from SP, SP advances by two.
#[test]
fn test_pop() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut sp = Register::new(0xABCD);
    let mut hb = Register::new(0x00);
    let mut lb = Register::new(0x00);
    mem.write_word(sp.read(), 0x1234);
    pop(&mem, &mut sp, &mut hb, &mut lb);
    assert!(hb.read() == 0x12);
    assert!(lb.read() == 0x34);
    assert!(sp.read() == 0xABCF);
}
// RET cc: no-op when the condition is false; pops into PC when true.
#[test]
fn test_ret() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0xAB);
    let mut sp = Register::new(0xABCD);
    mem.write_word(sp.read(), 0x1234);
    ret(&mem, &mut pc, &mut sp, false);
    assert!(pc.read() == 0xAB);
    assert!(sp.read() == 0xABCD);
    ret(&mem, &mut pc, &mut sp, true);
    assert!(pc.read() == 0x1234);
    assert!(sp.read() == 0xABCF);
}
// OR r: bitwise-or into the register; only ZeroFlag can be set.
#[test]
fn test_or() {
    let mut reg = Register::new(0b01010101);
    let mut val = 0b11110000;
    let mut freg = Register::new(Flags::empty());
    or(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b11110101);
    assert!(freg.read() == Flags::empty());
    reg.write(0b00000000);
    val = 0x00;
    or(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == ZeroFlag);
}
// CP r: compare sets flags like SUB but leaves the register unchanged.
#[test]
fn test_cp() {
    let mut reg = Register::new(0xAA);
    let mut val = 0xBB;
    let mut freg = Register::new(Flags::empty());
    compare(&mut reg, val, &mut freg);
    assert!(reg.read() == 0xAA);
    assert!(freg.read() == SubtractFlag);
    reg.write(0xF0);
    val = 0xF0;
    compare(&mut reg, val, &mut freg);
    assert!(reg.read() == 0xF0);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag | ZeroFlag);
}
// OR (HL): same as OR but the operand comes from memory.
#[test]
fn test_or_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0b01000110);
    let mut freg = Register::new(Flags::empty());
    let addr = 0x1239;
    let mut val = 0b11000011;
    mem.write_byte(addr, val);
    or_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b11000111);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0b00000000);
    reg.write(0x00);
    or_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == ZeroFlag);
}
// CP (HL): compare against the byte in memory; register is untouched.
#[test]
fn test_compare_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0xBA);
    let mut freg = Register::new(Flags::empty());
    let addr = 0x1239;
    let mut val = 0xAB;
    mem.write_byte(addr, val);
    compare_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0xBA);
    assert!(freg.read() == SubtractFlag | CarryFlag);
    mem.write_byte(addr, 0xCD);
    reg.write(0xCD);
    compare_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0xCD);
    assert!(freg.read() == SubtractFlag | ZeroFlag | HalfCarryFlag | CarryFlag);
}
// AND (HL): bitwise-and with the byte in memory; HalfCarry is always set.
#[test]
fn test_and_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0b01000110);
    let mut freg = Register::new(Flags::empty());
    let addr = 0x1239;
    let mut val = 0b11000011;
    mem.write_byte(addr, val);
    and_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b01000010);
    assert!(freg.read() == HalfCarryFlag);
    mem.write_byte(addr, 0b00000000);
    and_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == HalfCarryFlag | ZeroFlag);
}
// AND r: register variant; zero result adds ZeroFlag to the fixed HalfCarry.
#[test]
fn test_and() {
    let mut reg = Register::new(0b01010101);
    let mut val = 0b11110000;
    let mut freg = Register::new(Flags::empty());
    and(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b01010000);
    assert!(freg.read() == HalfCarryFlag);
    reg.write(0b00001111);
    and(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == HalfCarryFlag | ZeroFlag);
}
// XOR (HL): bitwise-xor with the byte in memory; only ZeroFlag can be set.
#[test]
fn test_xor_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0b00111100);
    let mut freg = Register::new(Flags::empty());
    let addr = 0x1239;
    let mut val = 0b11000011;
    mem.write_byte(addr, val);
    xor_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b11111111);
    assert!(freg.read() == Flags::empty());
    mem.write_byte(addr, 0b11111111);
    xor_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == ZeroFlag);
}
// XOR r: register variant; xor with self-complement produces zero.
#[test]
fn test_xor() {
    let mut reg = Register::new(0b01010101);
    let mut freg = Register::new(Flags::empty());
    let mut val = 0b11111111;
    xor(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b10101010);
    assert!(freg.read() == Flags::empty());
    val = 0b10101010;
    xor(&mut reg, val, &mut freg);
    assert!(reg.read() == 0b00000000);
    assert!(freg.read() == ZeroFlag);
}
// SBC (HL): subtract memory operand plus carry-in (0xFF - 0xCD - 1 = 0x31).
#[test]
fn test_sbc_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut freg = Register::new(CarryFlag);
    let mut reg = Register::new(0xFF);
    let addr = 0x1234;
    mem.write_byte(addr, 0xCD);
    sbc_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0x31);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag);
}
// SUB (HL): plain subtract ignores the incoming carry (0xFF - 0xCD = 0x32).
#[test]
fn test_sub_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut freg = Register::new(CarryFlag);
    let mut reg = Register::new(0xFF);
    let addr = 0x1234;
    mem.write_byte(addr, 0xCD);
    sub_value_at_address(&mem, &mut reg, addr, &mut freg);
    assert!(reg.read() == 0x32);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag);
}
// SBC r: subtract with carry-in; includes the 0xFF - 0xFF - 1 wraparound case.
#[test]
fn test_sbc() {
    let mut reg = Register::new(0xFF);
    let mut freg = Register::new(Flags::empty());
    sbc(&mut reg, 0x0f, &mut freg);
    assert!(reg.read() == 0xF0);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    reg.write(0xFF);
    freg.write(CarryFlag);
    sbc(&mut reg, 0x0f, &mut freg);
    assert!(reg.read() == 0xEF);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    reg.write(0xFF);
    freg.write(CarryFlag);
    sbc(&mut reg, 0xFF, &mut freg);
    assert!(reg.read() == 0xFF);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    reg.write(0xAB);
    freg.write(CarryFlag);
    sbc(&mut reg, 0x12, &mut freg);
    assert!(reg.read() == 0x98);
    assert!(freg.read() == SubtractFlag | CarryFlag | HalfCarryFlag);
}
// SUB r: covers zero result, borrow-free, half-borrow, and full wraparound.
#[test]
fn test_sub() {
    let mut reg = Register::new(0xFF);
    let mut freg = Register::new(Flags::empty());
    sub(&mut reg, 0x0f, &mut freg);
    assert!(reg.read() == 0xF0);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag | CarryFlag);
    reg.write(0x11);
    sub(&mut reg, 0x11, &mut freg);
    assert!(reg.read() == 0);
    assert!(freg.read().contains(SubtractFlag));
    assert!(freg.read().contains(ZeroFlag));
    assert!(freg.read().contains(CarryFlag));
    assert!(freg.read().contains(HalfCarryFlag));
    reg.write(0xA0);
    sub(&mut reg, 0xB0, &mut freg);
    assert!(reg.read() == 0xF0);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag);
    reg.write(0x80);
    sub(&mut reg, 0x0f, &mut freg);
    assert!(reg.read() == 0x71);
    assert!(freg.read() == SubtractFlag | CarryFlag);
    reg.write(0x05);
    sub(&mut reg, 0xAB, &mut freg);
    assert!(reg.read() == 0x5A);
    assert!(freg.read() == SubtractFlag);
}
// ADC (HL): the incoming carry adds 1 to the memory operand.
// NOTE(review): flags are not asserted here — worth adding.
#[test]
fn test_adc_value_at_address() {
    let address = 0x1038;
    let mut mem = EmptyMemory::new(0xFFFF);
    let val = 0x10;
    let mut reg = Register::new(0x00);
    let mut freg = Register::new(CarryFlag);
    mem.write_byte(address, val);
    adc_value_at_address(&mem, &mut reg, address, &mut freg);
    assert!(reg.read() == val + 1);
}
// ADC r: add with carry-in; covers half-carry, full carry, and zero result.
#[test]
fn test_adc() {
    let mut first = Register::new(0x05);
    let mut flags = Register::new(SubtractFlag | CarryFlag);
    adc(&mut first, 0x0A, &mut flags);
    assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
    assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
    flags.write(CarryFlag);
    let mut a = Register::new(0xFA);
    adc(&mut a, 0x06, &mut flags);
    assert!(a.read() == 0x01);
    assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
    flags.write(CarryFlag);
    a.write(0);
    adc(&mut a, 0, &mut flags);
    assert!(a.read() == 0x01);
    assert!(flags.read() == Flags::empty());
    a.write(0);
    // Carry is now clear, so 0 + 0 + 0 = 0 sets ZeroFlag.
    adc(&mut a, 0, &mut flags);
    assert!(flags.read() == ZeroFlag);
}
// ADD A,(HL) with the address supplied as split high/low bytes.
#[test]
fn test_add_value_at_address() {
    let mut first = Register::new(0x05);
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut flags = Register::new(SubtractFlag | CarryFlag);
    mem.write_byte(0x8476, 0x0B);
    add_value_at_address(&mut mem, &mut first, 0x84, 0x76, &mut flags);
    assert!(first.read() == 0x10, "Expected: {}, Actual: {}", "16", first.read());
    assert!(flags.read() == HalfCarryFlag, "HalfCarry should be set");
    let mut a = Register::new(0xFA);
    mem.write_byte(0xADCD, 0x07);
    add_value_at_address(&mut mem, &mut a, 0xAD, 0xCD, &mut flags);
    assert!(a.read() == 0x01);
    assert!(flags.read() == CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
    a.write(0);
    add_value_at_address(&mut mem, &mut a, 0x11, 0x11, &mut flags);
    assert!(a.read() == 0x0);
    assert!(flags.read() == ZeroFlag);
}
// LD r,n: the register takes the immediate value.
#[test]
fn test_ld_u8() {
    let mut dest = Register::new(10);
    ld_u8(&mut dest, 0x18);
    assert!(dest.read() == 0x18);
}
// CCF: complements the carry flag, leaving other flags as the opcode defines.
// NOTE(review): name says "reset_flag" but the function under test is ccf.
#[test]
fn test_reset_flag() {
    let mut freg = Register::new(CarryFlag | ZeroFlag);
    ccf(&mut freg);
    assert!(!freg.read().contains(CarryFlag));
    assert!(freg.read().contains(ZeroFlag));
    freg.write(SubtractFlag);
    ccf(&mut freg);
    assert!(freg.read() == CarryFlag);
}
// LD A,(HL-): loads from the pair address, then decrements the pair.
#[test]
fn test_ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0x12);
    let mut high_byte = Register::new(0xAB);
    let mut low_byte = Register::new(0xCD);
    mem.write_byte(0xABCD, 0x54);
    ld_from_address_pointed_to_by_register_pair_and_decrement_register_pair(&mem, &mut reg, &mut high_byte, &mut low_byte);
    assert!(reg.read() == 0x54);
    assert!(low_byte.read() == 0xCC);
}
// Sets a single flag bit without disturbing the rest.
#[test]
fn test_set_flag() {
    let mut freg = Register::new(Flags::empty());
    set_flag(&mut freg, CarryFlag);
    assert!(freg.read().contains(CarryFlag));
}
// LD (HL),n: reads the immediate at PC and stores it at the split address.
#[test]
fn test_ld_u8_immediate_into_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0xAD12);
    mem.write_byte(0xAD12, 0xBB);
    ld_u8_immediate_into_address(&mut mem, &mut pc, 0x12, 0x34);
    assert!(mem.read_byte(0x1234) == 0xBB);
    assert!(pc.read() == 0xAD13);
}
// DEC (HL): decrement in memory; carry is preserved, Z/N/H updated.
#[test]
fn test_decrement_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut freg = Register::new(CarryFlag);
    mem.write_byte(0x1010, 1);
    decrement_value_at_address(&mut mem, 0x10, 0x10, &mut freg);
    assert!(mem.read_byte(0x1010) == 0);
    assert!(freg.read().is_all());
    mem.write_byte(0x01AB, 0x20);
    decrement_value_at_address(&mut mem, 0x01, 0xAB, &mut freg);
    assert!(mem.read_byte(0x01AB) == 0x1F);
    assert!(freg.read() == CarryFlag | SubtractFlag);
    freg.write(ZeroFlag);
    mem.write_byte(0xABCD, 0xED);
    decrement_value_at_address(&mut mem, 0xAB, 0xCD, &mut freg);
    assert!(mem.read_byte(0xABCD) == 0xEC);
    assert!(freg.read() == SubtractFlag | HalfCarryFlag);
}
// INC (HL): increment in memory; carry is preserved, Z/N/H updated.
#[test]
fn test_increment_value_at_address() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut freg = Register::new(CarryFlag);
    increment_value_at_address(&mut mem, 0x10, 0x10, &mut freg);
    assert!(mem.read_byte(0x1010) == 1);
    assert!(freg.read() == CarryFlag);
    mem.write_byte(0x01AB, 0x1F);
    increment_value_at_address(&mut mem, 0x01, 0xAB, &mut freg);
    assert!(mem.read_byte(0x01AB) == 0x20);
    assert!(freg.read().contains(CarryFlag));
    assert!(freg.read().contains(HalfCarryFlag));
    freg.write(SubtractFlag);
    mem.write_byte(0xABCD, 0xED);
    increment_value_at_address(&mut mem, 0xAB, 0xCD, &mut freg);
    assert!(mem.read_byte(0xABCD) == 0xEE);
    assert!(freg.read() == Flags::empty());
}
#[test]
fn test_write_value_to_memory_at_address_and_decrement_register() {
let mut mem = EmptyMemory::new(0xFFFF);
let mut val = 0x8;
let mut high_byte = Register::new(0x12);
let mut low_byte = Register::new(0x34);
write_value_to_memory_at_address_and_decrement_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(low_byte.read() == 0x33, "Should increment register");
assert!(mem.read_byte(0x1234) == 0x8, "Should correctly write value");
high_byte.write(0x11);
low_byte.write(0x00);
write_value_to_memory_at_address_and_decrement_register(&mut mem, val, &mut high_byte, &mut low_byte);
assert!(mem.read_byte(0x1100) == 0x8);
assert!(high_byte.read() == 0x10);
assert!(low_byte.read() == 0xFF);
}
#[test]
fn test_ld_next_two_bytes_into_reg() {
let mut mem = EmptyMemory::new(65536);
let mut pc = Register::new(11);
let mut reg = Register::new(0);
mem.write_word(11, 0xDEAB);
ld_next_two_bytes_into_reg(&mem, &mut pc, &mut reg);
assert!(pc.read() == 13);
assert!(reg.read() == 0xDEAB);
}
#[test]
fn test_complement() {
let mut a = Register::new(0x11);
let mut freg = Register::new(ZeroFlag | CarryFlag);
complement(&mut a, &mut freg);
assert!(a.read() == !0x11);
assert!(freg.read().is_all());
freg.write(Flags::empty());
complement(&mut a, &mut freg);
assert!(a.read() == 0x11);
assert!(freg.read().contains(HalfCarryFlag));
assert!(freg.read().contains(SubtractFlag));
}
// LD A,(HL+): loads from the pair address, then increments the pair.
#[test]
fn test_ld_from_address_pointed_to_by_register_pair_and_increment_register_pair() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut reg = Register::new(0x12);
    let mut high_byte = Register::new(0xAB);
    let mut low_byte = Register::new(0xCD);
    mem.write_byte(0xABCD, 0x54);
    ld_from_address_pointed_to_by_register_pair_and_increment_register_pair(&mem, &mut reg, &mut high_byte, &mut low_byte);
    assert!(reg.read() == 0x54);
    assert!(low_byte.read() == 0xCE);
}
// JR cc,e: signed relative jump from the address AFTER the operand.
#[test]
fn test_relative_jmp_by_signed_immediate_if_true() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut pc = Register::new(0x1234);
    // Forwards
    mem.write_byte(0x1234, 0x55);
    relative_jmp_by_signed_immediate_if_true(&mem, &mut pc, true);
    assert!(pc.read() == 0x128A, "Should jump forwards");
    // Backwards
    mem.write_byte(0x128A, -10 as u8);
    relative_jmp_by_signed_immediate_if_true(&mem, &mut pc, true);
    assert!(pc.read() == 0x1281, "Should jump back");
    // no jump
    mem.write_byte(0x1281, 0xFF);
    relative_jmp_by_signed_immediate_if_true(&mem, &mut pc, false);
    assert!(pc.read() == 0x1282, "Should not jump if ZeroFlag is not set. PC should increment to go past immediate value");
}
// LD (HL+),A: stores, then increments the pair (with low-byte carry).
#[test]
fn test_write_value_to_memory_at_address_and_increment_register() {
    let mut mem = EmptyMemory::new(0xFFFF);
    let mut val = 0x8;
    let mut high_byte = Register::new(0x12);
    let mut low_byte = Register::new(0x34);
    write_value_to_memory_at_address_and_increment_register(&mut mem, val, &mut high_byte, &mut low_byte);
    assert!(low_byte.read() == 0x35, "Should increment register");
    assert!(mem.read_byte(0x1234) == 0x8, "Should correctly write value");
    low_byte.write(0xFF);
    // Low byte 0xFF carries into the high byte: 0x12FF + 1 = 0x1300.
    write_value_to_memory_at_address_and_increment_register(&mut mem, val, &mut high_byte, &mut low_byte);
    assert!(mem.read_byte(0x12FF) == 0x8);
    assert!(high_byte.read() == 0x13);
    assert!(low_byte.read() == 0x00);
}
// JR e (unconditional): signed displacement relative to the post-operand PC.
#[test]
fn test_jump_by_signed_immediate() {
    let mut mem = EmptyMemory::new(0x10000);
    let mut pc = Register::new(0x0101);
    // 0x8A = -10 as i8
    mem.write_byte(0x0101, -10 as u8);
    jump_by_signed_immediate(&mem, &mut pc);
    assert!(pc.read() == 0xF8, "Should jump backwards with negative number");
    mem.write_byte(0xF8, 0x37);
    jump_by_signed_immediate(&mem, &mut pc);
    assert!(pc.read() == 0x130);
}
// RRC r: rotate right, old bit 0 copied to both bit 7 and carry;
// carry-in does not affect the result.
#[test]
fn test_rotate_right_with_carry() {
    let mut reg = Register::new(0b10011001);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_right_with_carry(&mut reg, &mut freg);
    assert!(reg.read() == 0b11001100);
    assert!(freg.read() == CarryFlag);
    reg.write(0x0);
    rotate_right_with_carry(&mut reg, &mut freg);
    assert!(reg.read() == 0x0);
    assert!(freg.read() == ZeroFlag);
    reg.write(0x01);
    freg.write(CarryFlag);
    rotate_right_with_carry(&mut reg, &mut freg);
    assert!(reg.read() == 0x80);
    assert!(freg.read() == CarryFlag);
}
// DEC rr: 0x7000 - 1 = 0x6FFF — the low byte borrows from the high byte.
#[test]
fn test_decrement_register_pair() {
    let mut high = Register::new(0x70);
    let mut low = Register::new(0x00);
    decrement_register_pair(&mut high, &mut low);
    assert!(high.read() == 0x6F);
    assert!(low.read() == 0xFF);
}
// LD A,(rr): loads from the address formed by packing the register pair.
#[test]
fn test_ld_from_reg_pair_as_address() {
    let mut mem = EmptyMemory::new(65000);
    let mut rega = Register::new(0x00);
    let mut reg1 = Register::new(0x12);
    let mut reg2 = Register::new(0x34);
    mem.write_byte(0x1234, 0xAA);
    ld_from_address(&mem, &mut rega, pack_u16(reg1.read(), reg2.read()));
    assert!(rega.read() == 0xAA);
}
// ADD HL,rr: 16-bit add into a register pair. ZeroFlag must be preserved;
// carry comes from bit 15, half-carry from bit 11.
// (Removed a leftover println! debug statement.)
#[test]
fn test_add_register_pair_to_register_pair() {
    let mut rega = Register::new(0x11);
    let mut regb = Register::new(0x11);
    let mut reg1 = Register::new(0x11);
    let mut reg2 = Register::new(0x11);
    let mut freg = Register::new(ZeroFlag | SubtractFlag | HalfCarryFlag | CarryFlag);
    // Basic add make sure ZeroFlag isn't affected
    add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
    assert!(pack_u16(rega.read(), regb.read()) == 0x2222);
    assert!(freg.read() == ZeroFlag);
    rega.write(0xF1);
    regb.write(0xAB);
    reg1.write(0x12);
    reg2.write(0x12);
    // Carry from bit 15
    add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
    assert!(pack_u16(rega.read(), regb.read()) == 0x03BD);
    assert!(freg.read() == ZeroFlag | CarryFlag);
    rega.write(0x1E);
    regb.write(0xAB);
    reg1.write(0x12);
    reg2.write(0x16);
    freg.write(ZeroFlag);
    // Carry from bit 11
    add_register_pair_to_register_pair(&mut rega, &mut regb, reg1.read(), reg2.read(), &mut freg);
    assert!(pack_u16(rega.read(), regb.read()) == 0x30C1);
    assert!(freg.read() == ZeroFlag | HalfCarryFlag);
}
/// SP is written to the little-endian address read from the two bytes at PC.
#[test]
fn test_write_stack_pointer_to_address_immediate() {
    // sp is only read, so it need not be mutable.
    let sp = Register::new(0xBEEF);
    let mut pc = Register::new(0x111);
    let mut mem = EmptyMemory::new(65647);
    mem.write_byte(0x111, 0xAD);
    mem.write_byte(0x112, 0xDE);
    write_u16_immediate_address(&mut mem, &mut pc, sp.read());
    // PC advances past the two immediate bytes.
    assert_eq!(pc.read(), 0x113);
    assert_eq!(mem.read_word(0xDEAD), 0xBEEF);
}
/// rotate_left_with_carry: plain rotate, zero result, and carry-out cases.
#[test]
fn test_rotate_left_with_carry() {
    let mut reg = Register::new(0b00001111);
    let mut freg = Register::new(SubtractFlag | HalfCarryFlag);
    rotate_left_with_carry(&mut reg, &mut freg);
    // Rotate should happen
    assert_eq!(reg.read(), 0b00011110);
    assert_eq!(freg.read(), Flags::empty());
    let mut regb = Register::new(0x00);
    rotate_left_with_carry(&mut regb, &mut freg);
    // Zero should return zero with ZeroFlag
    assert_eq!(regb.read(), 0x00);
    assert_eq!(freg.read(), ZeroFlag);
    let mut regc = Register::new(0b11001100);
    rotate_left_with_carry(&mut regc, &mut freg);
    // Carry should get set
    assert_eq!(regc.read(), 0b10011001);
    assert_eq!(freg.read(), CarryFlag);
}
/// decrement_register: zero result, half-borrow, and no-borrow flag cases.
#[test]
fn test_decrement_register() {
    let mut reg = Register::new(1);
    let mut freg = Register::new(Flags::empty());
    decrement_register(&mut reg, &mut freg);
    assert_eq!(reg.read(), 0);
    assert_eq!(freg.read(), ZeroFlag | SubtractFlag | HalfCarryFlag);
    reg.write(0xF1);
    freg.write(Flags::empty());
    decrement_register(&mut reg, &mut freg);
    assert_eq!(reg.read(), 0xF0);
    assert_eq!(freg.read(), SubtractFlag | HalfCarryFlag);
    reg.write(0xF0);
    freg.write(Flags::empty());
    decrement_register(&mut reg, &mut freg);
    assert_eq!(reg.read(), 0xEF);
    assert_eq!(freg.read(), SubtractFlag);
}
/// increment_register: plain increment, half-carry at 0x0F, wrap at 0xFF.
#[test]
fn test_increment_register() {
    let mut reg = Register::new(1);
    let mut freg = Register::new(ZeroFlag | HalfCarryFlag);
    increment_register(&mut reg, &mut freg);
    assert_eq!(reg.read(), 2);
    assert_eq!(freg.read(), Flags::empty());
    let mut regb = Register::new(0x0F);
    increment_register(&mut regb, &mut freg);
    assert_eq!(regb.read(), 0x10);
    assert_eq!(freg.read(), HalfCarryFlag);
    let mut regc = Register::new(0xFF);
    freg.write(Flags::empty());
    increment_register(&mut regc, &mut freg);
    assert_eq!(regc.read(), 0x00);
    assert_eq!(freg.read(), HalfCarryFlag | ZeroFlag);
}
/// 8-bit add: half-carry, full carry with wrap, and zero-result flags.
#[test]
fn test_add_reg_with_reg() {
    let mut first = Register::new(0x05);
    // `second` is only read, so it need not be mutable.
    let second = Register::new(0x0B);
    let mut flags = Register::new(SubtractFlag | CarryFlag);
    add(&mut first, second.read(), &mut flags);
    assert_eq!(first.read(), 0x10);
    assert_eq!(flags.read(), HalfCarryFlag, "HalfCarry should be set");
    let mut a = Register::new(0xFA);
    let mut b = Register::new(0x07);
    add(&mut a, b.read(), &mut flags);
    assert_eq!(a.read(), 0x01);
    assert_eq!(flags.read(), CarryFlag | HalfCarryFlag, "HalfCarry and CarryFlag should be set");
    a.write(0);
    b.write(0);
    add(&mut a, b.read(), &mut flags);
    assert_eq!(a.read(), 0x0);
    assert_eq!(flags.read(), ZeroFlag);
}
/// Loads the byte at PC into the register and advances PC by one.
#[test]
fn test_ld_u8_immediate() {
    let mut mem = EmptyMemory::new(65536);
    let mut pc = Register::new(11);
    let mut reg = Register::new(0);
    mem.write_byte(11, 0xFA);
    ld_u8_immediate(&mem, &mut pc, &mut reg);
    assert_eq!(reg.read(), 0xFA);
    assert_eq!(pc.read(), 12);
}
/// Copies source into target; the source register is left untouched.
#[test]
fn test_ld_reg_to_reg() {
    let mut target = Register::new(5);
    // `source` is passed by shared reference, so it need not be mutable.
    let source = Register::new(10);
    ld_reg_to_reg(&mut target, &source);
    assert_eq!(target.read(), 10);
    assert_eq!(source.read(), 10);
}
/// Loads a 16-bit immediate: high byte into reg, low byte into reg2, PC += 2.
#[test]
fn test_ld_u16_immediate() {
    let mut mem = EmptyMemory::new(65536);
    let mut pc = Register::new(11);
    let mut reg = Register::new(0);
    let mut reg2 = Register::new(0);
    mem.write_word(11, 0xDEAB);
    ld_u16_immediate(&mem, &mut pc, &mut reg, &mut reg2);
    assert_eq!(pc.read(), 13);
    assert_eq!(reg.read(), 0xDE);
    assert_eq!(reg2.read(), 0xAB);
}
/// Writes a byte to the address formed from msb:lsb.
#[test]
fn test_write_value_to_memory_at_address() {
    let mut mem = EmptyMemory::new(65536);
    // msb/lsb are never reassigned; `mut` removed.
    let msb = 0xFF;
    let lsb = 0x11;
    let val = 100;
    write_value_to_memory_at_address(&mut mem, val, msb, lsb);
    // Message fixed: it previously read "Memory does match what was written".
    assert_eq!(mem.read_byte(0xFF11), val, "Memory does not match what was written");
}
/// Incrementing a pair bumps the low byte; 0xFF in the low byte carries
/// into the high byte.
#[test]
fn test_increment_register_pair() {
    let mut msb = Register::new(0x11);
    let mut lsb = Register::new(0x11);
    increment_register_pair(&mut msb, &mut lsb);
    assert_eq!(msb.read(), 0x11);
    assert_eq!(lsb.read(), 0x12);
    let mut msb_2 = Register::new(0x10);
    let mut lsb_2 = Register::new(0xFF);
    increment_register_pair(&mut msb_2, &mut lsb_2);
    assert_eq!(msb_2.read(), 0x11);
    assert_eq!(lsb_2.read(), 0x00);
}
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// CPUID leaf 0: stores the 12-byte vendor string (EBX, EDX, ECX, in that
/// order) into `bytes` and returns EAX, the highest supported basic leaf.
///
/// # Safety
/// Executes inline assembly (legacy pre-1.59 `asm!` syntax) and writes 12
/// bytes through the pointer derived from `bytes`.
pub unsafe fn get_name(bytes: &mut [u8; 12]) -> u32 {
let value: u32;
// NOTE(review): the output constraint "=r{eax}" looks malformed for the old
// asm! syntax (usually "={eax}") — confirm against the toolchain this targets.
asm!("xorl %eax, %eax
cpuid
movl %ebx, (%rdi)
movl %edx, 0x4(%rdi)
movl %ecx, 0x8(%rdi)"
: "=r{eax}"(value) : "{rdi}"(bytes.as_ptr()) : "ebx", "edx", "ecx", "memory");
value
}
/// CPUID leaf 0x80000000: returns EAX, the highest supported extended leaf.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax).
pub unsafe fn get_ext_fn_max() -> u32 {
let value: u32;
// NOTE(review): cpuid also clobbers ECX and EDX, which are missing from the
// clobber list — confirm this is intentional.
asm!("movl $$0x80000000, %eax
cpuid"
: "=r{eax}"(value) : : "ebx");
value
}
/// CPUID leaves 0x80000002..=0x80000004: stores the 48-byte processor brand
/// string into `bytes`, 16 bytes (EAX, EBX, ECX, EDX) per leaf.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax) and writes 48 bytes
/// through the pointer derived from `bytes`.
pub unsafe fn get_brand_string(bytes: &mut [u8; 48]) {
// Fix: use a directional numeric label (`0:` / `jle 0b`) instead of `.L0`.
// Inline asm can be duplicated by the compiler, and a fixed `.L` symbol
// would then be defined more than once.
asm!("movl $$0x80000002, %eax
movl %eax, %esi
0:
cpuid
movl %eax, (%rdi)
movl %ebx, 0x4(%rdi)
movl %ecx, 0x8(%rdi)
movl %edx, 0xc(%rdi)
addq $$0x10, %rdi
addl $$0x1, %esi
movl %esi, %eax
cmpl $$0x80000004, %esi
jle 0b"
: : "{rdi}"(bytes.as_ptr()) : "rdi", "eax", "esi", "ebx", "ecx", "edx", "memory");
}
/// CPUID leaf 1: packs the EDX (low 32 bits) and ECX (high 32 bits) feature
/// flags into one u64 by storing both through the output pointer.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax) and writes 8 bytes
/// through the pointer to `value`.
pub unsafe fn get_info_bits() -> u64 {
let mut value: u64 = 0;
// NOTE(review): cpuid also writes EAX, which is absent from the clobber
// list — confirm against the toolchain this targets.
asm!("movl $$0x1, %eax
cpuid
movl %edx, (%rdi)
movl %ecx, 0x4(%rdi)"
: : "{rdi}"(&mut value as *mut u64) : "edx", "ebx", "ecx", "memory");
value
}
/// CPUID leaf 7: packs EBX (low 32 bits) and ECX (high 32 bits) extended
/// feature flags into one u64.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax) and writes 8 bytes
/// through the pointer to `value`.
pub unsafe fn get_ext_feature_bits() -> u64 {
let mut value: u64 = 0;
// NOTE(review): leaf 7 normally also requires ECX=0 (subleaf) before cpuid,
// and EAX is clobbered but not listed — confirm both.
asm!("movl $$0x7, %eax
cpuid
movl %ebx, (%rdi)
movl %ecx, 0x4(%rdi)"
: : "{rdi}"(&mut value as *mut u64) : "edx", "ebx", "ecx", "memory");
value
}
/// CPUID leaf 0x80000001: packs EDX (low 32 bits) and ECX (high 32 bits)
/// extended processor-info flags into one u64.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax) and writes 8 bytes
/// through the pointer to `value`.
pub unsafe fn get_ext_bits() -> u64 {
let mut value: u64 = 0;
asm!("movl $$0x80000001, %eax
cpuid
movl %edx, (%rdi)
movl %ecx, 0x4(%rdi)"
: : "{rdi}"(&mut value as *mut u64) : "edx", "ebx", "ecx", "memory");
value
}
/// CPUID leaf 1: returns EAX (stepping/model/family information).
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax).
pub unsafe fn get_stepping_bits() -> u32 {
let value: u32;
asm!("movl $$0x1, %eax
cpuid"
: "=r{eax}"(value) : : "eax", "edx", "ecx", "ebx");
value
}
Use directional label in get_brand_string
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/// CPUID leaf 0: stores the 12-byte vendor string (EBX, EDX, ECX, in that
/// order) into `bytes` and returns EAX, the highest supported basic leaf.
///
/// # Safety
/// Executes inline assembly (legacy pre-1.59 `asm!` syntax) and writes 12
/// bytes through the pointer derived from `bytes`.
pub unsafe fn get_name(bytes: &mut [u8; 12]) -> u32 {
let value: u32;
asm!("xorl %eax, %eax
cpuid
movl %ebx, (%rdi)
movl %edx, 0x4(%rdi)
movl %ecx, 0x8(%rdi)"
: "=r{eax}"(value) : "{rdi}"(bytes.as_ptr()) : "ebx", "edx", "ecx", "memory");
value
}
/// CPUID leaf 0x80000000: returns EAX, the highest supported extended leaf.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax).
pub unsafe fn get_ext_fn_max() -> u32 {
let value: u32;
// NOTE(review): cpuid also clobbers ECX and EDX, which are missing from the
// clobber list — confirm this is intentional.
asm!("movl $$0x80000000, %eax
cpuid"
: "=r{eax}"(value) : : "ebx");
value
}
/// CPUID leaves 0x80000002..=0x80000004: stores the 48-byte processor brand
/// string into `bytes`, 16 bytes (EAX, EBX, ECX, EDX) per leaf. Uses a
/// directional numeric label so the asm stays valid if the compiler
/// duplicates the inlined block.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax) and writes 48 bytes
/// through the pointer derived from `bytes`.
pub unsafe fn get_brand_string(bytes: &mut [u8; 48]) {
asm!("movl $$0x80000002, %eax
movl %eax, %esi
0:
cpuid
movl %eax, (%rdi)
movl %ebx, 0x4(%rdi)
movl %ecx, 0x8(%rdi)
movl %edx, 0xc(%rdi)
addq $$0x10, %rdi
addl $$0x1, %esi
movl %esi, %eax
cmpl $$0x80000004, %esi
jle 0b"
: : "{rdi}"(bytes.as_ptr()) : "rdi", "eax", "esi", "ebx", "ecx", "edx", "memory");
}
/// CPUID leaf 1: packs the EDX (low 32 bits) and ECX (high 32 bits) feature
/// flags into one u64 by storing both through the output pointer.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax) and writes 8 bytes
/// through the pointer to `value`.
pub unsafe fn get_info_bits() -> u64 {
let mut value: u64 = 0;
// NOTE(review): cpuid also writes EAX, which is absent from the clobber
// list — confirm against the toolchain this targets.
asm!("movl $$0x1, %eax
cpuid
movl %edx, (%rdi)
movl %ecx, 0x4(%rdi)"
: : "{rdi}"(&mut value as *mut u64) : "edx", "ebx", "ecx", "memory");
value
}
/// CPUID leaf 7: packs EBX (low 32 bits) and ECX (high 32 bits) extended
/// feature flags into one u64.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax) and writes 8 bytes
/// through the pointer to `value`.
pub unsafe fn get_ext_feature_bits() -> u64 {
let mut value: u64 = 0;
// NOTE(review): leaf 7 normally also requires ECX=0 (subleaf) before cpuid,
// and EAX is clobbered but not listed — confirm both.
asm!("movl $$0x7, %eax
cpuid
movl %ebx, (%rdi)
movl %ecx, 0x4(%rdi)"
: : "{rdi}"(&mut value as *mut u64) : "edx", "ebx", "ecx", "memory");
value
}
/// CPUID leaf 0x80000001: packs EDX (low 32 bits) and ECX (high 32 bits)
/// extended processor-info flags into one u64.
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax) and writes 8 bytes
/// through the pointer to `value`.
pub unsafe fn get_ext_bits() -> u64 {
let mut value: u64 = 0;
asm!("movl $$0x80000001, %eax
cpuid
movl %edx, (%rdi)
movl %ecx, 0x4(%rdi)"
: : "{rdi}"(&mut value as *mut u64) : "edx", "ebx", "ecx", "memory");
value
}
/// CPUID leaf 1: returns EAX (stepping/model/family information).
///
/// # Safety
/// Executes inline assembly (legacy `asm!` syntax).
pub unsafe fn get_stepping_bits() -> u32 {
let value: u32;
asm!("movl $$0x1, %eax
cpuid"
: "=r{eax}"(value) : : "eax", "edx", "ecx", "ebx");
value
}
|
use cocoa::base::id;
use std::error::Error;
use std::fmt::{self, Display, Formatter};
/// A type which can be constructed from an Objective-C pointer.
pub trait FromRaw: Sized {
    /// Creates the object from the pointer. If there is a problem with the
    /// pointer (nil, or the wrong underlying type), this function returns a
    /// [`FromRawError`] instead of a value.
    fn from_raw(raw_pointer: id) -> Result<Self, FromRawError>;
}
/// A type which can be borrowed as an Objective-C pointer.
pub trait AsRaw {
    /// Get an immutable reference to the pointer. Ownership is retained.
    fn as_raw(&self) -> &id;
    /// Get a mutable reference to the pointer. Ownership is retained.
    fn as_raw_mut(&mut self) -> &mut id;
}
/// A type which can be converted into an Objective-C pointer.
pub trait IntoRaw {
    /// Get the underlying pointer to the object. Releases ownership: the
    /// caller becomes responsible for the pointer's lifetime.
    fn into_raw(self) -> id;
}
/// Represents errors which may occur when constructing an object from a pointer.
#[derive(Clone, Debug)]
pub enum FromRawError {
    /// The pointer was nil.
    NilPointer,
    /// The pointer does not satisfy the expected type requirement: it is not
    /// an object of the required class (or subclass thereof), or it does not
    /// conform to the required protocol.
    WrongPointerType
}
/// Implements `FromRaw`, `AsRaw`, and `IntoRaw` for a single-field tuple
/// wrapper around an Objective-C `id`, validating either protocol
/// conformance or class membership in `from_raw`.
macro_rules! impl_from_into_raw {
($wrapper_type:ident, of protocol $protocol:expr) => (
impl $crate::FromRaw for $wrapper_type {
fn from_raw(raw_pointer: id) -> Result<Self, $crate::FromRawError> {
use $crate::internal::conforms_to_protocol;
use cocoa::base::nil;
if raw_pointer == nil {
Err($crate::FromRawError::NilPointer)
} else if unsafe { conforms_to_protocol(raw_pointer, $protocol) } {
// NOTE(review): erroring when the object *does* conform looks
// inverted — confirm whether `!conforms_to_protocol` was intended.
Err($crate::FromRawError::WrongPointerType)
} else {
Ok($wrapper_type(raw_pointer))
}
}
}
impl $crate::AsRaw for $wrapper_type {
fn as_raw(&self) -> &id {
&self.0
}
fn as_raw_mut(&mut self) -> &mut id {
&mut self.0
}
}
impl $crate::IntoRaw for $wrapper_type {
fn into_raw(self) -> id {
self.0
}
}
// Fix: macro_rules! arms must be separated by a semicolon; without it the
// macro definition fails to compile.
);
($wrapper_type:ident, of class $class:expr) => (
impl $crate::FromRaw for $wrapper_type {
fn from_raw(raw_pointer: id) -> Result<Self, $crate::FromRawError> {
use $crate::internal::is_kind_of_class;
use cocoa::base::nil;
if raw_pointer == nil {
Err($crate::FromRawError::NilPointer)
} else if unsafe { is_kind_of_class(raw_pointer, $class) } {
// NOTE(review): same apparent inversion as the protocol arm — confirm.
Err($crate::FromRawError::WrongPointerType)
} else {
Ok($wrapper_type(raw_pointer))
}
}
}
impl $crate::AsRaw for $wrapper_type {
fn as_raw(&self) -> &id {
&self.0
}
fn as_raw_mut(&mut self) -> &mut id {
&mut self.0
}
}
impl $crate::IntoRaw for $wrapper_type {
fn into_raw(self) -> id {
self.0
}
}
)
}
impl Display for FromRawError {
    /// Formats the error as its variant name.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.write_str(match *self {
            FromRawError::NilPointer => "FromRawError::NilPointer",
            FromRawError::WrongPointerType => "FromRawError::WrongPointerType",
        })
    }
}
impl Error for FromRawError {
    // `description` is the pre-1.42 Error API; kept to match the crate's
    // toolchain era. Returns a human-readable explanation per variant.
    fn description(&self) -> &str {
        match *self {
            FromRawError::NilPointer => "Attempted to create an object from nil",
            FromRawError::WrongPointerType => "The object pointer is not of the correct class",
        }
    }
}
Fix compile error
use cocoa::base::id;
use std::error::Error;
use std::fmt::{self, Display, Formatter};
/// A type which can be constructed from an Objective-C pointer.
pub trait FromRaw: Sized {
    /// Creates the object from the pointer. If there is a problem with the
    /// pointer (nil, or the wrong underlying type), this function returns a
    /// [`FromRawError`] instead of a value.
    fn from_raw(raw_pointer: id) -> Result<Self, FromRawError>;
}
/// A type which can be borrowed as an Objective-C pointer.
pub trait AsRaw {
    /// Get an immutable reference to the pointer. Ownership is retained.
    fn as_raw(&self) -> &id;
    /// Get a mutable reference to the pointer. Ownership is retained.
    fn as_raw_mut(&mut self) -> &mut id;
}
/// A type which can be converted into an Objective-C pointer.
pub trait IntoRaw {
    /// Get the underlying pointer to the object. Releases ownership: the
    /// caller becomes responsible for the pointer's lifetime.
    fn into_raw(self) -> id;
}
/// Represents errors which may occur when constructing an object from a pointer.
#[derive(Clone, Debug)]
pub enum FromRawError {
    /// The pointer was nil.
    NilPointer,
    /// The pointer does not satisfy the expected type requirement: it is not
    /// an object of the required class (or subclass thereof), or it does not
    /// conform to the required protocol.
    WrongPointerType
}
/// Implements `FromRaw`, `AsRaw`, and `IntoRaw` for a single-field tuple
/// wrapper around an Objective-C `id`, validating either protocol
/// conformance (first arm) or class membership (second arm) in `from_raw`.
macro_rules! impl_from_into_raw {
($wrapper_type:ident, of protocol $protocol:expr) => (
impl $crate::FromRaw for $wrapper_type {
fn from_raw(raw_pointer: id) -> Result<Self, $crate::FromRawError> {
use $crate::internal::conforms_to_protocol;
use cocoa::base::nil;
if raw_pointer == nil {
Err($crate::FromRawError::NilPointer)
} else if unsafe { conforms_to_protocol(raw_pointer, $protocol) } {
// NOTE(review): erroring when the object *does* conform looks
// inverted — confirm whether `!conforms_to_protocol` was intended.
Err($crate::FromRawError::WrongPointerType)
} else {
Ok($wrapper_type(raw_pointer))
}
}
}
impl $crate::AsRaw for $wrapper_type {
fn as_raw(&self) -> &id {
&self.0
}
fn as_raw_mut(&mut self) -> &mut id {
&mut self.0
}
}
impl $crate::IntoRaw for $wrapper_type {
fn into_raw(self) -> id {
self.0
}
}
);
($wrapper_type:ident, of class $class:expr) => (
impl $crate::FromRaw for $wrapper_type {
fn from_raw(raw_pointer: id) -> Result<Self, $crate::FromRawError> {
use $crate::internal::is_kind_of_class;
use cocoa::base::nil;
if raw_pointer == nil {
Err($crate::FromRawError::NilPointer)
} else if unsafe { is_kind_of_class(raw_pointer, $class) } {
// NOTE(review): same apparent inversion as the protocol arm — confirm.
Err($crate::FromRawError::WrongPointerType)
} else {
Ok($wrapper_type(raw_pointer))
}
}
}
impl $crate::AsRaw for $wrapper_type {
fn as_raw(&self) -> &id {
&self.0
}
fn as_raw_mut(&mut self) -> &mut id {
&mut self.0
}
}
impl $crate::IntoRaw for $wrapper_type {
fn into_raw(self) -> id {
self.0
}
}
)
}
impl Display for FromRawError {
    /// Formats the error as its variant name.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.write_str(match *self {
            FromRawError::NilPointer => "FromRawError::NilPointer",
            FromRawError::WrongPointerType => "FromRawError::WrongPointerType",
        })
    }
}
impl Error for FromRawError {
    // `description` is the pre-1.42 Error API; kept to match the crate's
    // toolchain era. Returns a human-readable explanation per variant.
    fn description(&self) -> &str {
        match *self {
            FromRawError::NilPointer => "Attempted to create an object from nil",
            FromRawError::WrongPointerType => "The object pointer is not of the correct class",
        }
    }
}
|
use alloc::raw_vec::RawVec;
use num::{self, PrimInt};
use std::cmp;
use std::ops::*;
use std::fmt::{self, Debug, Display};
use std::mem;
use std::ptr;
use std::marker::PhantomData;
/// Declares the fixed bit width of one vector element.
pub trait Nbits {
    /// Number of bits a single element occupies.
    fn bits() -> usize;
    /// A mask with the low `bits()` bits set (e.g. `0b11` for 2-bit elements).
    #[inline]
    fn mask() -> usize {
        let mut mask = 0;
        for _ in 0..Self::bits() {
            mask = mask << 1 | 1;
        }
        mask
    }
}
/// A vector that packs `T::bits()`-wide logical elements into storage
/// units of type `B`.
pub struct NbitsVec<T: Nbits, B = usize> {
    buf: RawVec<B>, // raw storage, measured in `B` units
    len: usize, // number of logical N-bit elements, not buf units
    _marker: PhantomData<T>, // ties the element-width type parameter to the vector
}
/// The default vector is empty and allocates nothing.
impl<T: Nbits, B: PrimInt> Default for NbitsVec<T, B> {
    fn default() -> Self {
        NbitsVec {
            _marker: PhantomData,
            len: 0,
            buf: RawVec::new(),
        }
    }
}
impl<T: Nbits, B: PrimInt + fmt::LowerHex> Debug for NbitsVec<T, B> {
    // Dumps the raw buf units in hex, including uninitialized capacity, so
    // the packed layout can be inspected while debugging.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        try!(write!(f,
                    "NbitsVec<{}> {{ len: {}, buf: RawVec {{ cap: {}, [",
                    T::bits(),
                    self.len,
                    self.buf.cap()));
        let ptr = self.buf.ptr();
        for i in 0..self.buf.cap() {
            // Reads raw (possibly uninitialized) units directly from the buffer.
            unsafe {
                try!(write!(f, "{:#x}, ", ptr::read(ptr.offset(i as isize))));
            }
        }
        write!(f, "] }}")
    }
}
impl<
T: Nbits,
B: PrimInt
> NbitsVec<T, B> {
/// Constructs a new, empty NbitsVec<T>
///
/// The vector will not allocate until elements are pushed onto it.
///
/// # Examples
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// # }
/// ```
#[inline]
pub fn new() -> Self {
    // No allocation happens until elements are pushed.
    NbitsVec {
        _marker: PhantomData,
        len: 0,
        buf: RawVec::new(),
    }
}
/// Constructs a new, empty Vec<T> with the specified capacity.
///
/// The vector will be able to hold exactly capacity elements without reallocating. If capacity
/// is 0, the vector will not allocate.
///
/// It is important to note that this function does not specify the length of the returned
/// vector, but only the capacity.
///
/// # Examples
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// assert!(vec.capacity() >= 10);
/// # }
/// ```
pub fn with_capacity(capacity: usize) -> Self {
    // The element capacity is converted into whole `B` storage units.
    let units = Self::capacity_to_buf(capacity);
    NbitsVec {
        _marker: PhantomData,
        len: 0,
        buf: RawVec::with_capacity(units),
    }
}
/// Builds an `NbitsVec` from a raw pointer, element length, and element capacity.
///
/// # Safety
/// `ptr` must come from a `RawVec<B>` allocation of at least
/// `capacity_to_buf(capacity)` units; ownership of the allocation transfers
/// to the returned vector.
pub unsafe fn from_raw_parts(ptr: *mut B, length: usize, capacity: usize) -> Self {
    NbitsVec {
        buf: RawVec::from_raw_parts(ptr, Self::capacity_to_buf(capacity)),
        len: length,
        _marker: PhantomData,
    }
}
/// Returns the number of elements the vector can hold without reallocating.
///
/// # Examples
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::{NbitsVec, As1bits};
/// # fn main() {
/// let v: NbitsVec<As1bits> = NbitsVec::with_capacity(10);
/// assert!(v.capacity() >= 10);
/// assert_eq!(v.capacity(), std::mem::size_of::<usize>() * 8);
/// # }
/// ```
#[inline(always)]
pub fn capacity(&self) -> usize {
    // Converts allocated buf units back to a logical element count.
    Self::capacity_from_buf(self.buf.cap())
}
/// Reserves capacity for at least additional more elements to be inserted in the given
/// NbitsVec<T>.
/// The collection may reserve more space to avoid frequent reallocations.
///
/// # Panics
///
/// Panics if the new capacity overflows usize.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// assert!(v.capacity() == 0);
/// v.reserve(100);
/// assert!(v.capacity() >= 100);
/// # }
/// ```
pub fn reserve(&mut self, additional: usize) {
    // Panics (per the doc above) if len + additional overflows usize.
    let required_cap = self.len().checked_add(additional).expect("capacity overflow");
    let used_cap = Self::capacity_to_buf(self.len());
    // NOTE(review): `need_extra_cap` is the *total* required buf size, yet it
    // is passed as the "extra" argument to RawVec::reserve — this likely
    // over-reserves (harmless but wasteful). Verify against the RawVec API.
    let need_extra_cap = Self::capacity_to_buf(required_cap);
    self.buf.reserve(used_cap, need_extra_cap);
}
/// Reserves the minimum capacity for exactly additional more elements to be inserted in the
/// given `NbitsVec<T>`. Does nothing if the capacity is already sufficient.
///
/// # Panics
///
/// Panics if the new capacity overflows usize.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// assert!(v.capacity() == 0);
/// v.reserve_exact(64);
/// assert_eq!(v.capacity(), 64);
/// v.reserve_exact(127);
/// assert!(v.capacity() >= 127);
/// v.reserve_exact(128);
/// assert_eq!(v.capacity(), 128);
/// # }
/// ```
pub fn reserve_exact(&mut self, additional: usize) {
    // Panics (per the doc above) if len + additional overflows usize.
    let required_cap = self.len().checked_add(additional).expect("capacity overflow");
    let used_cap = Self::capacity_to_buf(self.len());
    // NOTE(review): as in `reserve`, the total required size is passed where
    // RawVec likely expects the extra amount — verify.
    let need_extra_cap = Self::capacity_to_buf(required_cap);
    self.buf.reserve_exact(used_cap, need_extra_cap);
}
/// Shrinks the capacity of the vector as much as possible.
///
/// It will drop down as close as possible to the length but the allocator may still inform the
/// vector that there is space for a few more elements.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// vec.shrink_to_fit();
/// assert_eq!(vec.capacity(), 0);
/// # }
/// ```
///
pub fn shrink_to_fit(&mut self) {
    // Shrinks to the number of buf units needed for the current element count.
    let fit_len = Self::capacity_to_buf(self.len());
    self.buf.shrink_to_fit(fit_len);
}
/// Expands the length of the vector as much as possible with current capacity.
///
/// Be sure not to use the method if the capacity is not setted by yourself - means you didn't
/// expect the capacity so as the length.
pub fn expand_to_fit(&mut self) {
let fit_len = Self::capacity_to_buf(self.len());
unimplemented!();
}
// Stub — intended to consume the vector into a boxed slice; unimplemented.
pub fn into_boxed_slice(self) -> Box<[T]> {
    unimplemented!();
}
/// Shorten a vector to be `len` elements long, dropping excess elements.
///
/// If `len` is greater than the vector's current length, this has no effect.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(2);
/// unsafe { vec.set_len(2) }
/// vec.truncate(3);
/// assert_eq!(vec.len(), 2);
/// vec.truncate(1);
/// assert_eq!(vec.len(), 1);
/// # }
/// ```
pub fn truncate(&mut self, len: usize) {
    if self.len() > len {
        self.len = len;
        // NOTE(review): unlike std Vec::truncate, this also shrinks the
        // allocation — callers relying on retained capacity should confirm
        // this is intended.
        self.shrink_to_fit();
    }
}
// Stub — slice views over packed N-bit elements are not yet implemented.
pub fn as_slice(&self) -> &[T] {
    unimplemented!();
}
// Stub — mutable counterpart of `as_slice`; unimplemented.
pub fn as_mut_slice(&mut self) -> &mut [T] {
    unimplemented!();
}
/// Sets the length of a vector.
///
/// This will explicitly set the size of the vector, without actually modifying its buffers or
/// reserving additional capacity as needed, so it is up to the caller to ensure that the vector
/// is actually the specified size.
///
/// Recommend to use [resize()](#method.resize) when you actually want to `resize` the vector.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// unsafe {
/// v.set_len(3);
/// }
/// assert_eq!(v.len(), 3);
/// assert_eq!(v.capacity(), 0); // as documented, the capacity will not change
/// unsafe {
/// v.set_len(1)
/// }
/// assert_eq!(v.len(), 1);
/// # }
/// ```
#[inline]
pub unsafe fn set_len(&mut self, len: usize) {
    // Only the bookkeeping field changes; no allocation or zeroing happens
    // (see the safety discussion in the doc comment above).
    self.len = len;
}
// Stub — presumably mirrors Vec::swap_remove; unimplemented.
pub fn swap_remove(&mut self, index: usize) -> T {
    unimplemented!();
}
// Stub — presumably mirrors Vec::insert; unimplemented.
pub fn insert(&mut self, index: usize, element: T) {
    unimplemented!();
}
// Stub — presumably mirrors Vec::remove; unimplemented.
pub fn remove(&mut self, index: usize) {
    unimplemented!();
}
// Stub — presumably mirrors Vec::retain; unimplemented.
pub fn retain<F>(&mut self, f: F)
    where F: FnMut(&T) -> bool
{
    unimplemented!();
}
// Stub — presumably mirrors Vec::append; unimplemented.
pub fn append(&mut self, other: &mut NbitsVec<T>) {
    unimplemented!();
}
#[inline]
pub fn clear(&mut self) {
    // Elements are plain bits, so nothing needs dropping — resetting the
    // length suffices. The allocation is kept.
    self.len = 0;
}
#[inline]
pub fn len(&self) -> usize {
    // Number of logical N-bit elements, not buf units.
    self.len
}
/// Returns the number of bits in current length.
///
/// It is related to the element numbers - not the capacity.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// assert_eq!(vec.bits(), 0);
/// # }
/// ```
#[inline]
pub fn bits(&self) -> usize {
    // Element count times bits-per-element (see doc above).
    self.len() * Self::unit_bits()
}
/// Total bits in buf.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// assert_eq!(vec.buf_bits(), std::mem::size_of::<usize>() * 8);
/// # }
/// ```
pub fn buf_bits(&self) -> usize {
    // Allocated buf units times bits per unit — total bits of capacity.
    self.buf.cap() * Self::buf_unit_bits()
}
/// Returns whether or not the vector is empty.
///
/// Alias to `len() == 0`.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// assert!(vec.is_empty());
/// # }
/// ```
#[inline]
pub fn is_empty(&self) -> bool {
    // Alias for `len() == 0`, as documented above.
    self.len() == 0
}
// Stub — presumably mirrors Vec::split_off; unimplemented.
pub fn split_off(&mut self, at: usize) -> Self {
    unimplemented!();
}
// Stub — presumably appends a slice of elements; unimplemented.
pub fn push_all(&mut self, other: &[T]) {
    unimplemented!();
}
// And any lost functions from `dedup` to the end.
// Stub — note it currently returns `()` rather than a mutable handle;
// presumably the signature is still provisional.
pub fn get_mut(&self, index: usize) {
    unimplemented!();
}
/// Appends an element to the back of a collection.
///
/// # Panics
///
/// Panics if the number of elements in the vector overflows a `usize`.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// vec.push(0b10);
/// vec.push(0b01);
/// assert_eq!(vec.len(), 2);
/// # }
/// ```
pub fn push(&mut self, value: B) {
    let len = self.len();
    // Compute the new length first so overflow panics before any allocation.
    let new_len = len.checked_add(1).expect("usize added overflows");
    self.reserve(1);
    // The length must be bumped before `set`, which bounds-checks against it.
    self.len = new_len;
    self.set(len, value);
}
/// Removes the last element from a vector and returns it, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// vec.push(0b11);
/// assert_eq!(vec.pop(), Some(0b11));
/// assert_eq!(vec.len(), 0);
/// # }
/// ```
pub fn pop(&mut self) -> Option<B> {
    if self.len() == 0 {
        return None;
    }
    // Fix: pop must remove the *last* element, as the doc comment above (and
    // Vec::pop semantics) promise. The previous code returned the FIRST
    // element and shifted everything left via `align(1, 0)` — wrong end,
    // and O(n) instead of O(1).
    let last = self.len() - 1;
    let value = self.get(last);
    self.len = last;
    Some(value)
}
/// Resizes the Vec in-place so that len() is equal to new_len.
///
/// If new_len is greater than len(), the Vec is extended by the difference, with each
/// additional slot filled with value. If new_len is less than len(), the Vec is simply
/// truncated. Note that `resize` expand memeory will use `reserve_exact` method to
/// fit size.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// vec.resize(10, 0);
/// assert_eq!(vec.capacity(), std::mem::size_of::<usize>() * 8 / 2);
/// # }
/// ```
#[inline]
pub fn resize(&mut self, new_len: usize, value: B) {
    let len = self.len();
    if len < new_len {
        // Growing: reserve exactly, fill the new tail with `value`, then
        // publish the new length.
        let n = new_len - len;
        self.reserve_exact(n);
        unsafe {
            self.fill_buf(len, n, value);
            self.len = new_len;
        }
    } else {
        // Shrinking (or no-op): delegate to truncate.
        self.truncate(new_len);
    }
}
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits, u8> = NbitsVec::new();
/// vec.resize(24, 0);
/// unsafe {
/// vec.fill_buf(0, 12, 1);
/// vec.fill_buf(12, 12, 2);
/// }
/// println!("{:?}", vec);
/// // Left align will reduce the length.
/// vec.align(1, 0);
/// assert_eq!(vec.len(), 23);
/// assert!((0..).take(11).all(|x| vec.get(x) == 1));
/// assert!((11..).take(12).all(|x| vec.get(x) == 2));
///
/// vec.align(11, 3);
/// assert_eq!(vec.len(), 23 - 8);
/// assert!((0..).take(3).all(|x| vec.get(x) == 1));
/// assert!((3..vec.len()).all(|x| vec.get(x) == 2));
/// // Right align will expand the length.
/// vec.align(6, 7);
/// assert_eq!(vec.len(), 23 - 8 + 1);
/// assert!((6..7).all(|x| vec.get(x) == 0));
/// assert!((7..vec.len()).all(|x| vec.get(x) == 2));
///
/// vec.align(13, 33);
/// assert_eq!(vec.len(), 23 - 8 + 1 + 33 - 13);
/// assert!((13..33).all(|x| vec.get(x) == 0));
/// assert!((33..vec.len()).all(|x| vec.get(x) == 2));
/// println!("{:?}", vec);
/// # }
/// ```
pub fn align(&mut self, offset: usize, to: usize) {
    // Shifts every element from `offset` onward so the element formerly at
    // `offset` lands at `to`. offset > to shrinks the vector by the
    // difference; offset < to grows it, zero-filling the gap [offset, to).
    let unit = Self::unit_bits();
    let buf_unit = Self::buf_unit_bits();
    let unit_cap = buf_unit / unit; // elements per buf unit
    if offset > to {
        // Reduce `interval` length.
        let interval = offset - to;
        // e.g. N = 2, B = u8, interval = 4
        if buf_unit % unit == 0 && interval % unit_cap == 0 {
            // Fast path: the shift distance is a whole number of buf units,
            // so the bulk can be moved with a single overlapping copy.
            // Copy previous offset * unit % buf_unit values.
            let extra = offset % unit_cap;
            let (offset, to) = (0..extra).fold((offset, to), |(offset, to), _i| {
                let value = self.get(offset);
                self.set(to, value);
                (offset + 1, to + 1)
            });
            unsafe {
                let ptr = self.buf.ptr();
                let src = offset / unit_cap;
                let dst = to / unit_cap;
                let count = self.len() / unit_cap - src + 1;
                // ptr::copy is memmove-style, so the overlap is fine.
                ptr::copy(ptr.offset(src as isize), ptr.offset(dst as isize), count);
            }
        } else {
            // Slow path: move element by element, front to back.
            for offset in offset..self.len() {
                let value = self.get(offset);
                self.set(offset - interval, value);
            }
        }
        self.len = self.len - interval;
    } else {
        // Expand with `interval` length values.
        let interval = to - offset;
        let len = self.len();
        self.reserve_exact(interval);
        if buf_unit % unit == 0 && interval % unit_cap == 0 {
            unsafe {
                let ptr = self.buf.ptr();
                let src = offset / unit_cap;
                let dst = to / unit_cap;
                let count = len / unit_cap - src + 1;
                ptr::copy(ptr.offset(src as isize), ptr.offset(dst as isize), count);
                // Zero the newly opened gap.
                self.fill_buf(offset, interval, B::zero());
                self.len = self.len() + interval;
            }
        } else {
            // Slow path: publish the new length first (set bounds-checks),
            // then move back to front so values are read before overwritten.
            self.len = len + interval;
            for offset in (offset..len).rev() {
                let value = self.get(offset);
                self.set(offset + interval, value);
            }
            unsafe {
                self.fill_buf(offset, interval, B::zero());
            }
        }
    }
}
/// Fill vector buf as `value` from `index` with size `length`.
///
/// ## Unsafety
///
/// The method doesnot check the index validation of the vector.
///
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits, u8> = NbitsVec::new();
/// vec.resize(24, 0);
/// println!("{:?}", vec);
/// unsafe {
/// vec.fill_buf(1, 2, 2); // length < buf_unit
/// assert!((1..).take(2).all(|x| vec.get(x) == 2));
/// vec.fill_buf(0, 8, 1); // offset: 0, 0
/// assert!((0..).take(8).all(|x| vec.get(x) == 1));
/// vec.fill_buf(7, 10, 2); // offset: n, n
/// assert!((7..).take(10).all(|x| vec.get(x) == 2));
/// vec.fill_buf(8, 11, 1); // offset: 0,n
/// assert!((8..).take(11).all(|x| vec.get(x) == 1));
/// }
/// # }
/// ```
#[inline]
pub unsafe fn fill_buf(&mut self, index: usize, length: usize, value: B) {
    let unit = Self::unit_bits();
    // Single-element fill: one masked write.
    if length == 1 {
        return self.set_buf_bits(index * unit, unit, value);
    }
    let buf_unit = Self::buf_unit_bits();
    if (length <= buf_unit / unit) || buf_unit % unit != 0 {
        // Short fills — and element widths that do not evenly divide a buf
        // unit — are written one element at a time.
        for i in (index..).take(length) {
            self.set_buf_bits(i * unit, unit, value);
        }
        // BUGFIX: return here. Control previously fell through into the
        // bulk path below, which assumes buf_unit % unit == 0 and clobbered
        // bits outside [index, index + length). A leftover debug println!
        // ("length is short") was removed as well.
        return;
    }
    // Bulk path: build one buf unit whose lanes all hold `value`, write the
    // fully covered units wholesale, and patch the partial units at the ends.
    let mul = buf_unit / unit;
    let item = (0..mul).fold(B::zero(), |v, _x| v << unit | value);
    let ptr = self.buf.ptr();
    let write_buf = |start: usize, end: usize| {
        (start..end).fold(ptr.offset(start as isize), |ptr, _x| {
            ptr::write(ptr, item);
            ptr.offset(1)
        });
    };
    match Self::index_range_to_buf(index, length) {
        // Range is buf-unit aligned at both ends.
        ((start_idx, start_offset), (end_idx, end_offset)) if start_offset == 0 &&
                                                              end_offset == 0 => {
            write_buf(start_idx, end_idx)
        }
        // Aligned start, partial final unit.
        ((start_idx, start_offset), (end_idx, end_offset)) if start_offset == 0 => {
            write_buf(start_idx, end_idx);
            self.set_buf_unit_bits(end_idx * buf_unit, end_offset, item);
        }
        // Partial first unit, aligned end.
        ((start_idx, start_offset), (end_idx, end_offset)) if end_offset == 0 => {
            self.set_buf_unit_bits(index * unit, buf_unit - start_offset, item);
            write_buf(start_idx + 1, end_idx);
        }
        // Partial units at both ends.
        ((start_idx, start_offset), (end_idx, end_offset)) => {
            self.set_buf_unit_bits(index * unit, buf_unit - start_offset, item);
            self.set_buf_unit_bits(end_idx * buf_unit, end_offset, item);
            write_buf(start_idx + 1, end_idx);
        }
    }
}
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// unsafe { vec.set_len(2) }
/// vec.set(0, 0b11);
/// # }
/// ```
#[inline]
pub fn set(&mut self, index: usize, value: B) {
    // Bounds check against the logical length, not the capacity.
    if index >= self.len {
        panic!("attempt to set at {} but only {}", index, self.len);
    }
    unsafe {
        let unit = Self::unit_bits();
        // Write exactly the element's `unit` bits at its bit offset.
        self.set_buf_bits(index * unit, unit, value);
    }
}
/// Set `bit` at `index`.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// vec.reserve(10);
/// unsafe { vec.set_len(7) };
/// vec.set_bit(0, true);
/// # }
/// ```
///
#[inline]
pub fn set_bit(&mut self, index: usize, bit: bool) {
    // `index` is a *bit* index, bounded by len * unit_bits — not an element index.
    let bits = self.bits();
    if index >= bits {
        panic!("attempt to set bit out of bounds");
    }
    unsafe {
        self.set_buf_unit_bit(index, bit);
    }
}
/// Get `bit` at some bit index.
///
/// Returns `None` if required index is out of bounds, else return `bool` for bit value.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// vec.reserve(10);
/// assert!(vec.get_bit(0).is_none());
/// vec.resize(10, 0);
/// println!("{:?}", vec);
/// for i in 0..8 {
/// vec.set_bit(i, true);
/// println!("Set at {} as true", i);
/// println!("{:?}", vec);
/// assert_eq!(vec.get_bit(i), Some(true));
/// }
/// for i in 0..8 {
/// vec.set_bit(i, false);
/// assert_eq!(vec.get_bit(i), Some(false));
/// }
/// # }
/// ```
#[inline]
pub fn get_bit(&self, at: usize) -> Option<bool> {
    // A bit index past len * unit_bits yields None rather than panicking.
    if at < self.bits() {
        unsafe { Some(self.get_buf_unit_bit(at) == B::one()) }
    } else {
        None
    }
}
/// Set `length` bits of buf at `offset`th bit as `value`.
///
/// ## Unsafety
///
/// `set_buf_bits` will not check the `offset`. Users should ensure to do this manually.
///
/// ## Panics
///
/// This method should panic while required `length` is longer than the buf unit bits size.
///
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
///
/// unsafe {
/// println!("Set buf 0 as 1");
/// vec.set_buf_bits(0, 1, 1);
/// println!("Set buf bits [1, 2] as `10`");
/// vec.set_buf_bits(1, 2, 2);
/// println!("Set buf bits [3, 6] as `1010`");
/// vec.set_buf_bits(3, 4, 0b1010);
/// }
/// println!("{:?}", vec);
/// unsafe {
/// assert_eq!(vec.get_buf_bits(0, 1), 1);
/// assert_eq!(vec.get_buf_bits(1, 2), 2);
/// assert_eq!(vec.get_buf_bits(3, 4), 0b1010);
/// }
/// # }
/// ```
#[inline]
pub unsafe fn set_buf_bits(&mut self, offset: usize, length: usize, value: B) {
    let buf_unit = Self::buf_unit_bits();
    if length > buf_unit {
        panic!("set {} buf bits longer than buf unit bits {}",
               length,
               buf_unit);
    }
    if length == 1 {
        // Single-bit writes go through the cheaper bit setter.
        return self.set_buf_unit_bit(offset, value & B::one() == B::one());
    }
    match Self::unit_bits() {
        unit if unit == buf_unit => {
            // NOTE: maybe unreachable!() is better.
            self.set_buf_unit_bits(offset, length, value);
        }
        unit if unit < buf_unit && buf_unit % unit == 0 => {
            // Aligned case: an element run never straddles a buf-unit boundary.
            self.set_buf_unit_bits(offset, length, value);
        }
        _ => {
            // Unaligned fallback: write bit by bit. The LSB of `value` lands
            // at the lowest bit offset, matching the aligned path's layout.
            let mut v = value;
            for x in offset..cmp::min(offset + length, self.buf_bits()) {
                self.set_buf_unit_bit(x, v & B::one() == B::one());
                v = v >> 1;
            }
        }
    }
}
/// Mask buf element of `index` at offset `(from, to)` as zero.
#[inline]
unsafe fn zero_buf_unit_bits(&mut self, offset: usize, length: usize) {
    self.set_buf_unit_bits(offset, length, B::zero());
}
/// Set buf element of `index` at offset `from` to `to` as `value`.
///
/// The run must fit inside a single buf unit; `offset % unit + length`
/// beyond the unit width overflows the shifts below (panics in debug).
#[inline]
unsafe fn set_buf_unit_bits(&mut self, offset: usize, length: usize, value: B) {
    let (index, offset) = Self::bit_index_to_buf(offset);
    // `length` consecutive one-bits, positioned at `offset` within the unit.
    let mask = (offset..)
        .take(length)
        .fold(B::zero(), |mask, _x| mask << 1 | B::one()) << offset;
    let ptr = self.buf.ptr().offset(index as isize);
    let cur = ptr::read(ptr);
    let new = mask & (value << offset);
    let old = mask & cur;
    // Store only when the masked bits actually change.
    if old != new {
        ptr::write(ptr, cur & !mask | new);
    }
}
/// Set buf unit bit at `index`th unit of `offset`bit.
#[inline]
unsafe fn set_buf_unit_bit(&mut self, offset: usize, bit: bool) {
    let (index, offset) = Self::bit_index_to_buf(offset);
    let mask = B::one() << offset;
    let ptr = self.buf.ptr().offset(index as isize);
    let cur = ptr::read(ptr);
    let old = cur >> offset & B::one();
    // Write back only when the stored bit differs from the requested one.
    match (old == B::one(), bit) {
        (lhs, rhs) if lhs == rhs => (),
        (_, true) => ptr::write(ptr, cur | mask),
        (_, false) => ptr::write(ptr, cur & mask.not()),
    }
}
/// Get `N` bits value as `B`.
///
/// ## TODO
///
/// ?? Is a `Nbits` object is better than `B` ??
///
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// unsafe { vec.set_len(2) }
/// vec.set(0, 0b11);
/// assert_eq!(vec.get(0), 0b11);
/// # }
/// ```
pub fn get(&self, index: usize) -> B {
    assert!(index < self.len,
            "attempt to get at {} but only {}", index, self.len);
    let unit = Self::unit_bits();
    // An in-range index guarantees the bit span lies inside live storage.
    unsafe { self.get_buf_bits(index * unit, unit) }
}
/// Get `length` bits of buf at `offset`th bit.
///
/// # Unsafety
///
/// `get_buf_bits` will not check the `offset`. Users should ensure to do this manually.
///
/// # Panics
///
/// This method should panic while required `length` is longer than the buf unit bits size.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// vec.resize(10, 0);
/// println!("{:?}", vec);
/// for i in 0..8 {
/// vec.set_bit(i, if i % 2 == 0 { true } else { false });
/// }
/// println!("{:?}", vec);
/// unsafe {
/// println!("Get buf bits at 0 with length 1");
/// assert_eq!(vec.get_buf_bits(0, 1), 1);
/// println!("Get buf bits at 1 with length 2");
/// assert_eq!(vec.get_buf_bits(1, 2), 2);
/// println!("Get buf bits at 3 with length 4");
/// assert_eq!(vec.get_buf_bits(3, 4), 0b1010);
/// }
/// # }
/// ```
#[inline]
pub unsafe fn get_buf_bits(&self, offset: usize, length: usize) -> B {
    let buf_unit = Self::buf_unit_bits();
    if length > buf_unit {
        panic!("get {} buf bits longer than buf unit bits {}",
               length,
               buf_unit);
    }
    if length == 1 {
        return self.get_buf_unit_bit(offset);
    }
    match (Self::unit_bits(), Self::buf_unit_bits()) {
        (unit, buf_unit) if unit == buf_unit => {
            // NOTE: maybe unreachable!() is better
            self.get_buf_unit_bits(offset, length)
        }
        (unit, buf_unit) if unit < buf_unit && buf_unit % unit == 0 => {
            self.get_buf_unit_bits(offset, length)
        }
        (_, _) => {
            // BUG FIX: fold from the highest bit down so the bit at `offset`
            // ends up least significant. The old ascending fold returned the
            // run bit-reversed relative to both `get_buf_unit_bits` and the
            // LSB-first order written by `set_buf_bits`' fallback.
            (offset..cmp::min(offset + length, self.buf_bits()))
                .rev()
                .map(|x| self.get_buf_unit_bit(x))
                .fold(B::zero(), |v, x| v << 1 | x)
        }
    }
}
/// Get buf unit bit at `index`th unit of `offset`bit.
#[inline]
unsafe fn get_buf_unit_bit(&self, offset: usize) -> B {
    let (index, offset) = Self::bit_index_to_buf(offset);
    let ptr = self.buf.ptr().offset(index as isize);
    // Shift the requested bit down to position 0 and mask off the rest.
    ptr::read(ptr) >> offset & B::one()
}
/// Get buf `length` bits of unit at `index`th unit's `offset`th bit
///
/// The run must lie inside one buf unit: `offset % unit + length` beyond
/// the unit width makes the left shift below overflow (panics in debug).
#[inline]
unsafe fn get_buf_unit_bits(&self, offset: usize, length: usize) -> B {
    let offset = Self::bit_index_to_buf(offset);
    let ptr = self.buf.ptr().offset(offset.0 as isize);
    let unit = Self::buf_unit_bits();
    // Left shift discards bits above the run, right shift discards bits
    // below it, leaving the run right-aligned in the result.
    (ptr::read(ptr) << (unit - offset.1 - length)) >> (unit - length)
}
/// Converts capacity to storage size
#[inline]
fn capacity_to_buf(capacity: usize) -> usize {
    if capacity == 0 {
        0
    } else {
        // Ceiling division: round the total bit count up to whole buf units.
        (capacity * Self::unit_bits() - 1) / (Self::buf_unit_bits()) + 1
    }
}
/// Converts the storage size to capacity.
#[inline]
fn capacity_from_buf(buf_cap: usize) -> usize {
    buf_cap * Self::buf_unit_bits() / Self::unit_bits()
}
/// Converts the vector index to buf `(index, offset)` tuple.
#[inline]
fn index_to_buf(index: usize) -> (usize, usize) {
    let elem_bits = Self::buf_unit_bits();
    let bits_index = index * Self::unit_bits();
    (bits_index / elem_bits, bits_index % elem_bits)
}
/// Converts the vector index range to buf `(index, offset)` range tuple.
#[inline]
fn index_range_to_buf(index: usize, length: usize) -> ((usize, usize), (usize, usize)) {
    // The second tuple is the (exclusive) end position of the range.
    (Self::index_to_buf(index),
     Self::index_to_buf(index + length))
}
/// Converts bit index to buf `(index, offset)` tuple.
#[inline]
fn bit_index_to_buf(index: usize) -> (usize, usize) {
    let unit = Self::buf_unit_bits();
    (index / unit, index % unit)
}
/// Returns size of `B`.
#[inline]
fn buf_unit_bits() -> usize {
    // Bits per buf unit, e.g. 64 for the default `B = usize` on 64-bit.
    mem::size_of::<B>() * 8
}
/// Returns unit of bits - that is `NbitsVec`'s `N`.
#[inline]
fn unit_bits() -> usize {
    T::bits()
}
}
// Implemented `insert`, `remove`, `append`, `swap_remove`.
use alloc::raw_vec::RawVec;
use num::{self, PrimInt};
use std::cmp;
use std::ops::*;
use std::fmt::{self, Debug, Display};
use std::mem;
use std::ptr;
use std::marker::PhantomData;
/// Compile-time description of the element width used by `NbitsVec`.
pub trait Nbits {
    /// Number of bits a single element occupies.
    fn bits() -> usize;
    #[inline]
    fn mask() -> usize {
        // Builds `bits()` consecutive one-bits. The shift-and-or loop avoids
        // the overflow that `(1 << bits()) - 1` would hit at `bits() == 64`.
        (0..).take(Self::bits()).fold(0, |mask, _x| mask << 1 | 1)
    }
}
/// A packed vector storing `T::bits()`-wide elements inside `B` buf units.
pub struct NbitsVec<T: Nbits, B = usize> {
    // Raw backing storage; its capacity is counted in `B` units, not elements.
    buf: RawVec<B>,
    // Number of logical N-bit elements currently considered live.
    len: usize,
    // Zero-sized marker tying the element-width type `T` to the vector.
    _marker: PhantomData<T>,
}
impl<T: Nbits, B: PrimInt> Default for NbitsVec<T, B> {
    /// An empty, unallocated vector — identical to [`NbitsVec::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl<T: Nbits, B: PrimInt + fmt::LowerHex> Debug for NbitsVec<T, B> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f,
"NbitsVec<{}> {{ len: {}, buf: RawVec {{ cap: {}, [",
T::bits(),
self.len,
self.buf.cap()));
let ptr = self.buf.ptr();
for i in 0..self.buf.cap() {
unsafe {
try!(write!(f, "{:#x}, ", ptr::read(ptr.offset(i as isize))));
}
}
write!(f, "] }}")
}
}
impl<
T: Nbits,
B: PrimInt
> NbitsVec<T, B> {
/// Constructs a new, empty NbitsVec<T>
///
/// The vector will not allocate until elements are pushed onto it.
///
/// # Examples
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// # }
/// ```
#[inline]
pub fn new() -> Self {
    // No allocation happens until elements are actually pushed.
    NbitsVec {
        len: 0,
        buf: RawVec::new(),
        _marker: PhantomData,
    }
}
/// Constructs a new, empty Vec<T> with the specified capacity.
///
/// The vector will be able to hold exactly capacity elements without reallocating. If capacity
/// is 0, the vector will not allocate.
///
/// It is important to note that this function does not specify the length of the returned
/// vector, but only the capacity.
///
/// # Examples
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// assert!(vec.capacity() >= 10);
/// # }
/// ```
pub fn with_capacity(capacity: usize) -> Self {
    // Translate the element count into backing buf units before allocating.
    let buf_cap = Self::capacity_to_buf(capacity);
    NbitsVec {
        len: 0,
        buf: RawVec::with_capacity(buf_cap),
        _marker: PhantomData,
    }
}
// NOTE(review): `ptr` must point to an allocation of at least
// `capacity_to_buf(capacity)` buf units that this vector may own and free,
// with the first `length` elements initialized — TODO confirm with callers.
pub unsafe fn from_raw_parts(ptr: *mut B, length: usize, capacity: usize) -> Self {
    NbitsVec {
        buf: RawVec::from_raw_parts(ptr, Self::capacity_to_buf(capacity)),
        len: length,
        _marker: PhantomData,
    }
}
/// Returns the number of elements the vector can hold without reallocating.
///
/// # Examples
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::{NbitsVec, As1bits};
/// # fn main() {
/// let v: NbitsVec<As1bits> = NbitsVec::with_capacity(10);
/// assert!(v.capacity() >= 10);
/// assert_eq!(v.capacity(), std::mem::size_of::<usize>() * 8);
/// # }
/// ```
#[inline(always)]
pub fn capacity(&self) -> usize {
    // Derived from the raw buf capacity, so it is rounded to whole buf units.
    Self::capacity_from_buf(self.buf.cap())
}
/// Reserves capacity for at least additional more elements to be inserted in the given
/// NbitsVec<T>.
/// The collection may reserve more space to avoid frequent reallocations.
///
/// # Panics
///
/// Panics if the new capacity overflows usize.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// assert!(v.capacity() == 0);
/// v.reserve(100);
/// assert!(v.capacity() >= 100);
/// # }
/// ```
pub fn reserve(&mut self, additional: usize) {
    // Total element count that must fit after the reservation.
    let required_cap = self.len().checked_add(additional).expect("capacity overflow");
    // Buf units already needed by the current length.
    let used_cap = Self::capacity_to_buf(self.len());
    // BUG FIX: `RawVec::reserve(used, needed_extra)` wants the *extra* units
    // on top of `used`; the old code passed the total, over-allocating on
    // every call.
    let need_extra_cap = Self::capacity_to_buf(required_cap) - used_cap;
    self.buf.reserve(used_cap, need_extra_cap);
}
/// Reserves the minimum capacity for exactly additional more elements to be inserted in the
/// given `NbitsVec<T>`. Does nothing if the capacity is already sufficient.
///
/// # Panics
///
/// Panics if the new capacity overflows usize.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// assert!(v.capacity() == 0);
/// v.reserve_exact(64);
/// assert_eq!(v.capacity(), 64);
/// v.reserve_exact(127);
/// assert!(v.capacity() >= 127);
/// v.reserve_exact(128);
/// assert_eq!(v.capacity(), 128);
/// # }
/// ```
pub fn reserve_exact(&mut self, additional: usize) {
    // Total element count that must fit after the reservation.
    let required_cap = self.len().checked_add(additional).expect("capacity overflow");
    // Buf units already needed by the current length.
    let used_cap = Self::capacity_to_buf(self.len());
    // BUG FIX: pass the *extra* buf units, not the total; see `reserve`.
    let need_extra_cap = Self::capacity_to_buf(required_cap) - used_cap;
    self.buf.reserve_exact(used_cap, need_extra_cap);
}
/// Shrinks the capacity of the vector as much as possible.
///
/// It will drop down as close as possible to the length but the allocator may still inform the
/// vector that there is space for a few more elements.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// vec.shrink_to_fit();
/// assert_eq!(vec.capacity(), 0);
/// # }
/// ```
///
pub fn shrink_to_fit(&mut self) {
    // Smallest number of buf units that still holds `len` elements.
    let fit_len = Self::capacity_to_buf(self.len());
    self.buf.shrink_to_fit(fit_len);
}
/// Expands the length of the vector as much as possible with current capacity.
///
/// Be sure not to use the method if the capacity was not set by yourself -
/// meaning you did not expect the capacity, so neither the length.
pub fn expand_to_fit(&mut self) {
    // BUG FIX: dropped the unused `fit_len` local, which only produced an
    // unused-variable warning; the method itself remains unimplemented.
    unimplemented!();
}
/// Shorten a vector to be `len` elements long, dropping excess elements.
///
/// If `len` is greater than the vector's current length, this has no effect.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(2);
/// unsafe { vec.set_len(2) }
/// vec.truncate(3);
/// assert_eq!(vec.len(), 2);
/// vec.truncate(1);
/// assert_eq!(vec.len(), 1);
/// # }
/// ```
pub fn truncate(&mut self, len: usize) {
    if len < self.len() {
        self.len = len;
        // Unlike `Vec::truncate`, this also releases now-unused buf units.
        self.shrink_to_fit();
    }
}
// NOTE(review): slice views over sub-byte packed elements cannot be produced
// without a widening copy; these remain unimplemented placeholders.
pub fn as_slice(&self) -> &[T] {
    unimplemented!();
}
pub fn as_mut_slice(&mut self) -> &mut [T] {
    unimplemented!();
}
pub fn into_boxed_slice(self) -> Box<[T]> {
    unimplemented!();
}
/// Sets the length of a vector.
///
/// This will explicitly set the size of the vector, without actually modifying its buffers or
/// reserving additional capacity as needed, so it is up to the caller to ensure that the vector
/// is actually the specified size.
///
/// Recommend to use [resize()](#method.resize) when you actually want to `resize` the vector.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// unsafe {
/// v.set_len(3);
/// }
/// assert_eq!(v.len(), 3);
/// assert_eq!(v.capacity(), 0); // as documented, the capacity will not change
/// unsafe {
/// v.set_len(1)
/// }
/// assert_eq!(v.len(), 1);
/// # }
/// ```
#[inline]
pub unsafe fn set_len(&mut self, len: usize) {
    // Only bookkeeping changes: no storage is allocated or initialized, so
    // the caller must guarantee the first `len` elements are meaningful.
    self.len = len;
}
/// Inserts `element` at position `index`, shifting every element after it
/// one position to the right.
///
/// # Panics
///
/// Panics if `index` is greater than the vector's length.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// v.push(0b01);
/// v.push(0b10);
/// assert_eq!(v.len(), 2);
/// v.insert(1, 0b11);
/// assert_eq!(v.get(1), 0b11);
/// assert_eq!(v.get(2), 0b10);
/// # }
/// ```
pub fn insert(&mut self, index: usize, element: B) {
    // BUG FIX: the doc example above was missing its closing code fence, and
    // out-of-range indexes were handed straight to `align`; now validated.
    assert!(index <= self.len, "index is out of bounds");
    // `align` opens a one-element gap at `index` and bumps `len` by one.
    self.align(index, index + 1);
    self.set(index, element);
}
/// Removes and returns the element at position `index` within the vector, shifting all elements
/// after position `index` one position to the left.
///
/// # Panics
///
/// Panics if `index` is out of bounds.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// v.push(0b01);
/// v.push(0b10);
/// assert_eq!(v.remove(0), 0b01);
/// # }
/// ```
pub fn remove(&mut self, index: usize) -> B {
    if index >= self.len {
        panic!("index is out of bounds");
    }
    // An in-range `index` implies a non-empty vector, so the old separate
    // `is_empty` panic was unreachable and has been removed.
    if self.len() == 1 {
        // BUG FIX: the expect message used to say "swap removed".
        return self.pop().expect("removed the only element");
    }
    let removed = self.get(index);
    // Close the gap by shifting everything after `index` left by one.
    self.align(index + 1, index);
    removed
}
/// Removes an element from anywhere in the vector and return it, replacing it with the last
/// element.
///
/// # Panics
///
/// Panics if `index` is out of bounds.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut v: NbitsVec<As2bits> = NbitsVec::new();
/// v.push(0b01);
/// v.push(0b10);
/// v.push(0b11);
/// assert_eq!(v.len(), 3);
/// assert_eq!(v.swap_remove(0), 0b01);
/// assert_eq!(v.len(), 2);
/// assert_eq!(v.get(0), 0b11);
/// assert_eq!(v.get(1), 0b10);
/// assert_eq!(v.swap_remove(1), 0b10);
/// # }
/// ```
pub fn swap_remove(&mut self, index: usize) -> B {
    if index >= self.len {
        panic!("index is out of bounds");
    }
    let last_index = self.len - 1;
    if index == last_index {
        // BUG FIX: removing the last element used to `set(index, last)`
        // *after* popping, which panicked because `index` was already
        // beyond the shortened length. Popping directly also covers the
        // one-element vector the old special case handled.
        return self.pop().expect("vector is non-empty");
    }
    let value = self.get(index);
    let last = self.pop().expect("vector is non-empty");
    // Overwrite the removed slot with the former last element.
    self.set(index, last);
    value
}
/// Moves all the elements of `other` into `Self`, leaving `other` empty.
///
/// # Panics
///
/// Panics if the number of elements in the vector overflows a `usize`.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// let mut other: NbitsVec<As2bits> = NbitsVec::new();
/// other.resize(2, 0b10);
/// vec.append(&mut other);
/// assert_eq!(vec.len(), 2);
/// assert_eq!(other.len(), 0);
/// # assert_eq!(vec.get(0), 0b10);
/// # assert_eq!(vec.get(1), 0b10);
/// # }
/// ```
pub fn append(&mut self, other: &mut Self) {
    let moved = other.len();
    self.reserve_exact(moved);
    // Copy element by element: the two vectors may straddle buf-unit
    // boundaries differently, so a raw block copy is not possible here.
    for i in 0..moved {
        let elem = other.get(i);
        self.push(elem);
    }
    other.clear();
}
/// Unimplemented placeholder for `Vec::retain`-style filtering.
pub fn retain<F>(&mut self, f: F)
    where F: FnMut(&T) -> bool
{
    unimplemented!();
}
/// Removes every element without touching the allocated capacity.
#[inline]
pub fn clear(&mut self) {
    self.len = 0;
}
/// Number of N-bit elements currently stored.
#[inline]
pub fn len(&self) -> usize {
    self.len
}
/// Returns the number of bits in current length.
///
/// It is related to the element numbers - not the capacity.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// assert_eq!(vec.bits(), 0);
/// # }
/// ```
#[inline]
pub fn bits(&self) -> usize {
    // Live bits only: element count times the per-element bit width.
    self.len() * Self::unit_bits()
}
/// Total bits in buf.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// assert_eq!(vec.buf_bits(), std::mem::size_of::<usize>() * 8);
/// # }
/// ```
pub fn buf_bits(&self) -> usize {
    // Allocated bits: every buf unit, whether or not it holds live elements.
    self.buf.cap() * Self::buf_unit_bits()
}
/// Returns whether or not the vector is empty.
///
/// Alias to `len() == 0`.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// assert!(vec.is_empty());
/// # }
/// ```
#[inline]
pub fn is_empty(&self) -> bool {
    self.len() == 0
}
/// Splits the collection at the given index (unimplemented placeholder).
pub fn split_off(&mut self, at: usize) -> Self {
    unimplemented!();
}
/// Appends a slice of elements (unimplemented placeholder).
pub fn push_all(&mut self, other: &[T]) {
    unimplemented!();
}
// And any lost functions from `dedup` to the end.
/// Mutable access to one element (unimplemented placeholder).
pub fn get_mut(&self, index: usize) {
    unimplemented!();
}
/// Appends an element to the back of a collection.
///
/// # Panics
///
/// Panics if the number of elements in the vector overflows a `usize`.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// vec.push(0b10);
/// vec.push(0b01);
/// assert_eq!(vec.len(), 2);
/// # }
/// ```
pub fn push(&mut self, value: B) {
    let index = self.len();
    let new_len = index.checked_add(1).expect("usize added overflows");
    self.reserve(1);
    // Grow the length first so `set`'s bounds check accepts the new slot.
    self.len = new_len;
    self.set(index, value);
}
/// Removes the last element from a vector and returns it, or `None` if it is empty.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// vec.push(0b10);
/// vec.push(0b11);
/// assert_eq!(vec.pop(), Some(0b11));
/// assert_eq!(vec.pop(), Some(0b10));
/// assert_eq!(vec.len(), 0);
/// # }
/// ```
pub fn pop(&mut self) -> Option<B> {
    if self.is_empty() {
        return None;
    }
    let new_len = self.len() - 1;
    // Read the tail element before the length is cut.
    let tail = self.get(new_len);
    self.len = new_len;
    Some(tail)
}
/// Resizes the Vec in-place so that len() is equal to new_len.
///
/// If new_len is greater than len(), the Vec is extended by the difference, with each
/// additional slot filled with value. If new_len is less than len(), the Vec is simply
/// truncated. Note that `resize` expand memeory will use `reserve_exact` method to
/// fit size.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// vec.resize(10, 0);
/// assert_eq!(vec.capacity(), std::mem::size_of::<usize>() * 8 / 2);
/// # }
/// ```
#[inline]
pub fn resize(&mut self, new_len: usize, value: B) {
    let old_len = self.len();
    if new_len <= old_len {
        // Shrinking (or no-op): `truncate` does nothing when equal.
        self.truncate(new_len);
    } else {
        let grow = new_len - old_len;
        self.reserve_exact(grow);
        // Initialize the fresh tail before exposing it through `len`.
        unsafe {
            self.fill_buf(old_len, grow, value);
            self.len = new_len;
        }
    }
}
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits, u8> = NbitsVec::new();
/// vec.resize(24, 0);
/// unsafe {
/// vec.fill_buf(0, 12, 1);
/// vec.fill_buf(12, 12, 2);
/// }
/// println!("{:?}", vec);
/// // Left align will reduce the length.
/// vec.align(1, 0);
/// assert_eq!(vec.len(), 23);
/// assert!((0..).take(11).all(|x| vec.get(x) == 1));
/// assert!((11..).take(12).all(|x| vec.get(x) == 2));
///
/// vec.align(11, 3);
/// assert_eq!(vec.len(), 23 - 8);
/// assert!((0..).take(3).all(|x| vec.get(x) == 1));
/// assert!((3..vec.len()).all(|x| vec.get(x) == 2));
/// // Right align will expand the length.
/// vec.align(6, 7);
/// assert_eq!(vec.len(), 23 - 8 + 1);
/// assert!((6..7).all(|x| vec.get(x) == 0));
/// assert!((7..vec.len()).all(|x| vec.get(x) == 2));
///
/// vec.align(13, 33);
/// assert_eq!(vec.len(), 23 - 8 + 1 + 33 - 13);
/// assert!((13..33).all(|x| vec.get(x) == 0));
/// assert!((33..vec.len()).all(|x| vec.get(x) == 2));
/// println!("{:?}", vec);
/// # }
/// ```
pub fn align(&mut self, offset: usize, to: usize) {
    // Shifts the tail of the vector so the element at `offset` moves to
    // `to`: `offset > to` closes a gap (shrinks `len`); `offset < to`
    // opens a zero-filled gap (grows `len`).
    let unit = Self::unit_bits();
    let buf_unit = Self::buf_unit_bits();
    // Elements per buf unit (only meaningful when unit divides buf_unit).
    let unit_cap = buf_unit / unit;
    if offset > to {
        // Reduce `interval` length.
        let interval = offset - to;
        // e.g. N = 2, B = u8, interval = 4
        if buf_unit % unit == 0 && interval % unit_cap == 0 {
            // Fast path: the shift is a whole number of buf units, so copy
            // the leading partial unit element-wise, then memmove the rest.
            // Copy previous offset * unit % buf_unit values.
            let extra = offset % unit_cap;
            let (offset, to) = (0..extra).fold((offset, to), |(offset, to), _i| {
                let value = self.get(offset);
                self.set(to, value);
                (offset + 1, to + 1)
            });
            unsafe {
                let ptr = self.buf.ptr();
                let src = offset / unit_cap;
                let dst = to / unit_cap;
                let count = self.len() / unit_cap - src + 1;
                // `ptr::copy` handles the overlapping ranges (memmove).
                ptr::copy(ptr.offset(src as isize), ptr.offset(dst as isize), count);
            }
        } else {
            // Slow path: move every trailing element one by one.
            for offset in offset..self.len() {
                let value = self.get(offset);
                self.set(offset - interval, value);
            }
        }
        self.len = self.len - interval;
    } else {
        // Expand with `interval` length values.
        let interval = to - offset;
        let len = self.len();
        self.reserve_exact(interval);
        if buf_unit % unit == 0 && interval % unit_cap == 0 {
            unsafe {
                let ptr = self.buf.ptr();
                let src = offset / unit_cap;
                let dst = to / unit_cap;
                let count = len / unit_cap - src + 1;
                ptr::copy(ptr.offset(src as isize), ptr.offset(dst as isize), count);
                // Zero the newly opened gap before publishing the new length.
                self.fill_buf(offset, interval, B::zero());
                self.len = self.len() + interval;
            }
        } else {
            // Slow path: grow first, then shift elements right, tail first,
            // so nothing is overwritten before it is copied.
            self.len = len + interval;
            for offset in (offset..len).rev() {
                let value = self.get(offset);
                self.set(offset + interval, value);
            }
            unsafe {
                self.fill_buf(offset, interval, B::zero());
            }
        }
    }
}
/// Fill vector buf as `value` from `index` with size `length`.
///
/// ## Unsafety
///
/// The method doesnot check the index validation of the vector.
///
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits, u8> = NbitsVec::new();
/// vec.resize(24, 0);
/// println!("{:?}", vec);
/// unsafe {
/// vec.fill_buf(1, 2, 2); // length < buf_unit
/// assert!((1..).take(2).all(|x| vec.get(x) == 2));
/// vec.fill_buf(0, 8, 1); // offset: 0, 0
/// assert!((0..).take(8).all(|x| vec.get(x) == 1));
/// vec.fill_buf(7, 10, 2); // offset: n, n
/// assert!((7..).take(10).all(|x| vec.get(x) == 2));
/// vec.fill_buf(8, 11, 1); // offset: 0,n
/// assert!((8..).take(11).all(|x| vec.get(x) == 1));
/// }
/// # }
/// ```
#[inline]
pub unsafe fn fill_buf(&mut self, index: usize, length: usize, value: B) {
    let unit = Self::unit_bits();
    if length == 1 {
        return self.set_buf_bits(index * unit, unit, value);
    }
    let buf_unit = Self::buf_unit_bits();
    if (length <= buf_unit / unit) || buf_unit % unit != 0 {
        // Short or unaligned runs are written element by element.
        for i in (index..).take(length) {
            self.set_buf_bits(i * unit, unit, value);
        }
        // BUG FIX: return here. The old code fell through into the bulk
        // path below, re-writing the range (and corrupting it in the
        // unaligned case); it also left a stray debug `println!` behind.
        return;
    }
    // Bulk path: build one buf unit holding `mul` repeated copies of
    // `value`, then stamp it across whole units, patching partial units at
    // both ends.
    let mul = buf_unit / unit;
    let item = (0..mul).fold(B::zero(), |v, _x| v << unit | value);
    let ptr = self.buf.ptr();
    let write_buf = |start: usize, end: usize| {
        (start..end).fold(ptr.offset(start as isize), |ptr, _x| {
            ptr::write(ptr, item);
            ptr.offset(1)
        });
    };
    match Self::index_range_to_buf(index, length) {
        ((start_idx, start_offset), (end_idx, end_offset)) if start_offset == 0 &&
                                                              end_offset == 0 => {
            // Both ends are unit-aligned: pure whole-unit writes.
            write_buf(start_idx, end_idx)
        }
        ((start_idx, start_offset), (end_idx, end_offset)) if start_offset == 0 => {
            write_buf(start_idx, end_idx);
            // Patch the trailing partial unit.
            self.set_buf_unit_bits(end_idx * buf_unit, end_offset, item);
        }
        ((start_idx, start_offset), (end_idx, end_offset)) if end_offset == 0 => {
            // Patch the leading partial unit.
            self.set_buf_unit_bits(index * unit, buf_unit - start_offset, item);
            write_buf(start_idx + 1, end_idx);
        }
        ((start_idx, start_offset), (end_idx, end_offset)) => {
            // Partial units at both ends.
            self.set_buf_unit_bits(index * unit, buf_unit - start_offset, item);
            self.set_buf_unit_bits(end_idx * buf_unit, end_offset, item);
            write_buf(start_idx + 1, end_idx);
        }
    }
}
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// unsafe { vec.set_len(2) }
/// vec.set(0, 0b11);
/// assert_eq!(vec.get(0), 0b11);
/// # }
/// ```
#[inline]
pub fn set(&mut self, index: usize, value: B) {
    assert!(index < self.len,
            "attempt to set at {} but only {}", index, self.len);
    let unit = Self::unit_bits();
    // Bounds were validated above; write the element's N bits in place.
    unsafe { self.set_buf_bits(index * unit, unit, value) }
}
/// Set `bit` at `index`.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// vec.reserve(10);
/// unsafe { vec.set_len(7) };
/// vec.set_bit(0, true);
/// # }
/// ```
///
#[inline]
pub fn set_bit(&mut self, index: usize, bit: bool) {
    assert!(index < self.bits(), "attempt to set bit out of bounds");
    // The bounds check above keeps the raw bit write inside live storage.
    unsafe { self.set_buf_unit_bit(index, bit) }
}
/// Get `bit` at some bit index.
///
/// Returns `None` if required index is out of bounds, else return `bool` for bit value.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// vec.reserve(10);
/// assert!(vec.get_bit(0).is_none());
/// vec.resize(10, 0);
/// println!("{:?}", vec);
/// for i in 0..8 {
/// vec.set_bit(i, true);
/// println!("Set at {} as true", i);
/// println!("{:?}", vec);
/// assert_eq!(vec.get_bit(i), Some(true));
/// }
/// for i in 0..8 {
/// vec.set_bit(i, false);
/// assert_eq!(vec.get_bit(i), Some(false));
/// }
/// # }
/// ```
#[inline]
pub fn get_bit(&self, at: usize) -> Option<bool> {
    if at < self.bits() {
        // In-bounds `at` was just verified, so the raw read is safe.
        unsafe { Some(self.get_buf_unit_bit(at) == B::one()) }
    } else {
        None
    }
}
/// Set `length` bits of buf at `offset`th bit as `value`.
///
/// ## Unsafety
///
/// `set_buf_bits` will not check the `offset`. Users should ensure to do this manually.
///
/// ## Panics
///
/// This method should panic while required `length` is longer than the buf unit bits size.
///
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
///
/// unsafe {
/// println!("Set buf 0 as 1");
/// vec.set_buf_bits(0, 1, 1);
/// println!("Set buf bits [1, 2] as `10`");
/// vec.set_buf_bits(1, 2, 2);
/// println!("Set buf bits [3, 6] as `1010`");
/// vec.set_buf_bits(3, 4, 0b1010);
/// }
/// println!("{:?}", vec);
/// unsafe {
/// assert_eq!(vec.get_buf_bits(0, 1), 1);
/// assert_eq!(vec.get_buf_bits(1, 2), 2);
/// assert_eq!(vec.get_buf_bits(3, 4), 0b1010);
/// }
/// # }
/// ```
#[inline]
pub unsafe fn set_buf_bits(&mut self, offset: usize, length: usize, value: B) {
    let buf_unit = Self::buf_unit_bits();
    if length > buf_unit {
        panic!("set {} buf bits longer than buf unit bits {}",
               length,
               buf_unit);
    }
    if length == 1 {
        // Single-bit writes go through the cheaper bit setter.
        return self.set_buf_unit_bit(offset, value & B::one() == B::one());
    }
    match Self::unit_bits() {
        unit if unit == buf_unit => {
            // NOTE: maybe unreachable!() is better.
            self.set_buf_unit_bits(offset, length, value);
        }
        unit if unit < buf_unit && buf_unit % unit == 0 => {
            // Aligned case: an element run never straddles a buf-unit boundary.
            self.set_buf_unit_bits(offset, length, value);
        }
        _ => {
            // Unaligned fallback: write bit by bit. The LSB of `value` lands
            // at the lowest bit offset, matching the aligned path's layout.
            let mut v = value;
            for x in offset..cmp::min(offset + length, self.buf_bits()) {
                self.set_buf_unit_bit(x, v & B::one() == B::one());
                v = v >> 1;
            }
        }
    }
}
/// Mask buf element of `index` at offset `(from, to)` as zero.
#[inline]
unsafe fn zero_buf_unit_bits(&mut self, offset: usize, length: usize) {
    self.set_buf_unit_bits(offset, length, B::zero());
}
/// Set buf element of `index` at offset `from` to `to` as `value`.
///
/// The run must fit inside a single buf unit; `offset % unit + length`
/// beyond the unit width overflows the shifts below (panics in debug).
#[inline]
unsafe fn set_buf_unit_bits(&mut self, offset: usize, length: usize, value: B) {
    let (index, offset) = Self::bit_index_to_buf(offset);
    // `length` consecutive one-bits, positioned at `offset` within the unit.
    let mask = (offset..)
        .take(length)
        .fold(B::zero(), |mask, _x| mask << 1 | B::one()) << offset;
    let ptr = self.buf.ptr().offset(index as isize);
    let cur = ptr::read(ptr);
    let new = mask & (value << offset);
    let old = mask & cur;
    // Store only when the masked bits actually change.
    if old != new {
        ptr::write(ptr, cur & !mask | new);
    }
}
/// Set buf unit bit at `index`th unit of `offset`bit.
#[inline]
unsafe fn set_buf_unit_bit(&mut self, offset: usize, bit: bool) {
    let (index, offset) = Self::bit_index_to_buf(offset);
    let mask = B::one() << offset;
    let ptr = self.buf.ptr().offset(index as isize);
    let cur = ptr::read(ptr);
    let old = cur >> offset & B::one();
    // Write back only when the stored bit differs from the requested one.
    match (old == B::one(), bit) {
        (lhs, rhs) if lhs == rhs => (),
        (_, true) => ptr::write(ptr, cur | mask),
        (_, false) => ptr::write(ptr, cur & mask.not()),
    }
}
/// Get `N` bits value as `B`.
///
/// ## TODO
///
/// ?? Is a `Nbits` object is better than `B` ??
///
/// ## Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::with_capacity(10);
/// unsafe { vec.set_len(2) }
/// vec.set(0, 0b11);
/// assert_eq!(vec.get(0), 0b11);
/// # }
/// ```
pub fn get(&self, index: usize) -> B {
    assert!(index < self.len,
            "attempt to get at {} but only {}", index, self.len);
    let unit = Self::unit_bits();
    // An in-range index guarantees the bit span lies inside live storage.
    unsafe { self.get_buf_bits(index * unit, unit) }
}
/// Get `length` bits of buf at `offset`th bit.
///
/// # Unsafety
///
/// `get_buf_bits` will not check the `offset`. Users should ensure to do this manually.
///
/// # Panics
///
/// This method should panic while required `length` is longer than the buf unit bits size.
///
/// # Examples
///
/// ```
/// # extern crate bits_vec;
/// # use bits_vec::*;
/// # fn main() {
/// let mut vec: NbitsVec<As2bits> = NbitsVec::new();
/// vec.resize(10, 0);
/// println!("{:?}", vec);
/// for i in 0..8 {
/// vec.set_bit(i, if i % 2 == 0 { true } else { false });
/// }
/// println!("{:?}", vec);
/// unsafe {
/// println!("Get buf bits at 0 with length 1");
/// assert_eq!(vec.get_buf_bits(0, 1), 1);
/// println!("Get buf bits at 1 with length 2");
/// assert_eq!(vec.get_buf_bits(1, 2), 2);
/// println!("Get buf bits at 3 with length 4");
/// assert_eq!(vec.get_buf_bits(3, 4), 0b1010);
/// }
/// # }
/// ```
#[inline]
pub unsafe fn get_buf_bits(&self, offset: usize, length: usize) -> B {
    let buf_unit = Self::buf_unit_bits();
    if length > buf_unit {
        panic!("get {} buf bits longer than buf unit bits {}",
               length,
               buf_unit);
    }
    if length == 1 {
        return self.get_buf_unit_bit(offset);
    }
    match (Self::unit_bits(), Self::buf_unit_bits()) {
        (unit, buf_unit) if unit == buf_unit => {
            // NOTE: maybe unreachable!() is better
            self.get_buf_unit_bits(offset, length)
        }
        (unit, buf_unit) if unit < buf_unit && buf_unit % unit == 0 => {
            self.get_buf_unit_bits(offset, length)
        }
        (_, _) => {
            // BUG FIX: fold from the highest bit down so the bit at `offset`
            // ends up least significant. The old ascending fold returned the
            // run bit-reversed relative to both `get_buf_unit_bits` and the
            // LSB-first order written by `set_buf_bits`' fallback.
            (offset..cmp::min(offset + length, self.buf_bits()))
                .rev()
                .map(|x| self.get_buf_unit_bit(x))
                .fold(B::zero(), |v, x| v << 1 | x)
        }
    }
}
/// Get buf unit bit at `index`th unit of `offset`bit.
#[inline]
unsafe fn get_buf_unit_bit(&self, offset: usize) -> B {
let (index, offset) = Self::bit_index_to_buf(offset);
let ptr = self.buf.ptr().offset(index as isize);
ptr::read(ptr) >> offset & B::one()
}
/// Get buf `length` bits of unit at `index`th unit's `offset`th bit
#[inline]
unsafe fn get_buf_unit_bits(&self, offset: usize, length: usize) -> B {
let offset = Self::bit_index_to_buf(offset);
let ptr = self.buf.ptr().offset(offset.0 as isize);
let unit = Self::buf_unit_bits();
(ptr::read(ptr) << (unit - offset.1 - length)) >> (unit - length)
}
/// Converts capacity to storage size
#[inline]
fn capacity_to_buf(capacity: usize) -> usize {
if capacity == 0 {
0
} else {
(capacity * Self::unit_bits() - 1) / (Self::buf_unit_bits()) + 1
}
}
/// Converts the storage size to capacity.
#[inline]
fn capacity_from_buf(buf_cap: usize) -> usize {
buf_cap * Self::buf_unit_bits() / Self::unit_bits()
}
/// Converts the vector index to buf `(index, offset)` tuple.
#[inline]
fn index_to_buf(index: usize) -> (usize, usize) {
let elem_bits = Self::buf_unit_bits();
let bits_index = index * Self::unit_bits();
(bits_index / elem_bits, bits_index % elem_bits)
}
/// Converts the vector index range to buf `(index, offset)` range tuple.
#[inline]
fn index_range_to_buf(index: usize, length: usize) -> ((usize, usize), (usize, usize)) {
(Self::index_to_buf(index),
Self::index_to_buf(index + length))
}
/// Converts bit index to buf `(index, offset)` tuple.
#[inline]
fn bit_index_to_buf(index: usize) -> (usize, usize) {
let unit = Self::buf_unit_bits();
(index / unit, index % unit)
}
    /// Returns the width of the storage unit `B` in bits.
    #[inline]
    fn buf_unit_bits() -> usize {
        mem::size_of::<B>() * 8
    }
    /// Returns unit of bits - that is `NbitsVec`'s `N` (delegates to `T::bits()`).
    #[inline]
    fn unit_bits() -> usize {
        T::bits()
    }
}
|
//! History API
use std::collections::VecDeque;
use std::collections::vec_deque;
use std::fs::File;
use std::iter::DoubleEndedIterator;
use std::ops::Index;
use std::path::Path;
#[cfg(unix)]
use libc;
use super::Result;
use config::{Config, HistoryDuplicates};
/// Direction in which the history is searched.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Direction {
    /// From older entries towards newer ones.
    Forward,
    /// From newer entries towards older ones.
    Reverse,
}
/// Current state of the history.
pub struct History {
    // Oldest entry at the front, newest at the back.
    entries: VecDeque<String>,
    // Maximum number of entries kept; 0 disables the history entirely.
    max_len: usize,
    // When true, `add` rejects lines starting with whitespace.
    ignore_space: bool,
    // When true, `add` rejects a line equal to the previous entry.
    ignore_dups: bool,
}
impl History {
    /// Creates a history using the default configuration.
    pub fn new() -> History {
        Self::with_config(Config::default())
    }

    /// Creates a history from the given `config`.
    pub fn with_config(config: Config) -> History {
        History {
            entries: VecDeque::new(),
            max_len: config.max_history_size(),
            // BUG FIX: these two initializers were swapped — `ignore_space`
            // was driven by the duplicates setting and `ignore_dups` by the
            // ignore-space setting.
            ignore_space: config.history_ignore_space(),
            ignore_dups: config.history_duplicates() == HistoryDuplicates::IgnoreConsecutive,
        }
    }

    /// Return the history entry at position `index`, starting from 0.
    pub fn get(&self, index: usize) -> Option<&String> {
        self.entries.get(index)
    }

    /// Return the last history entry (i.e. previous command)
    pub fn last(&self) -> Option<&String> {
        self.entries.back()
    }

    /// Add a new entry in the history.
    ///
    /// Returns `false` when the line was discarded: history disabled
    /// (`max_len == 0`), empty line, leading whitespace with `ignore_space`,
    /// or a consecutive duplicate with `ignore_dups`.
    pub fn add<S: AsRef<str> + Into<String>>(&mut self, line: S) -> bool {
        if self.max_len == 0 {
            return false;
        }
        if line.as_ref().is_empty() ||
           (self.ignore_space &&
            line.as_ref().chars().next().map_or(true, |c| c.is_whitespace())) {
            return false;
        }
        if self.ignore_dups {
            if let Some(s) = self.entries.back() {
                if s == line.as_ref() {
                    return false;
                }
            }
        }
        // Evict the oldest entry once the capacity is reached.
        if self.entries.len() == self.max_len {
            self.entries.pop_front();
        }
        self.entries.push_back(line.into());
        true
    }

    /// Returns the number of entries in the history.
    pub fn len(&self) -> usize {
        self.entries.len()
    }

    /// Returns true if the history has no entry.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }

    /// Set the maximum length for the history. This function can be called even
    /// if there is already some history, the function will make sure to retain
    /// just the latest `len` elements if the new history length value is smaller
    /// than the amount of items already inside the history.
    pub fn set_max_len(&mut self, len: usize) {
        self.max_len = len;
        if len == 0 {
            self.entries.clear();
            return;
        }
        // Drop the oldest entries until the new limit is honoured.
        while self.entries.len() > len {
            self.entries.pop_front();
        }
    }

    /// Save the history in the specified file.
    /// TODO append_history http://cnswww.cns.cwru.edu/php/chet/readline/history.html#IDX30
    /// TODO history_truncate_file http://cnswww.cns.cwru.edu/php/chet/readline/history.html#IDX31
    pub fn save<P: AsRef<Path> + ?Sized>(&self, path: &P) -> Result<()> {
        use std::io::{BufWriter, Write};
        if self.is_empty() {
            return Ok(());
        }
        // Restrict permissions while the file is being created, then restore
        // the caller's umask regardless of whether creation succeeded.
        let old_umask = umask();
        let f = File::create(path);
        restore_umask(old_umask);
        let file = try!(f);
        fix_perm(&file);
        let mut wtr = BufWriter::new(file);
        for entry in &self.entries {
            try!(wtr.write_all(entry.as_bytes()));
            try!(wtr.write_all(b"\n"));
        }
        Ok(())
    }

    /// Load the history from the specified file.
    ///
    /// # Failure
    /// Will return `Err` if path does not already exist.
    pub fn load<P: AsRef<Path> + ?Sized>(&mut self, path: &P) -> Result<()> {
        use std::io::{BufRead, BufReader};
        let file = try!(File::open(&path));
        let rdr = BufReader::new(file);
        for line in rdr.lines() {
            self.add(try!(line).as_ref()); // TODO truncate to MAX_LINE
        }
        Ok(())
    }

    /// Clear history
    pub fn clear(&mut self) {
        self.entries.clear()
    }

    /// Search history (start position inclusive [0, len-1]).
    /// Return the absolute index of the nearest history entry that matches `term`.
    /// Return None if no entry contains `term` between [start, len -1] for forward search
    /// or between [0, start] for reverse search.
    pub fn search(&self, term: &str, start: usize, dir: Direction) -> Option<usize> {
        if term.is_empty() || start >= self.len() {
            return None;
        }
        match dir {
            Direction::Reverse => {
                // Walk backwards starting at `start` (skip the newer entries).
                let index = self.entries
                    .iter()
                    .rev()
                    .skip(self.entries.len() - 1 - start)
                    .position(|entry| entry.contains(term));
                index.map(|index| start - index)
            }
            Direction::Forward => {
                let index = self.entries.iter().skip(start).position(|entry| entry.contains(term));
                index.map(|index| index + start)
            }
        }
    }

    /// Return a forward iterator.
    pub fn iter(&self) -> Iter {
        Iter(self.entries.iter())
    }
}
impl Index<usize> for History {
    type Output = String;

    /// Panics if `index` is out of bounds (delegates to `VecDeque` indexing).
    fn index(&self, index: usize) -> &String {
        &self.entries[index]
    }
}
/// Allows `for entry in &history { ... }`.
impl<'a> IntoIterator for &'a History {
    type Item = &'a String;
    type IntoIter = Iter<'a>;

    fn into_iter(self) -> Iter<'a> {
        self.iter()
    }
}
/// History iterator.
// Newtype over the underlying VecDeque iterator so the internal storage
// type stays private.
pub struct Iter<'a>(vec_deque::Iter<'a, String>);
impl<'a> Iterator for Iter<'a> {
    type Item = &'a String;

    // Plain delegation to the wrapped VecDeque iterator.
    fn next(&mut self) -> Option<&'a String> {
        self.0.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
}
impl<'a> DoubleEndedIterator for Iter<'a> {
    // Delegation; enables iterating from the newest entry backwards.
    fn next_back(&mut self) -> Option<&'a String> {
        self.0.next_back()
    }
}
// Windows has no umask; return a dummy value for symmetry with the unix path.
#[cfg(windows)]
fn umask() -> u16 {
    0
}
// Temporarily masks exec/group/other permissions so the history file is
// created user-read/write only; returns the previous umask for restoring.
#[cfg(unix)]
fn umask() -> libc::mode_t {
    unsafe { libc::umask(libc::S_IXUSR | libc::S_IRWXG | libc::S_IRWXO) }
}
// No-op on Windows; see the unix counterpart below.
#[cfg(windows)]
fn restore_umask(_: u16) {}
// Restores the process umask saved by `umask()` above.
#[cfg(unix)]
fn restore_umask(old_umask: libc::mode_t) {
    unsafe {
        libc::umask(old_umask);
    }
}
// No-op on Windows; permissions are only tightened on unix.
// FIX: the parameter was named `file`, triggering an unused-variable
// warning on Windows builds; `_` silences it without changing the signature.
#[cfg(windows)]
fn fix_perm(_: &File) {}
// Tightens the history file to user read/write only (0600).
#[cfg(unix)]
fn fix_perm(file: &File) {
    use std::os::unix::io::AsRawFd;
    unsafe {
        // Best-effort: the fchmod return value is intentionally ignored.
        libc::fchmod(file.as_raw_fd(), libc::S_IRUSR | libc::S_IWUSR);
    }
}
#[cfg(test)]
mod tests {
    extern crate tempdir;
    use std::path::Path;
    use config::Config;
    use super::{Direction, History};

    /// Builds the three-entry history used by most tests below.
    fn init() -> History {
        let mut history = History::new();
        assert!(history.add("line1"));
        assert!(history.add("line2"));
        assert!(history.add("line3"));
        history
    }

    #[test]
    fn new() {
        let history = History::new();
        assert_eq!(0, history.entries.len());
    }

    #[test]
    fn add() {
        let config = Config::builder()
            .history_ignore_space(true)
            .build();
        let mut history = History::with_config(config);
        assert_eq!(config.max_history_size(), history.max_len);
        assert!(history.add("line1"));
        assert!(history.add("line2"));
        // Consecutive duplicate is rejected.
        assert!(!history.add("line2"));
        // Empty lines and whitespace-prefixed lines are rejected.
        assert!(!history.add(""));
        assert!(!history.add(" line3"));
    }

    #[test]
    fn set_max_len() {
        let mut history = init();
        history.set_max_len(1);
        assert_eq!(1, history.entries.len());
        // The newest entry is the one retained.
        assert_eq!(Some(&"line3".to_string()), history.last());
    }

    #[test]
    fn save() {
        // Round-trip through a temporary file.
        let mut history = init();
        let td = tempdir::TempDir::new_in(&Path::new("."), "histo").unwrap();
        let history_path = td.path().join(".history");
        history.save(&history_path).unwrap();
        history.load(&history_path).unwrap();
        td.close().unwrap();
    }

    #[test]
    fn search() {
        let history = init();
        assert_eq!(None, history.search("", 0, Direction::Forward));
        assert_eq!(None, history.search("none", 0, Direction::Forward));
        assert_eq!(None, history.search("line", 3, Direction::Forward));
        assert_eq!(Some(0), history.search("line", 0, Direction::Forward));
        assert_eq!(Some(1), history.search("line", 1, Direction::Forward));
        assert_eq!(Some(2), history.search("line3", 1, Direction::Forward));
    }

    #[test]
    fn reverse_search() {
        let history = init();
        assert_eq!(None, history.search("", 2, Direction::Reverse));
        assert_eq!(None, history.search("none", 2, Direction::Reverse));
        assert_eq!(None, history.search("line", 3, Direction::Reverse));
        assert_eq!(Some(2), history.search("line", 2, Direction::Reverse));
        assert_eq!(Some(1), history.search("line", 1, Direction::Reverse));
        assert_eq!(Some(0), history.search("line1", 1, Direction::Reverse));
    }
}
Fix unused-variable warning on Windows (`fix_perm` parameter is unused there)
//! History API
use std::collections::VecDeque;
use std::collections::vec_deque;
use std::fs::File;
use std::iter::DoubleEndedIterator;
use std::ops::Index;
use std::path::Path;
#[cfg(unix)]
use libc;
use super::Result;
use config::{Config, HistoryDuplicates};
/// Direction in which the history is searched.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Direction {
    /// From older entries towards newer ones.
    Forward,
    /// From newer entries towards older ones.
    Reverse,
}
/// Current state of the history.
pub struct History {
    // Oldest entry at the front, newest at the back.
    entries: VecDeque<String>,
    // Maximum number of entries kept; 0 disables the history entirely.
    max_len: usize,
    // When true, `add` rejects lines starting with whitespace.
    ignore_space: bool,
    // When true, `add` rejects a line equal to the previous entry.
    ignore_dups: bool,
}
impl History {
    /// Creates a history using the default configuration.
    pub fn new() -> History {
        Self::with_config(Config::default())
    }

    /// Creates a history from the given `config`.
    pub fn with_config(config: Config) -> History {
        History {
            entries: VecDeque::new(),
            max_len: config.max_history_size(),
            // BUG FIX: these two initializers were swapped — `ignore_space`
            // was driven by the duplicates setting and `ignore_dups` by the
            // ignore-space setting.
            ignore_space: config.history_ignore_space(),
            ignore_dups: config.history_duplicates() == HistoryDuplicates::IgnoreConsecutive,
        }
    }

    /// Return the history entry at position `index`, starting from 0.
    pub fn get(&self, index: usize) -> Option<&String> {
        self.entries.get(index)
    }

    /// Return the last history entry (i.e. previous command)
    pub fn last(&self) -> Option<&String> {
        self.entries.back()
    }

    /// Add a new entry in the history.
    ///
    /// Returns `false` when the line was discarded: history disabled
    /// (`max_len == 0`), empty line, leading whitespace with `ignore_space`,
    /// or a consecutive duplicate with `ignore_dups`.
    pub fn add<S: AsRef<str> + Into<String>>(&mut self, line: S) -> bool {
        if self.max_len == 0 {
            return false;
        }
        if line.as_ref().is_empty() ||
           (self.ignore_space &&
            line.as_ref().chars().next().map_or(true, |c| c.is_whitespace())) {
            return false;
        }
        if self.ignore_dups {
            if let Some(s) = self.entries.back() {
                if s == line.as_ref() {
                    return false;
                }
            }
        }
        // Evict the oldest entry once the capacity is reached.
        if self.entries.len() == self.max_len {
            self.entries.pop_front();
        }
        self.entries.push_back(line.into());
        true
    }

    /// Returns the number of entries in the history.
    pub fn len(&self) -> usize {
        self.entries.len()
    }

    /// Returns true if the history has no entry.
    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }

    /// Set the maximum length for the history. This function can be called even
    /// if there is already some history, the function will make sure to retain
    /// just the latest `len` elements if the new history length value is smaller
    /// than the amount of items already inside the history.
    pub fn set_max_len(&mut self, len: usize) {
        self.max_len = len;
        if len == 0 {
            self.entries.clear();
            return;
        }
        // Drop the oldest entries until the new limit is honoured.
        while self.entries.len() > len {
            self.entries.pop_front();
        }
    }

    /// Save the history in the specified file.
    /// TODO append_history http://cnswww.cns.cwru.edu/php/chet/readline/history.html#IDX30
    /// TODO history_truncate_file http://cnswww.cns.cwru.edu/php/chet/readline/history.html#IDX31
    pub fn save<P: AsRef<Path> + ?Sized>(&self, path: &P) -> Result<()> {
        use std::io::{BufWriter, Write};
        if self.is_empty() {
            return Ok(());
        }
        // Restrict permissions while the file is being created, then restore
        // the caller's umask regardless of whether creation succeeded.
        let old_umask = umask();
        let f = File::create(path);
        restore_umask(old_umask);
        let file = try!(f);
        fix_perm(&file);
        let mut wtr = BufWriter::new(file);
        for entry in &self.entries {
            try!(wtr.write_all(entry.as_bytes()));
            try!(wtr.write_all(b"\n"));
        }
        Ok(())
    }

    /// Load the history from the specified file.
    ///
    /// # Failure
    /// Will return `Err` if path does not already exist.
    pub fn load<P: AsRef<Path> + ?Sized>(&mut self, path: &P) -> Result<()> {
        use std::io::{BufRead, BufReader};
        let file = try!(File::open(&path));
        let rdr = BufReader::new(file);
        for line in rdr.lines() {
            self.add(try!(line).as_ref()); // TODO truncate to MAX_LINE
        }
        Ok(())
    }

    /// Clear history
    pub fn clear(&mut self) {
        self.entries.clear()
    }

    /// Search history (start position inclusive [0, len-1]).
    /// Return the absolute index of the nearest history entry that matches `term`.
    /// Return None if no entry contains `term` between [start, len -1] for forward search
    /// or between [0, start] for reverse search.
    pub fn search(&self, term: &str, start: usize, dir: Direction) -> Option<usize> {
        if term.is_empty() || start >= self.len() {
            return None;
        }
        match dir {
            Direction::Reverse => {
                // Walk backwards starting at `start` (skip the newer entries).
                let index = self.entries
                    .iter()
                    .rev()
                    .skip(self.entries.len() - 1 - start)
                    .position(|entry| entry.contains(term));
                index.map(|index| start - index)
            }
            Direction::Forward => {
                let index = self.entries.iter().skip(start).position(|entry| entry.contains(term));
                index.map(|index| index + start)
            }
        }
    }

    /// Return a forward iterator.
    pub fn iter(&self) -> Iter {
        Iter(self.entries.iter())
    }
}
impl Index<usize> for History {
    type Output = String;

    /// Panics if `index` is out of bounds (delegates to `VecDeque` indexing).
    fn index(&self, index: usize) -> &String {
        &self.entries[index]
    }
}
/// Allows `for entry in &history { ... }`.
impl<'a> IntoIterator for &'a History {
    type Item = &'a String;
    type IntoIter = Iter<'a>;

    fn into_iter(self) -> Iter<'a> {
        self.iter()
    }
}
/// History iterator.
// Newtype over the underlying VecDeque iterator so the internal storage
// type stays private.
pub struct Iter<'a>(vec_deque::Iter<'a, String>);
impl<'a> Iterator for Iter<'a> {
    type Item = &'a String;

    // Plain delegation to the wrapped VecDeque iterator.
    fn next(&mut self) -> Option<&'a String> {
        self.0.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
}
impl<'a> DoubleEndedIterator for Iter<'a> {
    // Delegation; enables iterating from the newest entry backwards.
    fn next_back(&mut self) -> Option<&'a String> {
        self.0.next_back()
    }
}
// Windows has no umask; return a dummy value for symmetry with the unix path.
#[cfg(windows)]
fn umask() -> u16 {
    0
}
// Temporarily masks exec/group/other permissions so the history file is
// created user-read/write only; returns the previous umask for restoring.
#[cfg(unix)]
fn umask() -> libc::mode_t {
    unsafe { libc::umask(libc::S_IXUSR | libc::S_IRWXG | libc::S_IRWXO) }
}
// No-op on Windows; see the unix counterpart below.
#[cfg(windows)]
fn restore_umask(_: u16) {}
// Restores the process umask saved by `umask()` above.
#[cfg(unix)]
fn restore_umask(old_umask: libc::mode_t) {
    unsafe {
        libc::umask(old_umask);
    }
}
// No-op on Windows; the unused parameter is `_` to avoid a warning.
#[cfg(windows)]
fn fix_perm(_: &File) {}
// Tightens the history file to user read/write only (0600).
#[cfg(unix)]
fn fix_perm(file: &File) {
    use std::os::unix::io::AsRawFd;
    unsafe {
        // Best-effort: the fchmod return value is intentionally ignored.
        libc::fchmod(file.as_raw_fd(), libc::S_IRUSR | libc::S_IWUSR);
    }
}
#[cfg(test)]
mod tests {
    extern crate tempdir;
    use std::path::Path;
    use config::Config;
    use super::{Direction, History};

    /// Builds the three-entry history used by most tests below.
    fn init() -> History {
        let mut history = History::new();
        assert!(history.add("line1"));
        assert!(history.add("line2"));
        assert!(history.add("line3"));
        history
    }

    #[test]
    fn new() {
        let history = History::new();
        assert_eq!(0, history.entries.len());
    }

    #[test]
    fn add() {
        let config = Config::builder()
            .history_ignore_space(true)
            .build();
        let mut history = History::with_config(config);
        assert_eq!(config.max_history_size(), history.max_len);
        assert!(history.add("line1"));
        assert!(history.add("line2"));
        // Consecutive duplicate is rejected.
        assert!(!history.add("line2"));
        // Empty lines and whitespace-prefixed lines are rejected.
        assert!(!history.add(""));
        assert!(!history.add(" line3"));
    }

    #[test]
    fn set_max_len() {
        let mut history = init();
        history.set_max_len(1);
        assert_eq!(1, history.entries.len());
        // The newest entry is the one retained.
        assert_eq!(Some(&"line3".to_string()), history.last());
    }

    #[test]
    fn save() {
        // Round-trip through a temporary file.
        let mut history = init();
        let td = tempdir::TempDir::new_in(&Path::new("."), "histo").unwrap();
        let history_path = td.path().join(".history");
        history.save(&history_path).unwrap();
        history.load(&history_path).unwrap();
        td.close().unwrap();
    }

    #[test]
    fn search() {
        let history = init();
        assert_eq!(None, history.search("", 0, Direction::Forward));
        assert_eq!(None, history.search("none", 0, Direction::Forward));
        assert_eq!(None, history.search("line", 3, Direction::Forward));
        assert_eq!(Some(0), history.search("line", 0, Direction::Forward));
        assert_eq!(Some(1), history.search("line", 1, Direction::Forward));
        assert_eq!(Some(2), history.search("line3", 1, Direction::Forward));
    }

    #[test]
    fn reverse_search() {
        let history = init();
        assert_eq!(None, history.search("", 2, Direction::Reverse));
        assert_eq!(None, history.search("none", 2, Direction::Reverse));
        assert_eq!(None, history.search("line", 3, Direction::Reverse));
        assert_eq!(Some(2), history.search("line", 2, Direction::Reverse));
        assert_eq!(Some(1), history.search("line", 1, Direction::Reverse));
        assert_eq!(Some(0), history.search("line1", 1, Direction::Reverse));
    }
}
|
use super::*;
use std::collections::VecDeque;
use std::io::{BufReader, BufRead, Error, ErrorKind};
use std::fs::{File, OpenOptions};
use std::io::{self, Read, Seek, SeekFrom, Write};
use std::ops::Index;
use std::ops::IndexMut;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::mpsc::{channel, Sender};
use std::thread::{sleep, spawn, JoinHandle};
use std::time::Duration;
use bytecount::count;
/// Default cap used for both the in-memory history and the history file.
const DEFAULT_MAX_SIZE: usize = 1000;
/// Structure encapsulating command history
pub struct History {
    // TODO: this should eventually be private
    /// Vector of buffers to store history in
    pub buffers: VecDeque<Buffer>,
    /// Store a filename to save history into; if None don't save history
    file_name: Option<String>,
    /// Maximal number of buffers stored in the memory
    /// TODO: just make this public?
    max_size: usize,
    /// Maximal number of lines stored in the file
    // TODO: just make this public?
    max_file_size: Arc<AtomicUsize>,
    /// Handle to the background thread managing writes to the history file
    bg_handle: Option<JoinHandle<()>>,
    /// Signals the background thread to stop when dropping the struct
    bg_stop: Arc<AtomicBool>,
    /// Sends commands to write to the history file
    sender: Sender<(Buffer, String)>,
    // TODO set from environment variable?
    /// When false (the default set in `new`), a command equal to the most
    /// recent entry is not appended to the in-memory history again.
    pub append_duplicate_entries: bool,
}
impl History {
    /// It's important to execute this function before exiting your program, as it will
    /// ensure that all history data has been written to the disk.
    pub fn commit_history(&mut self) {
        // Signal the background thread to stop
        self.bg_stop.store(true, Ordering::Relaxed);
        // Wait for the background thread to stop; its shutdown path drains
        // any queued writes before exiting.
        if let Some(handle) = self.bg_handle.take() {
            let _ = handle.join();
        }
    }

    /// Create new History structure.
    pub fn new() -> History {
        let max_file_size = Arc::new(AtomicUsize::new(DEFAULT_MAX_SIZE));
        let bg_stop = Arc::new(AtomicBool::new(false));
        // Channel feeding (command, file path) pairs to the writer thread.
        let (sender, receiver) = channel();
        let stop_signal = bg_stop.clone();
        let max_size = max_file_size.clone();
        History {
            buffers: VecDeque::with_capacity(DEFAULT_MAX_SIZE),
            file_name: None,
            sender: sender,
            // Background writer: polls the channel every 100ms and appends
            // received commands to the history file.
            bg_handle: Some(spawn(move || {
                while !stop_signal.load(Ordering::Relaxed) {
                    if let Ok((command, filepath)) = receiver.try_recv() {
                        let max_file_size = max_size.load(Ordering::Relaxed);
                        let _ = write_to_disk(max_file_size, &command, &filepath);
                    }
                    sleep(Duration::from_millis(100));
                }
                // Deplete the receiver of commands to write, before exiting the thread.
                while let Ok((command, filepath)) = receiver.try_recv() {
                    let max_file_size = max_size.load(Ordering::Relaxed);
                    let _ = write_to_disk(max_file_size, &command, &filepath);
                }
            })),
            bg_stop: bg_stop,
            max_size: DEFAULT_MAX_SIZE,
            max_file_size: max_file_size,
            append_duplicate_entries: false,
        }
    }

    /// Number of items in history.
    pub fn len(&self) -> usize {
        self.buffers.len()
    }

    /// Add a command to the history buffer and remove the oldest commands when the max history
    /// size has been met. If writing to the disk is enabled, this function will be used for
    /// logging history to the designated history file.
    pub fn push(&mut self, new_item: Buffer) -> io::Result<()> {
        // Queue the disk write first (handled by the background thread).
        // NOTE(review): the item is queued even when the duplicate check
        // below skips the in-memory append — confirm this is intended.
        self.file_name
            .as_ref()
            .map(|name| {
                let _ = self.sender.send((new_item.clone(), name.to_owned()));
            });
        // buffers[0] is the oldest entry
        // the new entry goes to the end
        if !self.append_duplicate_entries &&
            self.buffers.back().map(|b| b.to_string()) == Some(new_item.to_string())
        {
            return Ok(());
        }
        self.buffers.push_back(new_item);
        // Evict oldest entries past the in-memory cap.
        while self.buffers.len() > self.max_size {
            self.buffers.pop_front();
        }
        Ok(())
    }

    /// Go through the history and try to find a buffer which starts the same as the new buffer
    /// given to this function as argument.
    pub fn get_newest_match<'a, 'b>(
        &'a self,
        curr_position: Option<usize>,
        new_buff: &'b Buffer,
    ) -> Option<&'a Buffer> {
        // Scan backwards from the current position (or from the end).
        let pos = curr_position.unwrap_or(self.buffers.len());
        for iter in (0..pos).rev() {
            if let Some(tested) = self.buffers.get(iter) {
                if tested.starts_with(new_buff) {
                    return self.buffers.get(iter);
                }
            }
        }
        None
    }

    /// Get the history file name.
    pub fn file_name(&self) -> Option<&str> {
        match self.file_name {
            Some(ref s) => Some(&s[..]),
            None => None,
        }
    }

    /// Set history file name. At the same time enable history.
    pub fn set_file_name(&mut self, name: Option<String>) {
        self.file_name = name;
    }

    /// Set maximal number of buffers stored in memory
    pub fn set_max_size(&mut self, size: usize) {
        self.max_size = size;
    }

    /// Set maximal number of entries in history file
    pub fn set_max_file_size(&mut self, size: usize) {
        self.max_file_size.store(size, Ordering::Relaxed);
    }

    /// Load history from given file name
    pub fn load_history(&mut self) -> io::Result<()> {
        let file_name = match self.file_name.clone() {
            Some(name) => name,
            None => {
                return Err(Error::new(
                    ErrorKind::Other,
                    "Liner: file name not specified",
                ))
            }
        };
        let file = try!(OpenOptions::new().read(true).open(file_name));
        let reader = BufReader::new(file);
        // Stops at the first unreadable line instead of failing the load.
        for line in reader.lines() {
            match line {
                Ok(line) => self.buffers.push_back(Buffer::from(line)),
                Err(_) => break,
            }
        }
        Ok(())
    }
}
impl Index<usize> for History {
    type Output = Buffer;

    /// Panics if `index` is out of bounds (delegates to `VecDeque` indexing).
    fn index(&self, index: usize) -> &Buffer {
        &self.buffers[index]
    }
}
impl IndexMut<usize> for History {
    /// Panics if `index` is out of bounds (delegates to `VecDeque` indexing).
    fn index_mut(&mut self, index: usize) -> &mut Buffer {
        &mut self.buffers[index]
    }
}
/// Perform write operation. If the history file does not exist, it will be created.
/// This function is not part of the public interface.
/// XXX: include more information in the file (like fish does)
fn write_to_disk(max_file_size: usize, new_item: &Buffer, file_name: &str) -> io::Result<()> {
    let ret = match OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .open(file_name) {
        Ok(mut file) => {
            // The metadata contains the length of the file
            let file_length = file.metadata().ok().map_or(0u64, |m| m.len());
            // 4K byte buffer for reading chunks of the file at once.
            let mut buffer = [0; 4096];
            // Determine the number of commands stored.
            {
                let mut seek_point = 0u64;
                let mut stored = 0;
                let mut total_read = 0u64;
                let mut rfile = File::open(file_name).unwrap();
                loop {
                    // Read up to 4K bytes into the buffer.
                    let read = rfile.read(&mut buffer)?;
                    // If EOF is found, don't seek at all.
                    if read == 0 { break }
                    // BUG FIX: only the first `read` bytes are valid after a
                    // partial read; scanning the whole buffer counted stale
                    // bytes from earlier iterations and inflated the totals.
                    let cmds_read = count(&buffer[0..read], b'\n');
                    // If stored + read >= max file size, a seek point is in the current buffer.
                    if stored + cmds_read >= max_file_size {
                        for &byte in buffer[0..read].iter() {
                            total_read += 1;
                            if byte == b'\n' {
                                stored += 1;
                                if stored == max_file_size {
                                    seek_point = total_read;
                                    break
                                }
                            }
                        }
                        // Truncate to the newest max_file_size commands by
                        // copying the tail of the file over its head.
                        try!(file.seek(SeekFrom::Start(seek_point as u64)));
                        let mut buffer: Vec<u8> =
                            Vec::with_capacity((file_length - seek_point) as usize);
                        try!(file.read_to_end(&mut buffer));
                        try!(file.set_len(0));
                        try!(io::copy(&mut buffer.as_slice(), &mut file));
                        break
                    } else {
                        total_read += read as u64;
                        stored += cmds_read;
                    }
                }
            };
            // Seek to end for appending
            try!(file.seek(SeekFrom::End(0)));
            // Write the command to the history file.
            try!(file.write_all(String::from(new_item.clone()).as_bytes()));
            try!(file.write_all(b"\n"));
            file.flush()?;
            Ok(())
        }
        Err(message) => Err(message),
    };
    ret
}
Fixed miscounting in write_to_disk: when fewer than 4096 bytes were read, the code still scanned the whole buffer, counting stale bytes and inflating the measured file size
use super::*;
use std::collections::VecDeque;
use std::io::{BufReader, BufRead, Error, ErrorKind};
use std::fs::{File, OpenOptions};
use std::io::{self, Read, Seek, SeekFrom, Write};
use std::ops::Index;
use std::ops::IndexMut;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::mpsc::{channel, Sender};
use std::thread::{sleep, spawn, JoinHandle};
use std::time::Duration;
use bytecount::count;
/// Default cap used for both the in-memory history and the history file.
const DEFAULT_MAX_SIZE: usize = 1000;
/// Structure encapsulating command history
pub struct History {
    // TODO: this should eventually be private
    /// Vector of buffers to store history in
    pub buffers: VecDeque<Buffer>,
    /// Store a filename to save history into; if None don't save history
    file_name: Option<String>,
    /// Maximal number of buffers stored in the memory
    /// TODO: just make this public?
    max_size: usize,
    /// Maximal number of lines stored in the file
    // TODO: just make this public?
    max_file_size: Arc<AtomicUsize>,
    /// Handle to the background thread managing writes to the history file
    bg_handle: Option<JoinHandle<()>>,
    /// Signals the background thread to stop when dropping the struct
    bg_stop: Arc<AtomicBool>,
    /// Sends commands to write to the history file
    sender: Sender<(Buffer, String)>,
    // TODO set from environment variable?
    /// When false (the default set in `new`), a command equal to the most
    /// recent entry is not appended to the in-memory history again.
    pub append_duplicate_entries: bool,
}
impl History {
    /// It's important to execute this function before exiting your program, as it will
    /// ensure that all history data has been written to the disk.
    pub fn commit_history(&mut self) {
        // Signal the background thread to stop
        self.bg_stop.store(true, Ordering::Relaxed);
        // Wait for the background thread to stop; its shutdown path drains
        // any queued writes before exiting.
        if let Some(handle) = self.bg_handle.take() {
            let _ = handle.join();
        }
    }

    /// Create new History structure.
    pub fn new() -> History {
        let max_file_size = Arc::new(AtomicUsize::new(DEFAULT_MAX_SIZE));
        let bg_stop = Arc::new(AtomicBool::new(false));
        // Channel feeding (command, file path) pairs to the writer thread.
        let (sender, receiver) = channel();
        let stop_signal = bg_stop.clone();
        let max_size = max_file_size.clone();
        History {
            buffers: VecDeque::with_capacity(DEFAULT_MAX_SIZE),
            file_name: None,
            sender: sender,
            // Background writer: polls the channel every 100ms and appends
            // received commands to the history file.
            bg_handle: Some(spawn(move || {
                while !stop_signal.load(Ordering::Relaxed) {
                    if let Ok((command, filepath)) = receiver.try_recv() {
                        let max_file_size = max_size.load(Ordering::Relaxed);
                        let _ = write_to_disk(max_file_size, &command, &filepath);
                    }
                    sleep(Duration::from_millis(100));
                }
                // Deplete the receiver of commands to write, before exiting the thread.
                while let Ok((command, filepath)) = receiver.try_recv() {
                    let max_file_size = max_size.load(Ordering::Relaxed);
                    let _ = write_to_disk(max_file_size, &command, &filepath);
                }
            })),
            bg_stop: bg_stop,
            max_size: DEFAULT_MAX_SIZE,
            max_file_size: max_file_size,
            append_duplicate_entries: false,
        }
    }

    /// Number of items in history.
    pub fn len(&self) -> usize {
        self.buffers.len()
    }

    /// Add a command to the history buffer and remove the oldest commands when the max history
    /// size has been met. If writing to the disk is enabled, this function will be used for
    /// logging history to the designated history file.
    pub fn push(&mut self, new_item: Buffer) -> io::Result<()> {
        // Queue the disk write first (handled by the background thread).
        // NOTE(review): the item is queued even when the duplicate check
        // below skips the in-memory append — confirm this is intended.
        self.file_name
            .as_ref()
            .map(|name| {
                let _ = self.sender.send((new_item.clone(), name.to_owned()));
            });
        // buffers[0] is the oldest entry
        // the new entry goes to the end
        if !self.append_duplicate_entries &&
            self.buffers.back().map(|b| b.to_string()) == Some(new_item.to_string())
        {
            return Ok(());
        }
        self.buffers.push_back(new_item);
        // Evict oldest entries past the in-memory cap.
        while self.buffers.len() > self.max_size {
            self.buffers.pop_front();
        }
        Ok(())
    }

    /// Go through the history and try to find a buffer which starts the same as the new buffer
    /// given to this function as argument.
    pub fn get_newest_match<'a, 'b>(
        &'a self,
        curr_position: Option<usize>,
        new_buff: &'b Buffer,
    ) -> Option<&'a Buffer> {
        // Scan backwards from the current position (or from the end).
        let pos = curr_position.unwrap_or(self.buffers.len());
        for iter in (0..pos).rev() {
            if let Some(tested) = self.buffers.get(iter) {
                if tested.starts_with(new_buff) {
                    return self.buffers.get(iter);
                }
            }
        }
        None
    }

    /// Get the history file name.
    pub fn file_name(&self) -> Option<&str> {
        match self.file_name {
            Some(ref s) => Some(&s[..]),
            None => None,
        }
    }

    /// Set history file name. At the same time enable history.
    pub fn set_file_name(&mut self, name: Option<String>) {
        self.file_name = name;
    }

    /// Set maximal number of buffers stored in memory
    pub fn set_max_size(&mut self, size: usize) {
        self.max_size = size;
    }

    /// Set maximal number of entries in history file
    pub fn set_max_file_size(&mut self, size: usize) {
        self.max_file_size.store(size, Ordering::Relaxed);
    }

    /// Load history from given file name
    pub fn load_history(&mut self) -> io::Result<()> {
        let file_name = match self.file_name.clone() {
            Some(name) => name,
            None => {
                return Err(Error::new(
                    ErrorKind::Other,
                    "Liner: file name not specified",
                ))
            }
        };
        let file = try!(OpenOptions::new().read(true).open(file_name));
        let reader = BufReader::new(file);
        // Stops at the first unreadable line instead of failing the load.
        for line in reader.lines() {
            match line {
                Ok(line) => self.buffers.push_back(Buffer::from(line)),
                Err(_) => break,
            }
        }
        Ok(())
    }
}
impl Index<usize> for History {
    type Output = Buffer;

    /// Panics if `index` is out of bounds (delegates to `VecDeque` indexing).
    fn index(&self, index: usize) -> &Buffer {
        &self.buffers[index]
    }
}
impl IndexMut<usize> for History {
    /// Panics if `index` is out of bounds (delegates to `VecDeque` indexing).
    fn index_mut(&mut self, index: usize) -> &mut Buffer {
        &mut self.buffers[index]
    }
}
/// Perform write operation. If the history file does not exist, it will be created.
/// This function is not part of the public interface.
/// XXX: include more information in the file (like fish does)
fn write_to_disk(max_file_size: usize, new_item: &Buffer, file_name: &str) -> io::Result<()> {
    let ret = match OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .open(file_name) {
        Ok(mut file) => {
            // The metadata contains the length of the file
            let file_length = file.metadata().ok().map_or(0u64, |m| m.len());
            // 4K byte buffer for reading chunks of the file at once.
            let mut buffer = [0; 4096];
            // Determine the number of commands stored.
            {
                let mut seek_point = 0u64;
                let mut stored = 0;
                let mut total_read = 0u64;
                let mut rfile = File::open(file_name).unwrap();
                loop {
                    // Read up to 4K of bytes all at once into the buffer.
                    let read = rfile.read(&mut buffer)?;
                    // If EOF is found, don't seek at all.
                    if read == 0 { break }
                    // Count the number of commands that were found in the
                    // current buffer; only the first `read` bytes are valid.
                    let cmds_read = count(&buffer[0..read], b'\n');
                    // If stored + read >= max file size, a seek point is in the current buffer.
                    if stored + cmds_read >= max_file_size {
                        for &byte in buffer[0..read].iter() {
                            total_read += 1;
                            if byte == b'\n' {
                                stored += 1;
                                if stored == max_file_size {
                                    seek_point = total_read;
                                    break
                                }
                            }
                        }
                        // Truncate to the newest max_file_size commands by
                        // copying the tail of the file over its head.
                        try!(file.seek(SeekFrom::Start(seek_point as u64)));
                        let mut buffer: Vec<u8> = Vec::with_capacity((file_length - seek_point) as usize);
                        try!(file.read_to_end(&mut buffer));
                        try!(file.set_len(0));
                        try!(io::copy(&mut buffer.as_slice(), &mut file));
                        break
                    } else {
                        total_read += read as u64;
                        stored += cmds_read;
                    }
                }
            };
            // Seek to end for appending
            try!(file.seek(SeekFrom::End(0)));
            // Write the command to the history file.
            try!(file.write_all(String::from(new_item.clone()).as_bytes()));
            try!(file.write_all(b"\n"));
            file.flush()?;
            Ok(())
        }
        Err(message) => Err(message),
    };
    ret
}
|
// Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! tty related functionality
//!
use std::env;
use std::ffi::CStr;
use std::fs::File;
use std::mem;
use std::os::unix::io::FromRawFd;
use std::ptr;
use libc::{self, winsize, c_int, pid_t, WNOHANG, WIFEXITED, WEXITSTATUS, SIGCHLD};
use term::SizeInfo;
use display::OnResize;
use config::Config;
/// Process ID of child process
///
/// Necessary to put this in static storage for `sigchld` to have access
// NOTE(review): `static mut` accessed from a signal handler — unsynchronized.
static mut PID: pid_t = 0;
/// Exit flag
///
/// Calling exit() in the SIGCHLD handler sometimes causes opengl to deadlock,
/// and the process hangs. Instead, this flag is set, and its status can be
/// checked via `process_should_exit`.
static mut SHOULD_EXIT: bool = false;
/// SIGCHLD handler: reaps the child and records whether this process
/// should exit in `SHOULD_EXIT`.
extern "C" fn sigchld(_a: c_int) {
    let mut status: c_int = 0;
    unsafe {
        // Non-blocking wait restricted to our child's pid.
        let p = libc::waitpid(PID, &mut status, WNOHANG);
        if p < 0 {
            die!("Waiting for pid {} failed: {}\n", PID, errno());
        }
        // No state change for our child; nothing to do.
        if PID != p {
            return;
        }
        if !WIFEXITED(status) || WEXITSTATUS(status) != 0 {
            die!("child finished with error '{}'\n", status);
        }
        // Only set a flag here; calling exit() from the handler can deadlock
        // (see the SHOULD_EXIT docs above its declaration).
        SHOULD_EXIT = true;
    }
}
/// Returns true once `sigchld` has observed the child exiting cleanly.
pub fn process_should_exit() -> bool {
    // NOTE(review): unsynchronized read of a `static mut` written from a
    // signal handler — consider an AtomicBool.
    unsafe { SHOULD_EXIT }
}
/// Get the current value of errno
fn errno() -> c_int {
    ::errno::errno().0
}
/// Result of a `fork()`: which side of the fork this process is on.
enum Relation {
    /// We are the forked child process.
    Child,
    /// We are the parent; carries the child's pid.
    Parent(pid_t)
}
/// Thin wrapper around `libc::fork` that dies on failure and reports
/// which side of the fork we are on.
fn fork() -> Relation {
    let pid = unsafe { libc::fork() };

    if pid < 0 {
        die!("fork failed");
    }

    match pid {
        0 => Relation::Child,
        child => Relation::Parent(child),
    }
}
/// Get raw fds for master/slave ends of a new pty
#[cfg(target_os = "linux")]
fn openpty(rows: u8, cols: u8) -> (c_int, c_int) {
    let mut master: c_int = 0;
    let mut slave: c_int = 0;

    // Initial window size for the new pty; pixel dimensions are left at 0.
    let win = winsize {
        ws_row: rows as libc::c_ushort,
        ws_col: cols as libc::c_ushort,
        ws_xpixel: 0,
        ws_ypixel: 0,
    };

    // NULL name buffer and termios; only the window size is supplied.
    let res = unsafe {
        libc::openpty(&mut master, &mut slave, ptr::null_mut(), ptr::null(), &win)
    };

    if res < 0 {
        die!("openpty failed");
    }

    (master, slave)
}
/// Get raw fds for master/slave ends of a new pty (macOS variant).
#[cfg(target_os = "macos")]
fn openpty(rows: u8, cols: u8) -> (c_int, c_int) {
    let mut master: c_int = 0;
    let mut slave: c_int = 0;

    // Initial window size for the new pty; pixel dimensions are left at 0.
    let mut win = winsize {
        ws_row: rows as libc::c_ushort,
        ws_col: cols as libc::c_ushort,
        ws_xpixel: 0,
        ws_ypixel: 0,
    };

    // The macOS binding takes mutable pointers for the termios/winsize
    // arguments, hence `null_mut()` and `&mut win` here.
    let res = unsafe {
        libc::openpty(&mut master, &mut slave, ptr::null_mut(), ptr::null_mut(), &mut win)
    };

    if res < 0 {
        die!("openpty failed");
    }

    (master, slave)
}
/// Really only needed on BSD, but should be fine elsewhere
fn set_controlling_terminal(fd: c_int) {
    // TIOCSCTTY makes `fd` the controlling terminal of this process.
    let res = unsafe {
        libc::ioctl(fd, libc::TIOCSCTTY as _, 0)
    };

    if res < 0 {
        die!("ioctl TIOCSCTTY failed: {}", errno());
    }
}
/// A borrowed view of a user's passwd entry; every `&str` field points into
/// the caller-provided buffer handed to `get_pw_entry`.
#[derive(Debug)]
struct Passwd<'a> {
    name: &'a str,
    passwd: &'a str,
    uid: libc::uid_t,
    gid: libc::gid_t,
    gecos: &'a str,
    dir: &'a str,
    shell: &'a str,
}
/// Return a Passwd struct with pointers into the provided buf
///
/// # Unsafety
///
/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.
fn get_pw_entry(buf: &mut [i8; 1024]) -> Passwd {
// Create zeroed passwd struct
let mut entry: libc::passwd = unsafe { ::std::mem::uninitialized() };
let mut res: *mut libc::passwd = ptr::null_mut();
// Try and read the pw file.
let uid = unsafe { libc::getuid() };
let status = unsafe {
libc::getpwuid_r(uid, &mut entry, buf.as_mut_ptr() as *mut _, buf.len(), &mut res)
};
if status < 0 {
die!("getpwuid_r failed");
}
if res.is_null() {
die!("pw not found");
}
// sanity check
assert_eq!(entry.pw_uid, uid);
// Build a borrowed Passwd struct
//
// Transmute is used here to conveniently cast from the raw CStr to a &str with the appropriate
// lifetime.
Passwd {
name: unsafe { mem::transmute(CStr::from_ptr(entry.pw_name).to_str().unwrap()) },
passwd: unsafe { mem::transmute(CStr::from_ptr(entry.pw_passwd).to_str().unwrap()) },
uid: entry.pw_uid,
gid: entry.pw_gid,
gecos: unsafe { mem::transmute(CStr::from_ptr(entry.pw_gecos).to_str().unwrap()) },
dir: unsafe { mem::transmute(CStr::from_ptr(entry.pw_dir).to_str().unwrap()) },
shell: unsafe { mem::transmute(CStr::from_ptr(entry.pw_shell).to_str().unwrap()) },
}
}
/// Exec a shell
///
/// Sets up the environment (LOGNAME/USER/SHELL/HOME/TERM), restores default
/// signal dispositions, and replaces this process image via `execvp`.
/// Never returns: either `execvp` succeeds or the process exits.
fn execsh(config: &Config) -> ! {
    let mut buf = [0; 1024];
    let pw = get_pw_entry(&mut buf);

    // Shell from the config if present (and valid UTF-8), otherwise the
    // user's login shell from the passwd entry.
    let shell = match config.shell() {
        Some(shell) => match shell.to_str() {
            Some(shell) => shell,
            None => die!("Invalid shell value")
        },
        None => pw.shell
    };

    // setup environment
    env::set_var("LOGNAME", pw.name);
    env::set_var("USER", pw.name);
    env::set_var("SHELL", shell);
    env::set_var("HOME", pw.dir);
    env::set_var("TERM", "xterm-256color"); // sigh

    // Restore default handlers for signals the parent may have overridden.
    unsafe {
        libc::signal(libc::SIGCHLD, libc::SIG_DFL);
        libc::signal(libc::SIGHUP, libc::SIG_DFL);
        libc::signal(libc::SIGINT, libc::SIG_DFL);
        libc::signal(libc::SIGQUIT, libc::SIG_DFL);
        libc::signal(libc::SIGTERM, libc::SIG_DFL);
        libc::signal(libc::SIGALRM, libc::SIG_DFL);
    }

    // pw.shell is null terminated
    // NOTE(review): when the shell came from the config this relies on its
    // backing storage being NUL-terminated as well — confirm.
    let shell = unsafe { CStr::from_ptr(shell.as_ptr() as *const _) };
    let argv = [shell.as_ptr(), ptr::null()];

    let res = unsafe {
        libc::execvp(shell.as_ptr(), argv.as_ptr())
    };

    if res < 0 {
        die!("execvp failed: {}", errno());
    }

    // Unreachable on success; exit defensively if execvp somehow returned 0.
    ::std::process::exit(1);
}
/// Create a new tty and return a handle to interact with it.
pub fn new<T: ToWinsize>(config: &Config, size: T) -> Pty {
    let win = size.to_winsize();

    let (master, slave) = openpty(win.ws_row as _, win.ws_col as _);

    match fork() {
        Relation::Child => {
            unsafe {
                // Create a new process group
                libc::setsid();

                // Duplicate pty slave to be child stdin, stdout, and stderr
                libc::dup2(slave, 0);
                libc::dup2(slave, 1);
                libc::dup2(slave, 2);
            }

            set_controlling_terminal(slave);

            // No longer need slave/master fds (stdio copies remain open)
            unsafe {
                libc::close(slave);
                libc::close(master);
            }

            // Exec a shell!
            execsh(config);
        },
        Relation::Parent(pid) => {
            unsafe {
                // Set PID for SIGCHLD handler
                PID = pid;

                // Handle SIGCHLD
                libc::signal(SIGCHLD, sigchld as _);

                // Parent doesn't need slave fd
                libc::close(slave);
            }

            unsafe {
                // Maybe this should be done outside of this function so nonblocking
                // isn't forced upon consumers. Although maybe it should be?
                set_nonblocking(master);
            }

            let pty = Pty { fd: master };
            // Apply the full size (incl. pixel dims) now that the pty exists.
            pty.resize(size);
            pty
        }
    }
}
/// Handle to the master side of a pty.
pub struct Pty {
    fd: c_int,
}
impl Pty {
    /// Get reader for the TTY
    ///
    /// XXX File is a bad abstraction here; it closes the fd on drop
    pub fn reader(&self) -> File {
        unsafe {
            // The returned File takes ownership of `self.fd` and will close
            // it on drop — callers must keep that in mind.
            File::from_raw_fd(self.fd)
        }
    }

    /// Resize the pty
    ///
    /// Tells the kernel that the window size changed with the new pixel
    /// dimensions and line/column counts.
    pub fn resize<T: ToWinsize>(&self, size: T) {
        let win = size.to_winsize();

        // TIOCSWINSZ updates the kernel's idea of this pty's window size.
        let res = unsafe {
            libc::ioctl(self.fd, libc::TIOCSWINSZ, &win as *const _)
        };

        if res < 0 {
            die!("ioctl TIOCSWINSZ failed: {}", errno());
        }
    }
}
/// Types that can produce a `libc::winsize`
///
/// Implemented by size descriptions the pty layer hands to the kernel.
pub trait ToWinsize {
    /// Get a `libc::winsize`
    fn to_winsize(&self) -> winsize;
}
impl<'a> ToWinsize for &'a SizeInfo {
    fn to_winsize(&self) -> winsize {
        // Map terminal grid (lines/cols) and pixel dimensions onto the
        // libc winsize struct.
        winsize {
            ws_row: self.lines().0 as libc::c_ushort,
            ws_col: self.cols().0 as libc::c_ushort,
            ws_xpixel: self.width as libc::c_ushort,
            ws_ypixel: self.height as libc::c_ushort,
        }
    }
}
impl OnResize for Pty {
    /// Forward display resize notifications to the kernel via `resize`.
    fn on_resize(&mut self, size: &SizeInfo) {
        self.resize(size);
    }
}
/// Put `fd` into non-blocking mode.
///
/// Panics (via assert) if either `fcntl` call fails.
unsafe fn set_nonblocking(fd: c_int) {
    use libc::{fcntl, F_SETFL, F_GETFL, O_NONBLOCK};

    // Read the current flags first and check the result; otherwise a -1
    // error value would be OR'd with O_NONBLOCK and written back.
    let flags = fcntl(fd, F_GETFL, 0);
    assert!(flags >= 0, "fcntl F_GETFL failed");

    let res = fcntl(fd, F_SETFL, flags | O_NONBLOCK);
    assert_eq!(res, 0);
}
#[test]
fn test_get_pw_entry() {
    // Smoke test: looks up the current user's passwd entry and prints it.
    let mut buf: [i8; 1024] = [0; 1024];
    let pw = get_pw_entry(&mut buf);
    println!("{:?}", pw);
}
Cleanup getpwuid_r wrapper
The wrapper had some transmutes still from an earlier implementation,
and they are not needed now.
// Copyright 2016 Joe Wilm, The Alacritty Project Contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! tty related functionality
//!
use std::env;
use std::ffi::CStr;
use std::fs::File;
use std::os::unix::io::FromRawFd;
use std::ptr;
use libc::{self, winsize, c_int, pid_t, WNOHANG, WIFEXITED, WEXITSTATUS, SIGCHLD};
use term::SizeInfo;
use display::OnResize;
use config::Config;
/// Process ID of child process
///
/// Necessary to put this in static storage for `sigchld` to have access
static mut PID: pid_t = 0;
/// Exit flag
///
/// Calling exit() in the SIGCHLD handler sometimes causes opengl to deadlock,
/// and the process hangs. Instead, this flag is set, and its status can be
/// checked via `process_should_exit`.
static mut SHOULD_EXIT: bool = false;
/// SIGCHLD handler: reap the terminal's child process and record whether
/// it has exited.
///
/// Runs in signal context; it only touches the `PID` / `SHOULD_EXIT`
/// statics and libc calls.
extern "C" fn sigchld(_a: c_int) {
    let mut status: c_int = 0;
    unsafe {
        // Non-blocking reap: WNOHANG returns 0 if the child hasn't changed state.
        let p = libc::waitpid(PID, &mut status, WNOHANG);
        if p < 0 {
            die!("Waiting for pid {} failed: {}\n", PID, errno());
        }

        // State change was for some other child; ignore it.
        if PID != p {
            return;
        }

        // Abort loudly if the shell died abnormally or with a non-zero code.
        if !WIFEXITED(status) || WEXITSTATUS(status) != 0 {
            die!("child finished with error '{}'\n", status);
        }

        // Tell the main loop to exit (polled via `process_should_exit`).
        SHOULD_EXIT = true;
    }
}
pub fn process_should_exit() -> bool {
unsafe { SHOULD_EXIT }
}
/// Get the current value of errno
fn errno() -> c_int {
::errno::errno().0
}
enum Relation {
Child,
Parent(pid_t)
}
/// Thin wrapper around `libc::fork` that dies on failure and reports
/// which side of the fork we are on.
fn fork() -> Relation {
    let pid = unsafe { libc::fork() };

    if pid < 0 {
        die!("fork failed");
    }

    match pid {
        0 => Relation::Child,
        child => Relation::Parent(child),
    }
}
/// Get raw fds for master/slave ends of a new pty
#[cfg(target_os = "linux")]
fn openpty(rows: u8, cols: u8) -> (c_int, c_int) {
let mut master: c_int = 0;
let mut slave: c_int = 0;
let win = winsize {
ws_row: rows as libc::c_ushort,
ws_col: cols as libc::c_ushort,
ws_xpixel: 0,
ws_ypixel: 0,
};
let res = unsafe {
libc::openpty(&mut master, &mut slave, ptr::null_mut(), ptr::null(), &win)
};
if res < 0 {
die!("openpty failed");
}
(master, slave)
}
#[cfg(target_os = "macos")]
fn openpty(rows: u8, cols: u8) -> (c_int, c_int) {
let mut master: c_int = 0;
let mut slave: c_int = 0;
let mut win = winsize {
ws_row: rows as libc::c_ushort,
ws_col: cols as libc::c_ushort,
ws_xpixel: 0,
ws_ypixel: 0,
};
let res = unsafe {
libc::openpty(&mut master, &mut slave, ptr::null_mut(), ptr::null_mut(), &mut win)
};
if res < 0 {
die!("openpty failed");
}
(master, slave)
}
/// Really only needed on BSD, but should be fine elsewhere
fn set_controlling_terminal(fd: c_int) {
let res = unsafe {
libc::ioctl(fd, libc::TIOCSCTTY as _, 0)
};
if res < 0 {
die!("ioctl TIOCSCTTY failed: {}", errno());
}
}
#[derive(Debug)]
struct Passwd<'a> {
name: &'a str,
passwd: &'a str,
uid: libc::uid_t,
gid: libc::gid_t,
gecos: &'a str,
dir: &'a str,
shell: &'a str,
}
/// Return a Passwd struct with pointers into the provided buf
///
/// # Unsafety
///
/// If `buf` is changed while `Passwd` is alive, bad thing will almost certainly happen.
fn get_pw_entry(buf: &mut [i8; 1024]) -> Passwd {
// Create zeroed passwd struct
let mut entry: libc::passwd = unsafe { ::std::mem::uninitialized() };
let mut res: *mut libc::passwd = ptr::null_mut();
// Try and read the pw file.
let uid = unsafe { libc::getuid() };
let status = unsafe {
libc::getpwuid_r(uid, &mut entry, buf.as_mut_ptr() as *mut _, buf.len(), &mut res)
};
if status < 0 {
die!("getpwuid_r failed");
}
if res.is_null() {
die!("pw not found");
}
// sanity check
assert_eq!(entry.pw_uid, uid);
// Build a borrowed Passwd struct
//
// Transmute is used here to conveniently cast from the raw CStr to a &str with the appropriate
// lifetime.
Passwd {
name: unsafe { CStr::from_ptr(entry.pw_name).to_str().unwrap() },
passwd: unsafe { CStr::from_ptr(entry.pw_passwd).to_str().unwrap() },
uid: entry.pw_uid,
gid: entry.pw_gid,
gecos: unsafe { CStr::from_ptr(entry.pw_gecos).to_str().unwrap() },
dir: unsafe { CStr::from_ptr(entry.pw_dir).to_str().unwrap() },
shell: unsafe { CStr::from_ptr(entry.pw_shell).to_str().unwrap() },
}
}
/// Exec a shell
fn execsh(config: &Config) -> ! {
let mut buf = [0; 1024];
let pw = get_pw_entry(&mut buf);
let shell = match config.shell() {
Some(shell) => match shell.to_str() {
Some(shell) => shell,
None => die!("Invalid shell value")
},
None => pw.shell
};
// setup environment
env::set_var("LOGNAME", pw.name);
env::set_var("USER", pw.name);
env::set_var("SHELL", shell);
env::set_var("HOME", pw.dir);
env::set_var("TERM", "xterm-256color"); // sigh
unsafe {
libc::signal(libc::SIGCHLD, libc::SIG_DFL);
libc::signal(libc::SIGHUP, libc::SIG_DFL);
libc::signal(libc::SIGINT, libc::SIG_DFL);
libc::signal(libc::SIGQUIT, libc::SIG_DFL);
libc::signal(libc::SIGTERM, libc::SIG_DFL);
libc::signal(libc::SIGALRM, libc::SIG_DFL);
}
// pw.shell is null terminated
let shell = unsafe { CStr::from_ptr(shell.as_ptr() as *const _) };
let argv = [shell.as_ptr(), ptr::null()];
let res = unsafe {
libc::execvp(shell.as_ptr(), argv.as_ptr())
};
if res < 0 {
die!("execvp failed: {}", errno());
}
::std::process::exit(1);
}
/// Create a new tty and return a handle to interact with it.
pub fn new<T: ToWinsize>(config: &Config, size: T) -> Pty {
let win = size.to_winsize();
let (master, slave) = openpty(win.ws_row as _, win.ws_col as _);
match fork() {
Relation::Child => {
unsafe {
// Create a new process group
libc::setsid();
// Duplicate pty slave to be child stdin, stdout, and stderr
libc::dup2(slave, 0);
libc::dup2(slave, 1);
libc::dup2(slave, 2);
}
set_controlling_terminal(slave);
// No longer need slave/master fds
unsafe {
libc::close(slave);
libc::close(master);
}
// Exec a shell!
execsh(config);
},
Relation::Parent(pid) => {
unsafe {
// Set PID for SIGCHLD handler
PID = pid;
// Handle SIGCHLD
libc::signal(SIGCHLD, sigchld as _);
// Parent doesn't need slave fd
libc::close(slave);
}
unsafe {
// Maybe this should be done outside of this function so nonblocking
// isn't forced upon consumers. Although maybe it should be?
set_nonblocking(master);
}
let pty = Pty { fd: master };
pty.resize(size);
pty
}
}
}
pub struct Pty {
fd: c_int,
}
impl Pty {
/// Get reader for the TTY
///
/// XXX File is a bad abstraction here; it closes the fd on drop
pub fn reader(&self) -> File {
unsafe {
File::from_raw_fd(self.fd)
}
}
/// Resize the pty
///
/// Tells the kernel that the window size changed with the new pixel
/// dimensions and line/column counts.
pub fn resize<T: ToWinsize>(&self, size: T) {
let win = size.to_winsize();
let res = unsafe {
libc::ioctl(self.fd, libc::TIOCSWINSZ, &win as *const _)
};
if res < 0 {
die!("ioctl TIOCSWINSZ failed: {}", errno());
}
}
}
/// Types that can produce a `libc::winsize`
pub trait ToWinsize {
/// Get a `libc::winsize`
fn to_winsize(&self) -> winsize;
}
impl<'a> ToWinsize for &'a SizeInfo {
fn to_winsize(&self) -> winsize {
winsize {
ws_row: self.lines().0 as libc::c_ushort,
ws_col: self.cols().0 as libc::c_ushort,
ws_xpixel: self.width as libc::c_ushort,
ws_ypixel: self.height as libc::c_ushort,
}
}
}
impl OnResize for Pty {
fn on_resize(&mut self, size: &SizeInfo) {
self.resize(size);
}
}
/// Put `fd` into non-blocking mode.
///
/// Panics (via assert) if either `fcntl` call fails.
unsafe fn set_nonblocking(fd: c_int) {
    use libc::{fcntl, F_SETFL, F_GETFL, O_NONBLOCK};

    // Read the current flags first and check the result; otherwise a -1
    // error value would be OR'd with O_NONBLOCK and written back.
    let flags = fcntl(fd, F_GETFL, 0);
    assert!(flags >= 0, "fcntl F_GETFL failed");

    let res = fcntl(fd, F_SETFL, flags | O_NONBLOCK);
    assert_eq!(res, 0);
}
#[test]
fn test_get_pw_entry() {
let mut buf: [i8; 1024] = [0; 1024];
let pw = get_pw_entry(&mut buf);
println!("{:?}", pw);
}
|
use std::marker;
use std::ops::{Add, Div, Mul, Not, Sub};
/// A 12-bit unsigned integer, stored in the low 12 bits of a `u16`.
#[derive(Debug,Clone,Copy,PartialEq,Eq,PartialOrd,Ord)]
pub struct U12(u16);

// MARK: - Public Constants

/// The largest value representable by the `U12` type.
pub const MAX: U12 = U12(0xFFF);

/// The smallest value representable by the `U12` type.
pub const MIN: U12 = U12(0x000);
// MARK: - Implementation
impl U12 {
/// Returns the smallest value that can be represented by this integer type.
pub fn min_value() -> Self {
MIN
}
/// Returns the largest value that can be represented by this integer type.
pub fn max_value() -> Self {
MAX
}
/// Returns the number of ones in the binary representation of `self`.
pub fn count_ones(self) -> u32 {
self.0.count_ones()
}
/// Returns the number of zeros in the binary representation of `self`.
pub fn count_zeros(self) -> u32 {
self.0.count_zeros() - 4
}
/// Returns the number of leading zeros in the binary representation of `self`.
pub fn leading_zeros(self) -> u32 {
self.0.leading_zeros() - 4
}
/// Returns the number of trailing zeros in the binary representation of `self`.
pub fn trailing_zeros(self) -> u32 {
self.0.trailing_zeros()
}
/// Checked integer addition.
/// Computes `self + other`, returning `None` if overflow occurred.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).checked_add(1u8.into()), Some(U12::from(2u8)));
/// assert_eq!(U12::max_value().checked_add(1u8.into()), None);
/// ```
pub fn checked_add(self, other: Self) -> Option<Self> {
match self.0 + other.0 {
result @ 0...4095 => Some(U12(result)),
_ => None
}
}
/// Saturating integer addition.
/// Computes `self + other`, saturating at the numeric bounds instead of overflowing.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).saturating_add(1u8.into()), U12::from(2u8));
/// assert_eq!(U12::max_value().saturating_add(1u8.into()), U12::max_value());
/// ```
pub fn saturating_add(self, other: Self) -> Self {
match self.0 + other.0 {
result @ 0...4095 => U12(result),
_ => Self::max_value()
}
}
/// Wrapping (modular) addition.
/// Computes `self + other`, wrapping around at the boundary of the type.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).wrapping_add(1u8.into()), U12::from(2u8));
/// assert_eq!(U12::max_value().wrapping_add(3u8.into()), U12::from(2u8));
/// ```
pub fn wrapping_add(self, other: Self) -> Self {
U12((self.0 + other.0) & 0xFFF)
}
/// Overflowing addition.
/// Computes `self + other`, returning a tuple of the addition along with a
/// boolean indicating whether an arithmetic overflow would occur.
/// If an overflow would have occurred then the wrapped value is returned.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).overflowing_add(1u8.into()), (U12::from(2u8), false));
/// assert_eq!(U12::max_value().overflowing_add(3u8.into()), (U12::from(2u8), true));
/// ```
pub fn overflowing_add(self, other: Self) -> (Self, bool) {
match self.checked_add(other) {
Some(result) => (result, false),
None => (self.wrapping_add(other), true)
}
}
/// Checked integer subtraction.
/// Computes `self - other`, returning `None` if underflow occurred.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).checked_sub(1u8.into()), Some(U12::from(0u8)));
/// assert_eq!(U12::min_value().checked_sub(1u8.into()), None);
/// ```
pub fn checked_sub(self, other: Self) -> Option<Self> {
match self.0.checked_sub(other.0) {
Some(value) => Some(U12(value)),
None => None
}
}
/// Saturating integer subtraction.
/// Computes `self - other`, saturating at the numeric bounds instead of overflowing.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).saturating_sub(1u8.into()), U12::min_value());
/// assert_eq!(U12::min_value().saturating_sub(5u8.into()), U12::min_value());
/// ```
pub fn saturating_sub(self, other: Self) -> Self {
U12(self.0.saturating_sub(other.0))
}
/// Wrapping (modular) subtraction.
/// Computes `self - other`, wrapping around at the boundary of the type.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(1u8).wrapping_sub(1u8.into()), U12::min_value());
/// assert_eq!(U12::min_value().wrapping_sub(5u8.into()), (0xFFB as u16).unchecked_into());
/// ```
pub fn wrapping_sub(self, other: Self) -> Self {
U12(self.0.wrapping_sub(other.0) & 0xFFF)
}
/// Checked integer multiplication.
/// Computes `self * other`, returning `None` if overflow occurred.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).checked_mul(255u8.into()), Some((510 as u16).unchecked_into()));
/// assert_eq!(U12::from(2u8).checked_mul((2048u16).unchecked_into()), None);
/// assert_eq!(U12::from(2u8).checked_mul((4095u16).unchecked_into()), None);
/// ```
pub fn checked_mul(self, other: Self) -> Option<Self> {
match self.0.checked_mul(other.0) {
Some(small) if small < 4096 => Some(U12(small)),
_ => None
}
}
/// Saturating integer multiplication.
/// Computes `self * other`, saturating at the numeric bounds instead of overflowing.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).saturating_mul(1u8.into()), 2u8.into());
/// assert_eq!(U12::from(2u8).saturating_mul((2048u16).unchecked_into()), U12::max_value());
/// assert_eq!(U12::from(2u8).saturating_mul((4095u16).unchecked_into()), U12::max_value());
/// ```
pub fn saturating_mul(self, other: Self) -> Self {
match self.0.checked_mul(other.0) {
Some(small) if small < 4096 => U12(small),
_ => Self::max_value()
}
}
/// Wrapping (modular) multiplication.
/// Computes `self * other`, wrapping around at the boundary of the type.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).wrapping_mul(1u8.into()), 2u8.into());
/// assert_eq!(U12::from(2u8).wrapping_mul((2048u16).unchecked_into()), 0u8.into());
/// assert_eq!(U12::from(2u8).wrapping_mul((4095u16).unchecked_into()), (0xFFE as u16).unchecked_into());
/// ```
pub fn wrapping_mul(self, other: Self) -> Self {
U12(self.0.wrapping_mul(other.0) & 0xFFF)
}
/// Checked integer division.
/// Computes `self / other`, returning None if other == 0 or the operation results in underflow or overflow.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).checked_div(0u8.into()), None);
/// assert_eq!(U12::from(2u8).checked_div((2048u16).unchecked_into()), Some(U12::min_value()));
/// assert_eq!(U12::from(2u8).checked_div(2u8.into()), Some(U12::from(1u8)));
/// ```
pub fn checked_div(self, other: Self) -> Option<Self> {
match self.0.checked_div(other.0) {
Some(small) => Some(U12(small)),
_ => None
}
}
/// Wrapping (modular) division.
/// Computes self / other. Wrapped division on unsigned types is just normal division.
/// There's no way wrapping could ever happen. This function exists, so that all operations
/// are accounted for in the wrapping operations.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).wrapping_div((2048u16).unchecked_into()), U12::min_value());
/// assert_eq!(U12::from(2u8).wrapping_div(2u8.into()), U12::from(1u8));
/// ```
pub fn wrapping_div(self, other: Self) -> Self {
U12(self.0.wrapping_div(other.0))
}
/// Checked integer negation.
/// Computes `-self`, returning `None` unless `self == 0`.
/// Note that negating any positive integer will overflow.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(0u8).checked_neg(), Some(0u8.into()));
/// assert_eq!(U12::from(2u8).checked_neg(), None);
/// ```
pub fn checked_neg(self) -> Option<Self> {
match self.0 {
0 => Some(self),
_ => None
}
}
/// Wrapping (modular) negation.
/// Computes `-self`, wrapping around at the boundary of the type.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).wrapping_neg(), 0xFFEu16.unchecked_into());
/// assert_eq!(U12::from(255u8).wrapping_neg(), 0xF01u16.unchecked_into());
/// ```
pub fn wrapping_neg(self) -> Self {
U12(self.0.wrapping_neg() & 0xFFF)
}
/// Negates self in an overflowing fashion.
/// Returns `!self + 1` using wrapping operations to return the value that
/// represents the negation of this unsigned value. Note that for positive
/// unsigned values overflow always occurs, but negating `0` does not overflow.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(0u8).overflowing_neg(), (0u8.into(), false));
/// assert_eq!(U12::from(2u8).overflowing_neg(), (0xFFEu16.unchecked_into(), true));
/// ```
pub fn overflowing_neg(self) -> (U12, bool) {
match self.0 {
0 => (self, false),
_ => (self.wrapping_neg(), true)
}
}
}
// MARK: - Non-Failable Conversions - From Smaller Types
impl From<u8> for U12 {
fn from(small: u8) -> Self {
U12(small as u16)
}
}
// MARK: - Non-Failable Conversions - Into Larger Types
/// Implements From<U12> for the specified type.
macro_rules! impl_from_u12 {
($result:path) => {
impl From<U12> for $result {
fn from(small: U12) -> Self {
small.0 as Self
}
}
}
}
impl_from_u12!(u16);
impl_from_u12!(u32);
impl_from_u12!(u64);
impl_from_u12!(usize);
// MARK: - Failable Conversions - From Larger Types
/// Trait for implementing failable conversions in a generic way.
pub trait FailableInto<T> where Self: marker::Sized, T: marker::Sized {

  /// Returns the receiver as `Some(T)` if non-truncating, or `None`.
  fn failable_into(self) -> Option<T>;

  /// Returns the receiver as `T` by using `failable_into()` and unwrapping the result.
  ///
  /// # Panics
  /// This method will panic if `failable_into()` returns `None`.
  fn unchecked_into(self) -> T {
    self.failable_into().unwrap()
  }

}
/// Implements FailableAs<U12> for the specified type.
macro_rules! impl_failable_into_u12 {
($source_type:path) => {
impl FailableInto<U12> for $source_type {
fn failable_into(self) -> Option<U12> {
if self > 0xFFF {
None
} else {
Some(U12(self as u16))
}
}
}
}
}
impl_failable_into_u12!(u16);
impl_failable_into_u12!(u32);
impl_failable_into_u12!(u64);
impl_failable_into_u12!(usize);
// MARK: - Default
impl Default for U12 {
fn default() -> Self {
U12::min_value()
}
}
// MARK: - Add
impl Add<U12> for U12 {
type Output = U12;
fn add(self, other: U12) -> Self::Output {
match self.checked_add(other) {
Some(result) => result,
None => {
panic!("arithmetic overflow")
}
}
}
}
impl<'a> Add<U12> for &'a U12 {
type Output = U12;
fn add(self, other: U12) -> Self::Output {
(*self).add(other)
}
}
impl<'a> Add<&'a U12> for U12 {
type Output = U12;
fn add(self, other: &'a U12) -> Self::Output {
self.add(*other)
}
}
impl<'a,'b> Add<&'a U12> for &'b U12 {
type Output = U12;
fn add(self, other: &'a U12) -> Self::Output {
(*self).add(*other)
}
}
// MARK: - Sub
impl Sub<U12> for U12 {
type Output = U12;
fn sub(self, other: U12) -> Self::Output {
match self.checked_sub(other) {
Some(result) => result,
None => {
panic!("arithmetic underflow")
}
}
}
}
impl<'a> Sub<U12> for &'a U12 {
type Output = U12;
fn sub(self, other: U12) -> Self::Output {
(*self).sub(other)
}
}
impl<'a> Sub<&'a U12> for U12 {
type Output = U12;
fn sub(self, other: &'a U12) -> Self::Output {
self.sub(*other)
}
}
impl<'a,'b> Sub<&'a U12> for &'b U12 {
type Output = U12;
fn sub(self, other: &'a U12) -> Self::Output {
(*self).sub(*other)
}
}
// MARK: - Mul
impl Mul<U12> for U12 {
type Output = U12;
fn mul(self, other: U12) -> Self::Output {
match self.checked_mul(other) {
Some(result) => result,
None => {
panic!("arithmetic overflow")
}
}
}
}
impl<'a> Mul<U12> for &'a U12 {
type Output = U12;
fn mul(self, other: U12) -> Self::Output {
(*self).mul(other)
}
}
impl<'a> Mul<&'a U12> for U12 {
type Output = U12;
fn mul(self, other: &'a U12) -> Self::Output {
self.mul(*other)
}
}
impl<'a,'b> Mul<&'a U12> for &'b U12 {
type Output = U12;
fn mul(self, other: &'a U12) -> Self::Output {
(*self).mul(*other)
}
}
// MARK: - Div
impl Div<U12> for U12 {
type Output = U12;
fn div(self, other: U12) -> Self::Output {
match self.checked_div(other) {
Some(result) => result,
None => {
panic!("arithmetic exception")
}
}
}
}
impl<'a> Div<U12> for &'a U12 {
type Output = U12;
fn div(self, other: U12) -> Self::Output {
(*self).div(other)
}
}
impl<'a> Div<&'a U12> for U12 {
type Output = U12;
fn div(self, other: &'a U12) -> Self::Output {
self.div(*other)
}
}
impl<'a,'b> Div<&'a U12> for &'b U12 {
type Output = U12;
fn div(self, other: &'a U12) -> Self::Output {
(*self).div(*other)
}
}
// MARK: - Not
impl Not for U12 {
  type Output = U12;

  /// Bitwise negation within the 12-bit domain: flip all bits of the
  /// backing `u16`, then mask the result back down to the low 12 bits.
  fn not(self) -> Self::Output {
    let flipped = !self.0;
    U12(flipped & 0xFFF)
  }
}
impl<'a> Not for &'a U12 {
type Output = U12;
fn not(self) -> Self::Output {
(*self).not()
}
}
Macro for implementing arithmetic trait families.
use std::marker;
use std::ops::{Add, Div, Mul, Not, Sub};
#[derive(Debug,Clone,Copy,PartialEq,Eq,PartialOrd,Ord)]
pub struct U12(u16);
// MARK: - Public Constants
/// The largest value representable by the `U12` type.
pub const MAX: U12 = U12(0xFFF);
/// The smallest value representable by the `U12` type.
pub const MIN: U12 = U12(0x000);
// MARK: - Implementation
impl U12 {
/// Returns the smallest value that can be represented by this integer type.
pub fn min_value() -> Self {
MIN
}
/// Returns the largest value that can be represented by this integer type.
pub fn max_value() -> Self {
MAX
}
/// Returns the number of ones in the binary representation of `self`.
pub fn count_ones(self) -> u32 {
self.0.count_ones()
}
/// Returns the number of zeros in the binary representation of `self`.
pub fn count_zeros(self) -> u32 {
self.0.count_zeros() - 4
}
/// Returns the number of leading zeros in the binary representation of `self`.
pub fn leading_zeros(self) -> u32 {
self.0.leading_zeros() - 4
}
/// Returns the number of trailing zeros in the binary representation of `self`.
pub fn trailing_zeros(self) -> u32 {
self.0.trailing_zeros()
}
/// Checked integer addition.
/// Computes `self + other`, returning `None` if overflow occurred.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).checked_add(1u8.into()), Some(U12::from(2u8)));
/// assert_eq!(U12::max_value().checked_add(1u8.into()), None);
/// ```
pub fn checked_add(self, other: Self) -> Option<Self> {
match self.0 + other.0 {
result @ 0...4095 => Some(U12(result)),
_ => None
}
}
/// Saturating integer addition.
/// Computes `self + other`, saturating at the numeric bounds instead of overflowing.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).saturating_add(1u8.into()), U12::from(2u8));
/// assert_eq!(U12::max_value().saturating_add(1u8.into()), U12::max_value());
/// ```
pub fn saturating_add(self, other: Self) -> Self {
match self.0 + other.0 {
result @ 0...4095 => U12(result),
_ => Self::max_value()
}
}
/// Wrapping (modular) addition.
/// Computes `self + other`, wrapping around at the boundary of the type.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).wrapping_add(1u8.into()), U12::from(2u8));
/// assert_eq!(U12::max_value().wrapping_add(3u8.into()), U12::from(2u8));
/// ```
pub fn wrapping_add(self, other: Self) -> Self {
U12((self.0 + other.0) & 0xFFF)
}
/// Overflowing addition.
/// Computes `self + other`, returning a tuple of the addition along with a
/// boolean indicating whether an arithmetic overflow would occur.
/// If an overflow would have occurred then the wrapped value is returned.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).overflowing_add(1u8.into()), (U12::from(2u8), false));
/// assert_eq!(U12::max_value().overflowing_add(3u8.into()), (U12::from(2u8), true));
/// ```
pub fn overflowing_add(self, other: Self) -> (Self, bool) {
match self.checked_add(other) {
Some(result) => (result, false),
None => (self.wrapping_add(other), true)
}
}
/// Checked integer subtraction.
/// Computes `self - other`, returning `None` if underflow occurred.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).checked_sub(1u8.into()), Some(U12::from(0u8)));
/// assert_eq!(U12::min_value().checked_sub(1u8.into()), None);
/// ```
pub fn checked_sub(self, other: Self) -> Option<Self> {
match self.0.checked_sub(other.0) {
Some(value) => Some(U12(value)),
None => None
}
}
/// Saturating integer subtraction.
/// Computes `self - other`, saturating at the numeric bounds instead of overflowing.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::U12;
///
/// assert_eq!(U12::from(1u8).saturating_sub(1u8.into()), U12::min_value());
/// assert_eq!(U12::min_value().saturating_sub(5u8.into()), U12::min_value());
/// ```
pub fn saturating_sub(self, other: Self) -> Self {
U12(self.0.saturating_sub(other.0))
}
/// Wrapping (modular) subtraction.
/// Computes `self - other`, wrapping around at the boundary of the type.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(1u8).wrapping_sub(1u8.into()), U12::min_value());
/// assert_eq!(U12::min_value().wrapping_sub(5u8.into()), (0xFFB as u16).unchecked_into());
/// ```
pub fn wrapping_sub(self, other: Self) -> Self {
U12(self.0.wrapping_sub(other.0) & 0xFFF)
}
/// Checked integer multiplication.
/// Computes `self * other`, returning `None` if the product does not fit in 12 bits.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).checked_mul(255u8.into()), Some((510 as u16).unchecked_into()));
/// assert_eq!(U12::from(2u8).checked_mul((2048u16).unchecked_into()), None);
/// assert_eq!(U12::from(2u8).checked_mul((4095u16).unchecked_into()), None);
/// ```
pub fn checked_mul(self, other: Self) -> Option<Self> {
    // Overflow occurs either in the backing u16 multiply or when the
    // product exceeds the 12-bit maximum (0xFFF).
    match self.0.checked_mul(other.0) {
        Some(product) if product <= 0xFFF => Some(U12(product)),
        _ => None,
    }
}
/// Saturating integer multiplication.
/// Computes `self * other`, clamping at the numeric bounds instead of overflowing.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).saturating_mul(1u8.into()), 2u8.into());
/// assert_eq!(U12::from(2u8).saturating_mul((2048u16).unchecked_into()), U12::max_value());
/// assert_eq!(U12::from(2u8).saturating_mul((4095u16).unchecked_into()), U12::max_value());
/// ```
pub fn saturating_mul(self, other: Self) -> Self {
    // Any overflow (u16 or 12-bit) clamps to the maximum value.
    match self.checked_mul(other) {
        Some(product) => product,
        None => Self::max_value(),
    }
}
/// Wrapping (modular) multiplication.
/// Computes `self * other`, wrapping around at the boundary of the 12-bit type.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).wrapping_mul(1u8.into()), 2u8.into());
/// assert_eq!(U12::from(2u8).wrapping_mul((2048u16).unchecked_into()), 0u8.into());
/// assert_eq!(U12::from(2u8).wrapping_mul((4095u16).unchecked_into()), (0xFFE as u16).unchecked_into());
/// ```
pub fn wrapping_mul(self, other: Self) -> Self {
    // Wrap in u16 space, then mask back down to 12 bits.
    let product = self.0.wrapping_mul(other.0);
    U12(product & 0xFFF)
}
/// Checked integer division.
/// Computes `self / other`, returning None if other == 0 or the operation results in underflow or overflow.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).checked_div(0u8.into()), None);
/// assert_eq!(U12::from(2u8).checked_div((2048u16).unchecked_into()), Some(U12::min_value()));
/// assert_eq!(U12::from(2u8).checked_div(2u8.into()), Some(U12::from(1u8)));
/// ```
pub fn checked_div(self, other: Self) -> Option<Self> {
match self.0.checked_div(other.0) {
Some(small) => Some(U12(small)),
_ => None
}
}
/// Wrapping (modular) division.
/// Computes `self / other`. Wrapped division on unsigned types is just normal
/// division — wrapping can never occur. The method exists so that the full
/// family of wrapping operations is available on `U12`.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).wrapping_div((2048u16).unchecked_into()), U12::min_value());
/// assert_eq!(U12::from(2u8).wrapping_div(2u8.into()), U12::from(1u8));
/// ```
pub fn wrapping_div(self, other: Self) -> Self {
    // Quotient of two 12-bit values always fits in 12 bits.
    let quotient = self.0.wrapping_div(other.0);
    U12(quotient)
}
/// Checked integer negation.
/// Computes `-self`, returning `None` unless `self == 0`.
/// Note that negating any positive unsigned integer overflows.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(0u8).checked_neg(), Some(0u8.into()));
/// assert_eq!(U12::from(2u8).checked_neg(), None);
/// ```
pub fn checked_neg(self) -> Option<Self> {
    // Only zero negates without overflow in an unsigned type.
    if self.0 == 0 {
        Some(self)
    } else {
        None
    }
}
/// Wrapping (modular) negation.
/// Computes `-self`, wrapping around at the boundary of the 12-bit type.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(2u8).wrapping_neg(), 0xFFEu16.unchecked_into());
/// assert_eq!(U12::from(255u8).wrapping_neg(), 0xF01u16.unchecked_into());
/// ```
pub fn wrapping_neg(self) -> Self {
    // Two's-complement negate in u16 space, then mask back to 12 bits.
    let negated = self.0.wrapping_neg();
    U12(negated & 0xFFF)
}
/// Negates `self` in an overflowing fashion.
/// Returns `!self + 1` using wrapping operations, i.e. the two's-complement
/// negation of this unsigned value. Negating any non-zero value overflows;
/// negating `0` does not.
///
/// # Examples
/// Basic usage:
///
/// ```
/// use twelve_bit::u12::*;
///
/// assert_eq!(U12::from(0u8).overflowing_neg(), (0u8.into(), false));
/// assert_eq!(U12::from(2u8).overflowing_neg(), (0xFFEu16.unchecked_into(), true));
/// ```
pub fn overflowing_neg(self) -> (U12, bool) {
    if self.0 == 0 {
        (self, false)
    } else {
        (self.wrapping_neg(), true)
    }
}
}
// MARK: - Non-Failable Conversions - From Smaller Types
impl From<u8> for U12 {
    /// Widens a `u8` into a `U12`; always lossless since 8 bits fit in 12.
    fn from(small: u8) -> Self {
        U12(u16::from(small))
    }
}
// MARK: - Non-Failable Conversions - Into Larger Types
/// Implements `From<U12>` for the specified wider integer type.
macro_rules! impl_from_u12 {
    ($result:path) => {
        impl From<U12> for $result {
            /// Widening conversion out of the 12-bit value; always lossless.
            fn from(value: U12) -> Self {
                value.0 as Self
            }
        }
    }
}

impl_from_u12!(u16);
impl_from_u12!(u32);
impl_from_u12!(u64);
impl_from_u12!(usize);
// MARK: - Failable Conversions - From Larger Types
/// Trait for implementing failable (possibly truncating) conversions in a generic way.
pub trait FailableInto<T> where Self: marker::Sized, T: marker::Sized {
    /// Returns the receiver as `Some(T)` if the conversion is non-truncating, or `None`.
    fn failable_into(self) -> Option<T>;
    /// Returns the receiver as `T` by calling `failable_into()` and unwrapping the result.
    ///
    /// # Panics
    /// This method will panic if `failable_into()` returns `None`.
    fn unchecked_into(self) -> T {
        self.failable_into().unwrap()
    }
}
/// Implements `FailableInto<U12>` for the specified wider integer type.
macro_rules! impl_failable_into_u12 {
    ($source_type:path) => {
        impl FailableInto<U12> for $source_type {
            // Succeeds only when the value fits in 12 bits (<= 0xFFF).
            fn failable_into(self) -> Option<U12> {
                if self > 0xFFF {
                    None
                } else {
                    Some(U12(self as u16))
                }
            }
        }
    }
}

impl_failable_into_u12!(u16);
impl_failable_into_u12!(u32);
impl_failable_into_u12!(u64);
impl_failable_into_u12!(usize);
// MARK: - Default
impl Default for U12 {
fn default() -> Self {
U12::min_value()
}
}
// MARK: - Arithmetic Operator Traits
///
/// Implements an arithmetic trait family for `U12`. This macro generates
/// implementations for an arithmetic trait `$trait_name` such that it is
/// possible to invoke `$trait_fn` on all of `(U12, U12)`, `(&'a U12, U12)`,
/// `(U12, &'a U12)` and `(&'a U12, &'b U12)`. The implementation calls through to
/// `$checked_method` on U12. If the `$checked_method` returns `None`, the
/// trait panics with the message specified as `$message`.
///
macro_rules! impl_arithmetic_trait_family_for_u12 {
    ($trait_name:ident, $trait_fn:ident, $checked_method:ident, $message:expr) => {
        /// Implementation of operation(U12, U12) -> U12.
        impl $trait_name<U12> for U12 {
            type Output = U12;
            fn $trait_fn(self, other: U12) -> Self::Output {
                // Checked arithmetic, with a panic on failure — this mirrors
                // the overflow behavior of the built-in integer operators.
                match self.$checked_method(other) {
                    Some(result) => result,
                    None => {
                        panic!($message)
                    }
                }
            }
        }
        /// Implementation of operation(&'a U12, U12) -> U12.
        impl<'a> $trait_name<U12> for &'a U12 {
            type Output = U12;
            fn $trait_fn(self, other: U12) -> Self::Output {
                (*self).$trait_fn(other)
            }
        }
        /// Implementation of operation(U12, &'a U12) -> U12.
        impl<'a> $trait_name<&'a U12> for U12 {
            type Output = U12;
            fn $trait_fn(self, other: &'a U12) -> Self::Output {
                self.$trait_fn(*other)
            }
        }
        /// Implementation of operation(&'a U12, &'b U12) -> U12.
        impl<'a,'b> $trait_name<&'a U12> for &'b U12 {
            type Output = U12;
            fn $trait_fn(self, other: &'a U12) -> Self::Output {
                (*self).$trait_fn(*other)
            }
        }
    }
}

impl_arithmetic_trait_family_for_u12!(Add, add, checked_add, "arithmetic overflow");
impl_arithmetic_trait_family_for_u12!(Sub, sub, checked_sub, "arithmetic underflow");
impl_arithmetic_trait_family_for_u12!(Mul, mul, checked_mul, "arithmetic overflow");
impl_arithmetic_trait_family_for_u12!(Div, div, checked_div, "arithmetic exception");
// MARK: - Not
impl Not for U12 {
    type Output = U12;
    /// Bitwise negation, masked back down to the low 12 bits.
    fn not(self) -> Self::Output {
        let flipped = !self.0;
        U12(flipped & 0xFFF)
    }
}
impl<'a> Not for &'a U12 {
    type Output = U12;
    /// Delegates to the by-value implementation (`U12` is `Copy`-cheap to deref).
    fn not(self) -> Self::Output {
        Not::not(*self)
    }
}
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Integer trait and functions.
use {Num, Signed};
pub trait Integer
: Sized
+ Num
+ PartialOrd + Ord + Eq
{
/// Floored integer division.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert!(( 8).div_floor(& 3) == 2);
/// assert!(( 8).div_floor(&-3) == -3);
/// assert!((-8).div_floor(& 3) == -3);
/// assert!((-8).div_floor(&-3) == 2);
///
/// assert!(( 1).div_floor(& 2) == 0);
/// assert!(( 1).div_floor(&-2) == -1);
/// assert!((-1).div_floor(& 2) == -1);
/// assert!((-1).div_floor(&-2) == 0);
/// ~~~
fn div_floor(&self, other: &Self) -> Self;
/// Floored integer modulo, satisfying:
///
/// ~~~
/// # use num::Integer;
/// # let n = 1; let d = 1;
/// assert!(n.div_floor(&d) * d + n.mod_floor(&d) == n)
/// ~~~
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert!(( 8).mod_floor(& 3) == 2);
/// assert!(( 8).mod_floor(&-3) == -1);
/// assert!((-8).mod_floor(& 3) == 1);
/// assert!((-8).mod_floor(&-3) == -2);
///
/// assert!(( 1).mod_floor(& 2) == 1);
/// assert!(( 1).mod_floor(&-2) == -1);
/// assert!((-1).mod_floor(& 2) == 1);
/// assert!((-1).mod_floor(&-2) == -1);
/// ~~~
fn mod_floor(&self, other: &Self) -> Self;
/// Greatest Common Divisor (GCD).
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(6.gcd(&8), 2);
/// assert_eq!(7.gcd(&3), 1);
/// ~~~
fn gcd(&self, other: &Self) -> Self;
/// Lowest Common Multiple (LCM).
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(7.lcm(&3), 21);
/// assert_eq!(2.lcm(&4), 4);
/// ~~~
fn lcm(&self, other: &Self) -> Self;
/// Deprecated, use `is_multiple_of` instead.
fn divides(&self, other: &Self) -> bool;
/// Returns `true` if `other` is a multiple of `self`.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(9.is_multiple_of(&3), true);
/// assert_eq!(3.is_multiple_of(&9), false);
/// ~~~
fn is_multiple_of(&self, other: &Self) -> bool;
/// Returns `true` if the number is even.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(3.is_even(), false);
/// assert_eq!(4.is_even(), true);
/// ~~~
fn is_even(&self) -> bool;
/// Returns `true` if the number is odd.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(3.is_odd(), true);
/// assert_eq!(4.is_odd(), false);
/// ~~~
fn is_odd(&self) -> bool;
/// Simultaneous truncated integer division and modulus.
/// Returns `(quotient, remainder)`.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(( 8).div_rem( &3), ( 2, 2));
/// assert_eq!(( 8).div_rem(&-3), (-2, 2));
/// assert_eq!((-8).div_rem( &3), (-2, -2));
/// assert_eq!((-8).div_rem(&-3), ( 2, -2));
///
/// assert_eq!(( 1).div_rem( &2), ( 0, 1));
/// assert_eq!(( 1).div_rem(&-2), ( 0, 1));
/// assert_eq!((-1).div_rem( &2), ( 0, -1));
/// assert_eq!((-1).div_rem(&-2), ( 0, -1));
/// ~~~
#[inline]
fn div_rem(&self, other: &Self) -> (Self, Self);
/// Simultaneous floored integer division and modulus.
/// Returns `(quotient, remainder)`.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(( 8).div_mod_floor( &3), ( 2, 2));
/// assert_eq!(( 8).div_mod_floor(&-3), (-3, -1));
/// assert_eq!((-8).div_mod_floor( &3), (-3, 1));
/// assert_eq!((-8).div_mod_floor(&-3), ( 2, -2));
///
/// assert_eq!(( 1).div_mod_floor( &2), ( 0, 1));
/// assert_eq!(( 1).div_mod_floor(&-2), (-1, -1));
/// assert_eq!((-1).div_mod_floor( &2), (-1, 1));
/// assert_eq!((-1).div_mod_floor(&-2), ( 0, -1));
/// ~~~
fn div_mod_floor(&self, other: &Self) -> (Self, Self) {
(self.div_floor(other), self.mod_floor(other))
}
}
/// Simultaneous integer division and modulus
#[inline] pub fn div_rem<T: Integer>(x: T, y: T) -> (T, T) { x.div_rem(&y) }
/// Floored integer division
#[inline] pub fn div_floor<T: Integer>(x: T, y: T) -> T { x.div_floor(&y) }
/// Floored integer modulus
#[inline] pub fn mod_floor<T: Integer>(x: T, y: T) -> T { x.mod_floor(&y) }
/// Simultaneous floored integer division and modulus
#[inline] pub fn div_mod_floor<T: Integer>(x: T, y: T) -> (T, T) { x.div_mod_floor(&y) }
/// Calculates the Greatest Common Divisor (GCD) of `x` and `y`. The result is
/// always positive.
// `#[inline]` (not `always`) for consistency with the other free functions;
// these are trivial forwarding shims, so LLVM inlines them anyway.
#[inline] pub fn gcd<T: Integer>(x: T, y: T) -> T { x.gcd(&y) }
/// Calculates the Lowest Common Multiple (LCM) of `x` and `y`.
#[inline] pub fn lcm<T: Integer>(x: T, y: T) -> T { x.lcm(&y) }
/// Implements `Integer` for a signed primitive `$T` and generates a matching
/// `#[cfg(test)]` module named `$test_mod`.
macro_rules! impl_integer_for_isize {
    ($T:ty, $test_mod:ident) => (
        impl Integer for $T {
            /// Floored integer division
            #[inline]
            fn div_floor(&self, other: &$T) -> $T {
                // Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
                // December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
                match self.div_rem(other) {
                    (d, r) if (r > 0 && *other < 0)
                           || (r < 0 && *other > 0) => d - 1,
                    (d, _) => d,
                }
            }

            /// Floored integer modulo
            #[inline]
            fn mod_floor(&self, other: &$T) -> $T {
                // Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
                // December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
                match *self % *other {
                    r if (r > 0 && *other < 0)
                      || (r < 0 && *other > 0) => r + *other,
                    r => r,
                }
            }

            /// Calculates `div_floor` and `mod_floor` simultaneously
            #[inline]
            fn div_mod_floor(&self, other: &$T) -> ($T,$T) {
                // Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
                // December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
                match self.div_rem(other) {
                    (d, r) if (r > 0 && *other < 0)
                           || (r < 0 && *other > 0) => (d - 1, r + *other),
                    (d, r) => (d, r),
                }
            }

            /// Calculates the Greatest Common Divisor (GCD) of the number and
            /// `other`. The result is always positive.
            #[inline]
            fn gcd(&self, other: &$T) -> $T {
                // Use Stein's algorithm
                let mut m = *self;
                let mut n = *other;
                if m == 0 || n == 0 { return (m | n).abs() }

                // find common factors of 2
                let shift = (m | n).trailing_zeros();

                // The algorithm needs positive numbers, but the minimum value
                // can't be represented as a positive one.
                // It's also a power of two, so the gcd can be
                // calculated by bitshifting in that case

                // Assuming two's complement, the number created by the shift
                // is positive for all numbers except gcd = abs(min value)
                // The call to .abs() causes a panic in debug mode
                if m == <$T>::min_value() || n == <$T>::min_value() {
                    return (1 << shift).abs()
                }

                // guaranteed to be positive now, rest like unsigned algorithm
                m = m.abs();
                n = n.abs();

                // divide n and m by 2 until odd
                // m inside loop
                n >>= n.trailing_zeros();

                while m != 0 {
                    m >>= m.trailing_zeros();
                    if n > m { ::std::mem::swap(&mut n, &mut m) }
                    m -= n;
                }

                n << shift
            }

            /// Calculates the Lowest Common Multiple (LCM) of the number and
            /// `other`.
            #[inline]
            fn lcm(&self, other: &$T) -> $T {
                // Divide by the GCD *before* multiplying: `(*self * *other)`
                // can overflow on the intermediate product (e.g. 30i8.lcm(&20)
                // must not compute 600) even when the LCM itself fits in `$T`.
                (*self * (*other / self.gcd(other))).abs()
            }

            /// Deprecated, use `is_multiple_of` instead.
            #[inline]
            fn divides(&self, other: &$T) -> bool { return self.is_multiple_of(other); }

            /// Returns `true` if the number is a multiple of `other`.
            #[inline]
            fn is_multiple_of(&self, other: &$T) -> bool { *self % *other == 0 }

            /// Returns `true` if the number is divisible by `2`
            #[inline]
            fn is_even(&self) -> bool { (*self) & 1 == 0 }

            /// Returns `true` if the number is not divisible by `2`
            #[inline]
            fn is_odd(&self) -> bool { !self.is_even() }

            /// Simultaneous truncated integer division and modulus.
            #[inline]
            fn div_rem(&self, other: &$T) -> ($T, $T) {
                (*self / *other, *self % *other)
            }
        }

        #[cfg(test)]
        mod $test_mod {
            use Integer;

            /// Checks that the division rule holds for:
            ///
            /// - `n`: numerator (dividend)
            /// - `d`: denominator (divisor)
            /// - `qr`: quotient and remainder
            #[cfg(test)]
            fn test_division_rule((n,d): ($T,$T), (q,r): ($T,$T)) {
                assert_eq!(d * q + r, n);
            }

            #[test]
            fn test_div_rem() {
                fn test_nd_dr(nd: ($T,$T), qr: ($T,$T)) {
                    let (n,d) = nd;
                    let separate_div_rem = (n / d, n % d);
                    let combined_div_rem = n.div_rem(&d);

                    assert_eq!(separate_div_rem, qr);
                    assert_eq!(combined_div_rem, qr);

                    test_division_rule(nd, separate_div_rem);
                    test_division_rule(nd, combined_div_rem);
                }

                test_nd_dr(( 8,  3), ( 2,  2));
                test_nd_dr(( 8, -3), (-2,  2));
                test_nd_dr((-8,  3), (-2, -2));
                test_nd_dr((-8, -3), ( 2, -2));

                test_nd_dr(( 1,  2), ( 0,  1));
                test_nd_dr(( 1, -2), ( 0,  1));
                test_nd_dr((-1,  2), ( 0, -1));
                test_nd_dr((-1, -2), ( 0, -1));
            }

            #[test]
            fn test_div_mod_floor() {
                fn test_nd_dm(nd: ($T,$T), dm: ($T,$T)) {
                    let (n,d) = nd;
                    let separate_div_mod_floor = (n.div_floor(&d), n.mod_floor(&d));
                    let combined_div_mod_floor = n.div_mod_floor(&d);

                    assert_eq!(separate_div_mod_floor, dm);
                    assert_eq!(combined_div_mod_floor, dm);

                    test_division_rule(nd, separate_div_mod_floor);
                    test_division_rule(nd, combined_div_mod_floor);
                }

                test_nd_dm(( 8,  3), ( 2,  2));
                test_nd_dm(( 8, -3), (-3, -1));
                test_nd_dm((-8,  3), (-3,  1));
                test_nd_dm((-8, -3), ( 2, -2));

                test_nd_dm(( 1,  2), ( 0,  1));
                test_nd_dm(( 1, -2), (-1, -1));
                test_nd_dm((-1,  2), (-1,  1));
                test_nd_dm((-1, -2), ( 0, -1));
            }

            #[test]
            fn test_gcd() {
                assert_eq!((10 as $T).gcd(&2), 2 as $T);
                assert_eq!((10 as $T).gcd(&3), 1 as $T);
                assert_eq!((0 as $T).gcd(&3), 3 as $T);
                assert_eq!((3 as $T).gcd(&3), 3 as $T);
                assert_eq!((56 as $T).gcd(&42), 14 as $T);
                assert_eq!((3 as $T).gcd(&-3), 3 as $T);
                assert_eq!((-6 as $T).gcd(&3), 3 as $T);
                assert_eq!((-4 as $T).gcd(&-2), 2 as $T);
            }

            #[test]
            fn test_lcm() {
                assert_eq!((1 as $T).lcm(&0), 0 as $T);
                assert_eq!((0 as $T).lcm(&1), 0 as $T);
                assert_eq!((1 as $T).lcm(&1), 1 as $T);
                assert_eq!((-1 as $T).lcm(&1), 1 as $T);
                assert_eq!((1 as $T).lcm(&-1), 1 as $T);
                assert_eq!((-1 as $T).lcm(&-1), 1 as $T);
                assert_eq!((8 as $T).lcm(&9), 72 as $T);
                assert_eq!((11 as $T).lcm(&5), 55 as $T);
            }

            #[test]
            fn test_even() {
                assert_eq!((-4 as $T).is_even(), true);
                assert_eq!((-3 as $T).is_even(), false);
                assert_eq!((-2 as $T).is_even(), true);
                assert_eq!((-1 as $T).is_even(), false);
                assert_eq!((0 as $T).is_even(), true);
                assert_eq!((1 as $T).is_even(), false);
                assert_eq!((2 as $T).is_even(), true);
                assert_eq!((3 as $T).is_even(), false);
                assert_eq!((4 as $T).is_even(), true);
            }

            #[test]
            fn test_odd() {
                assert_eq!((-4 as $T).is_odd(), false);
                assert_eq!((-3 as $T).is_odd(), true);
                assert_eq!((-2 as $T).is_odd(), false);
                assert_eq!((-1 as $T).is_odd(), true);
                assert_eq!((0 as $T).is_odd(), false);
                assert_eq!((1 as $T).is_odd(), true);
                assert_eq!((2 as $T).is_odd(), false);
                assert_eq!((3 as $T).is_odd(), true);
                assert_eq!((4 as $T).is_odd(), false);
            }
        }
    )
}

impl_integer_for_isize!(i8, test_integer_i8);
impl_integer_for_isize!(i16, test_integer_i16);
impl_integer_for_isize!(i32, test_integer_i32);
impl_integer_for_isize!(i64, test_integer_i64);
impl_integer_for_isize!(isize, test_integer_isize);
/// Implements `Integer` for an unsigned primitive `$T` and generates a
/// matching `#[cfg(test)]` module named `$test_mod`.
macro_rules! impl_integer_for_usize {
    ($T:ty, $test_mod:ident) => (
        impl Integer for $T {
            /// Unsigned integer division. Returns the same result as `div` (`/`).
            #[inline]
            fn div_floor(&self, other: &$T) -> $T { *self / *other }

            /// Unsigned integer modulo operation. Returns the same result as `rem` (`%`).
            #[inline]
            fn mod_floor(&self, other: &$T) -> $T { *self % *other }

            /// Calculates the Greatest Common Divisor (GCD) of the number and `other`
            #[inline]
            fn gcd(&self, other: &$T) -> $T {
                // Use Stein's algorithm
                let mut m = *self;
                let mut n = *other;
                if m == 0 || n == 0 { return m | n }

                // find common factors of 2
                let shift = (m | n).trailing_zeros();

                // divide n and m by 2 until odd
                // m inside loop
                n >>= n.trailing_zeros();

                while m != 0 {
                    m >>= m.trailing_zeros();
                    if n > m { ::std::mem::swap(&mut n, &mut m) }
                    m -= n;
                }

                n << shift
            }

            /// Calculates the Lowest Common Multiple (LCM) of the number and `other`.
            #[inline]
            fn lcm(&self, other: &$T) -> $T {
                // Divide by the GCD *before* multiplying: `(*self * *other)`
                // can overflow on the intermediate product even when the LCM
                // itself fits in `$T`.
                *self * (*other / self.gcd(other))
            }

            /// Deprecated, use `is_multiple_of` instead.
            #[inline]
            fn divides(&self, other: &$T) -> bool { return self.is_multiple_of(other); }

            /// Returns `true` if the number is a multiple of `other`.
            #[inline]
            fn is_multiple_of(&self, other: &$T) -> bool { *self % *other == 0 }

            /// Returns `true` if the number is divisible by `2`.
            #[inline]
            fn is_even(&self) -> bool { (*self) & 1 == 0 }

            /// Returns `true` if the number is not divisible by `2`.
            #[inline]
            fn is_odd(&self) -> bool { !(*self).is_even() }

            /// Simultaneous truncated integer division and modulus.
            #[inline]
            fn div_rem(&self, other: &$T) -> ($T, $T) {
                (*self / *other, *self % *other)
            }
        }

        #[cfg(test)]
        mod $test_mod {
            use Integer;

            #[test]
            fn test_div_mod_floor() {
                assert_eq!((10 as $T).div_floor(&(3 as $T)), 3 as $T);
                assert_eq!((10 as $T).mod_floor(&(3 as $T)), 1 as $T);
                assert_eq!((10 as $T).div_mod_floor(&(3 as $T)), (3 as $T, 1 as $T));
                assert_eq!((5 as $T).div_floor(&(5 as $T)), 1 as $T);
                assert_eq!((5 as $T).mod_floor(&(5 as $T)), 0 as $T);
                assert_eq!((5 as $T).div_mod_floor(&(5 as $T)), (1 as $T, 0 as $T));
                assert_eq!((3 as $T).div_floor(&(7 as $T)), 0 as $T);
                assert_eq!((3 as $T).mod_floor(&(7 as $T)), 3 as $T);
                assert_eq!((3 as $T).div_mod_floor(&(7 as $T)), (0 as $T, 3 as $T));
            }

            #[test]
            fn test_gcd() {
                assert_eq!((10 as $T).gcd(&2), 2 as $T);
                assert_eq!((10 as $T).gcd(&3), 1 as $T);
                assert_eq!((0 as $T).gcd(&3), 3 as $T);
                assert_eq!((3 as $T).gcd(&3), 3 as $T);
                assert_eq!((56 as $T).gcd(&42), 14 as $T);
            }

            #[test]
            fn test_lcm() {
                assert_eq!((1 as $T).lcm(&0), 0 as $T);
                assert_eq!((0 as $T).lcm(&1), 0 as $T);
                assert_eq!((1 as $T).lcm(&1), 1 as $T);
                assert_eq!((8 as $T).lcm(&9), 72 as $T);
                assert_eq!((11 as $T).lcm(&5), 55 as $T);
                assert_eq!((15 as $T).lcm(&17), 255 as $T);
            }

            #[test]
            fn test_is_multiple_of() {
                assert!((6 as $T).is_multiple_of(&(6 as $T)));
                assert!((6 as $T).is_multiple_of(&(3 as $T)));
                assert!((6 as $T).is_multiple_of(&(1 as $T)));
            }

            #[test]
            fn test_even() {
                assert_eq!((0 as $T).is_even(), true);
                assert_eq!((1 as $T).is_even(), false);
                assert_eq!((2 as $T).is_even(), true);
                assert_eq!((3 as $T).is_even(), false);
                assert_eq!((4 as $T).is_even(), true);
            }

            #[test]
            fn test_odd() {
                assert_eq!((0 as $T).is_odd(), false);
                assert_eq!((1 as $T).is_odd(), true);
                assert_eq!((2 as $T).is_odd(), false);
                assert_eq!((3 as $T).is_odd(), true);
                assert_eq!((4 as $T).is_odd(), false);
            }
        }
    )
}

impl_integer_for_usize!(u8, test_integer_u8);
impl_integer_for_usize!(u16, test_integer_u16);
impl_integer_for_usize!(u32, test_integer_u32);
impl_integer_for_usize!(u64, test_integer_u64);
impl_integer_for_usize!(usize, test_integer_usize);
Added tests for `gcd`:
- compare `gcd` against a straightforward Euclidean implementation;
- verify that `gcd(min_value, min_value)` and `gcd(min_value, 0)` panic for signed integers.
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Integer trait and functions.
use {Num, Signed};
pub trait Integer
: Sized
+ Num
+ PartialOrd + Ord + Eq
{
/// Floored integer division.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert!(( 8).div_floor(& 3) == 2);
/// assert!(( 8).div_floor(&-3) == -3);
/// assert!((-8).div_floor(& 3) == -3);
/// assert!((-8).div_floor(&-3) == 2);
///
/// assert!(( 1).div_floor(& 2) == 0);
/// assert!(( 1).div_floor(&-2) == -1);
/// assert!((-1).div_floor(& 2) == -1);
/// assert!((-1).div_floor(&-2) == 0);
/// ~~~
fn div_floor(&self, other: &Self) -> Self;
/// Floored integer modulo, satisfying:
///
/// ~~~
/// # use num::Integer;
/// # let n = 1; let d = 1;
/// assert!(n.div_floor(&d) * d + n.mod_floor(&d) == n)
/// ~~~
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert!(( 8).mod_floor(& 3) == 2);
/// assert!(( 8).mod_floor(&-3) == -1);
/// assert!((-8).mod_floor(& 3) == 1);
/// assert!((-8).mod_floor(&-3) == -2);
///
/// assert!(( 1).mod_floor(& 2) == 1);
/// assert!(( 1).mod_floor(&-2) == -1);
/// assert!((-1).mod_floor(& 2) == 1);
/// assert!((-1).mod_floor(&-2) == -1);
/// ~~~
fn mod_floor(&self, other: &Self) -> Self;
/// Greatest Common Divisor (GCD).
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(6.gcd(&8), 2);
/// assert_eq!(7.gcd(&3), 1);
/// ~~~
fn gcd(&self, other: &Self) -> Self;
/// Lowest Common Multiple (LCM).
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(7.lcm(&3), 21);
/// assert_eq!(2.lcm(&4), 4);
/// ~~~
fn lcm(&self, other: &Self) -> Self;
/// Deprecated, use `is_multiple_of` instead.
fn divides(&self, other: &Self) -> bool;
/// Returns `true` if `other` is a multiple of `self`.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(9.is_multiple_of(&3), true);
/// assert_eq!(3.is_multiple_of(&9), false);
/// ~~~
fn is_multiple_of(&self, other: &Self) -> bool;
/// Returns `true` if the number is even.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(3.is_even(), false);
/// assert_eq!(4.is_even(), true);
/// ~~~
fn is_even(&self) -> bool;
/// Returns `true` if the number is odd.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(3.is_odd(), true);
/// assert_eq!(4.is_odd(), false);
/// ~~~
fn is_odd(&self) -> bool;
/// Simultaneous truncated integer division and modulus.
/// Returns `(quotient, remainder)`.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(( 8).div_rem( &3), ( 2, 2));
/// assert_eq!(( 8).div_rem(&-3), (-2, 2));
/// assert_eq!((-8).div_rem( &3), (-2, -2));
/// assert_eq!((-8).div_rem(&-3), ( 2, -2));
///
/// assert_eq!(( 1).div_rem( &2), ( 0, 1));
/// assert_eq!(( 1).div_rem(&-2), ( 0, 1));
/// assert_eq!((-1).div_rem( &2), ( 0, -1));
/// assert_eq!((-1).div_rem(&-2), ( 0, -1));
/// ~~~
#[inline]
fn div_rem(&self, other: &Self) -> (Self, Self);
/// Simultaneous floored integer division and modulus.
/// Returns `(quotient, remainder)`.
///
/// # Examples
///
/// ~~~
/// # use num::Integer;
/// assert_eq!(( 8).div_mod_floor( &3), ( 2, 2));
/// assert_eq!(( 8).div_mod_floor(&-3), (-3, -1));
/// assert_eq!((-8).div_mod_floor( &3), (-3, 1));
/// assert_eq!((-8).div_mod_floor(&-3), ( 2, -2));
///
/// assert_eq!(( 1).div_mod_floor( &2), ( 0, 1));
/// assert_eq!(( 1).div_mod_floor(&-2), (-1, -1));
/// assert_eq!((-1).div_mod_floor( &2), (-1, 1));
/// assert_eq!((-1).div_mod_floor(&-2), ( 0, -1));
/// ~~~
fn div_mod_floor(&self, other: &Self) -> (Self, Self) {
(self.div_floor(other), self.mod_floor(other))
}
}
/// Simultaneous integer division and modulus
#[inline] pub fn div_rem<T: Integer>(x: T, y: T) -> (T, T) { x.div_rem(&y) }
/// Floored integer division
#[inline] pub fn div_floor<T: Integer>(x: T, y: T) -> T { x.div_floor(&y) }
/// Floored integer modulus
#[inline] pub fn mod_floor<T: Integer>(x: T, y: T) -> T { x.mod_floor(&y) }
/// Simultaneous floored integer division and modulus
#[inline] pub fn div_mod_floor<T: Integer>(x: T, y: T) -> (T, T) { x.div_mod_floor(&y) }
/// Calculates the Greatest Common Divisor (GCD) of `x` and `y`. The result is
/// always positive.
// `#[inline]` (not `always`) for consistency with the other free functions;
// these are trivial forwarding shims, so LLVM inlines them anyway.
#[inline] pub fn gcd<T: Integer>(x: T, y: T) -> T { x.gcd(&y) }
/// Calculates the Lowest Common Multiple (LCM) of `x` and `y`.
#[inline] pub fn lcm<T: Integer>(x: T, y: T) -> T { x.lcm(&y) }
macro_rules! impl_integer_for_isize {
($T:ty, $test_mod:ident) => (
impl Integer for $T {
/// Floored integer division
#[inline]
fn div_floor(&self, other: &$T) -> $T {
// Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
// December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
match self.div_rem(other) {
(d, r) if (r > 0 && *other < 0)
|| (r < 0 && *other > 0) => d - 1,
(d, _) => d,
}
}
/// Floored integer modulo
#[inline]
fn mod_floor(&self, other: &$T) -> $T {
// Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
// December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
match *self % *other {
r if (r > 0 && *other < 0)
|| (r < 0 && *other > 0) => r + *other,
r => r,
}
}
/// Calculates `div_floor` and `mod_floor` simultaneously
#[inline]
fn div_mod_floor(&self, other: &$T) -> ($T,$T) {
// Algorithm from [Daan Leijen. _Division and Modulus for Computer Scientists_,
// December 2001](http://research.microsoft.com/pubs/151917/divmodnote-letter.pdf)
match self.div_rem(other) {
(d, r) if (r > 0 && *other < 0)
|| (r < 0 && *other > 0) => (d - 1, r + *other),
(d, r) => (d, r),
}
}
/// Calculates the Greatest Common Divisor (GCD) of the number and
/// `other`. The result is always positive.
#[inline]
fn gcd(&self, other: &$T) -> $T {
// Use Stein's algorithm
let mut m = *self;
let mut n = *other;
if m == 0 || n == 0 { return (m | n).abs() }
// find common factors of 2
let shift = (m | n).trailing_zeros();
// The algorithm needs positive numbers, but the minimum value
// can't be represented as a positive one.
// It's also a power of two, so the gcd can be
// calculated by bitshifting in that case
// Assuming two's complement, the number created by the shift
// is positive for all numbers except gcd = abs(min value)
// The call to .abs() causes a panic in debug mode
if m == <$T>::min_value() || n == <$T>::min_value() {
return (1 << shift).abs()
}
// guaranteed to be positive now, rest like unsigned algorithm
m = m.abs();
n = n.abs();
// divide n and m by 2 until odd
// m inside loop
n >>= n.trailing_zeros();
while m != 0 {
m >>= m.trailing_zeros();
if n > m { ::std::mem::swap(&mut n, &mut m) }
m -= n;
}
n << shift
}
/// Calculates the Lowest Common Multiple (LCM) of the number and
/// `other`.
#[inline]
fn lcm(&self, other: &$T) -> $T {
// should not have to recalculate abs
((*self * *other) / self.gcd(other)).abs()
}
/// Deprecated, use `is_multiple_of` instead.
#[inline]
fn divides(&self, other: &$T) -> bool { return self.is_multiple_of(other); }
/// Returns `true` if the number is a multiple of `other`.
#[inline]
fn is_multiple_of(&self, other: &$T) -> bool { *self % *other == 0 }
/// Returns `true` if the number is divisible by `2`
#[inline]
fn is_even(&self) -> bool { (*self) & 1 == 0 }
/// Returns `true` if the number is not divisible by `2`
#[inline]
fn is_odd(&self) -> bool { !self.is_even() }
/// Simultaneous truncated integer division and modulus.
#[inline]
fn div_rem(&self, other: &$T) -> ($T, $T) {
(*self / *other, *self % *other)
}
}
#[cfg(test)]
mod $test_mod {
use Integer;
/// Checks that the division rule holds for:
///
/// - `n`: numerator (dividend)
/// - `d`: denominator (divisor)
/// - `qr`: quotient and remainder
#[cfg(test)]
fn test_division_rule((n,d): ($T,$T), (q,r): ($T,$T)) {
assert_eq!(d * q + r, n);
}
#[test]
fn test_div_rem() {
fn test_nd_dr(nd: ($T,$T), qr: ($T,$T)) {
let (n,d) = nd;
let separate_div_rem = (n / d, n % d);
let combined_div_rem = n.div_rem(&d);
assert_eq!(separate_div_rem, qr);
assert_eq!(combined_div_rem, qr);
test_division_rule(nd, separate_div_rem);
test_division_rule(nd, combined_div_rem);
}
test_nd_dr(( 8, 3), ( 2, 2));
test_nd_dr(( 8, -3), (-2, 2));
test_nd_dr((-8, 3), (-2, -2));
test_nd_dr((-8, -3), ( 2, -2));
test_nd_dr(( 1, 2), ( 0, 1));
test_nd_dr(( 1, -2), ( 0, 1));
test_nd_dr((-1, 2), ( 0, -1));
test_nd_dr((-1, -2), ( 0, -1));
}
#[test]
fn test_div_mod_floor() {
fn test_nd_dm(nd: ($T,$T), dm: ($T,$T)) {
let (n,d) = nd;
let separate_div_mod_floor = (n.div_floor(&d), n.mod_floor(&d));
let combined_div_mod_floor = n.div_mod_floor(&d);
assert_eq!(separate_div_mod_floor, dm);
assert_eq!(combined_div_mod_floor, dm);
test_division_rule(nd, separate_div_mod_floor);
test_division_rule(nd, combined_div_mod_floor);
}
test_nd_dm(( 8, 3), ( 2, 2));
test_nd_dm(( 8, -3), (-3, -1));
test_nd_dm((-8, 3), (-3, 1));
test_nd_dm((-8, -3), ( 2, -2));
test_nd_dm(( 1, 2), ( 0, 1));
test_nd_dm(( 1, -2), (-1, -1));
test_nd_dm((-1, 2), (-1, 1));
test_nd_dm((-1, -2), ( 0, -1));
}
#[test]
fn test_gcd() {
assert_eq!((10 as $T).gcd(&2), 2 as $T);
assert_eq!((10 as $T).gcd(&3), 1 as $T);
assert_eq!((0 as $T).gcd(&3), 3 as $T);
assert_eq!((3 as $T).gcd(&3), 3 as $T);
assert_eq!((56 as $T).gcd(&42), 14 as $T);
assert_eq!((3 as $T).gcd(&-3), 3 as $T);
assert_eq!((-6 as $T).gcd(&3), 3 as $T);
assert_eq!((-4 as $T).gcd(&-2), 2 as $T);
}
#[test]
fn test_gcd_cmp_with_euclidean() {
fn euclidean_gcd(mut m: $T, mut n: $T) -> $T {
while m != 0 {
::std::mem::swap(&mut m, &mut n);
m %= n;
}
n.abs()
}
// gcd(-128, b) = 128 is not representable as positive value
// for i8
for i in -127..127 {
for j in -127..127 {
assert_eq!(euclidean_gcd(i,j), i.gcd(&j));
}
}
// last value
// FIXME: Use inclusive ranges for above loop when implemented
let i = 127;
for j in -127..127 {
assert_eq!(euclidean_gcd(i,j), i.gcd(&j));
}
assert_eq!(127.gcd(&127), 127);
}
#[test]
#[should_panic]
fn test_gcd_min_val_min_val() {
let min = <$T>::min_value();
min.gcd(&min);
}
#[test]
#[should_panic]
fn test_gcd_min_val_0() {
let min = <$T>::min_value();
min.gcd(&0);
}
#[test]
fn test_lcm() {
assert_eq!((1 as $T).lcm(&0), 0 as $T);
assert_eq!((0 as $T).lcm(&1), 0 as $T);
assert_eq!((1 as $T).lcm(&1), 1 as $T);
assert_eq!((-1 as $T).lcm(&1), 1 as $T);
assert_eq!((1 as $T).lcm(&-1), 1 as $T);
assert_eq!((-1 as $T).lcm(&-1), 1 as $T);
assert_eq!((8 as $T).lcm(&9), 72 as $T);
assert_eq!((11 as $T).lcm(&5), 55 as $T);
}
#[test]
fn test_even() {
assert_eq!((-4 as $T).is_even(), true);
assert_eq!((-3 as $T).is_even(), false);
assert_eq!((-2 as $T).is_even(), true);
assert_eq!((-1 as $T).is_even(), false);
assert_eq!((0 as $T).is_even(), true);
assert_eq!((1 as $T).is_even(), false);
assert_eq!((2 as $T).is_even(), true);
assert_eq!((3 as $T).is_even(), false);
assert_eq!((4 as $T).is_even(), true);
}
#[test]
fn test_odd() {
assert_eq!((-4 as $T).is_odd(), false);
assert_eq!((-3 as $T).is_odd(), true);
assert_eq!((-2 as $T).is_odd(), false);
assert_eq!((-1 as $T).is_odd(), true);
assert_eq!((0 as $T).is_odd(), false);
assert_eq!((1 as $T).is_odd(), true);
assert_eq!((2 as $T).is_odd(), false);
assert_eq!((3 as $T).is_odd(), true);
assert_eq!((4 as $T).is_odd(), false);
}
}
)
}
// Instantiate the `Integer` impl (and its unit-test module) for every
// signed primitive integer type.
impl_integer_for_isize!(i8, test_integer_i8);
impl_integer_for_isize!(i16, test_integer_i16);
impl_integer_for_isize!(i32, test_integer_i32);
impl_integer_for_isize!(i64, test_integer_i64);
impl_integer_for_isize!(isize, test_integer_isize);
/// Generates an `Integer` impl (plus a unit-test module named `$test_mod`)
/// for the unsigned primitive integer type `$T`.
macro_rules! impl_integer_for_usize {
    ($T:ty, $test_mod:ident) => (
        impl Integer for $T {
            /// Unsigned integer division. Returns the same result as `div` (`/`).
            #[inline]
            fn div_floor(&self, other: &$T) -> $T { *self / *other }

            /// Unsigned integer modulo operation. Returns the same result as `rem` (`%`).
            #[inline]
            fn mod_floor(&self, other: &$T) -> $T { *self % *other }

            /// Calculates the Greatest Common Divisor (GCD) of the number and `other`
            #[inline]
            fn gcd(&self, other: &$T) -> $T {
                // Use Stein's algorithm (binary GCD): strip shared factors of
                // two with shifts and reduce with subtraction, avoiding the
                // divisions of the Euclidean algorithm.
                let mut m = *self;
                let mut n = *other;
                // gcd(x, 0) == x; `m | n` covers both zero cases at once.
                if m == 0 || n == 0 { return m | n }
                // find common factors of 2
                let shift = (m | n).trailing_zeros();
                // divide n and m by 2 until odd
                // m inside loop
                n >>= n.trailing_zeros();
                while m != 0 {
                    m >>= m.trailing_zeros();
                    if n > m { ::std::mem::swap(&mut n, &mut m) }
                    m -= n;
                }
                // Restore the shared power of two.
                n << shift
            }

            /// Calculates the Lowest Common Multiple (LCM) of the number and `other`.
            #[inline]
            fn lcm(&self, other: &$T) -> $T {
                // Divide by the GCD *before* multiplying so the intermediate
                // product cannot overflow when the LCM itself is representable
                // (the old `(a * b) / gcd` form overflowed first). The
                // division is exact because gcd(self, other) divides `other`.
                *self * (*other / self.gcd(other))
            }

            /// Deprecated, use `is_multiple_of` instead.
            #[inline]
            fn divides(&self, other: &$T) -> bool { self.is_multiple_of(other) }

            /// Returns `true` if the number is a multiple of `other`.
            #[inline]
            fn is_multiple_of(&self, other: &$T) -> bool { *self % *other == 0 }

            /// Returns `true` if the number is divisible by `2`.
            #[inline]
            fn is_even(&self) -> bool { (*self) & 1 == 0 }

            /// Returns `true` if the number is not divisible by `2`.
            #[inline]
            fn is_odd(&self) -> bool { !(*self).is_even() }

            /// Simultaneous truncated integer division and modulus.
            #[inline]
            fn div_rem(&self, other: &$T) -> ($T, $T) {
                (*self / *other, *self % *other)
            }
        }

        #[cfg(test)]
        mod $test_mod {
            use Integer;

            #[test]
            fn test_div_mod_floor() {
                assert_eq!((10 as $T).div_floor(&(3 as $T)), 3 as $T);
                assert_eq!((10 as $T).mod_floor(&(3 as $T)), 1 as $T);
                assert_eq!((10 as $T).div_mod_floor(&(3 as $T)), (3 as $T, 1 as $T));
                assert_eq!((5 as $T).div_floor(&(5 as $T)), 1 as $T);
                assert_eq!((5 as $T).mod_floor(&(5 as $T)), 0 as $T);
                assert_eq!((5 as $T).div_mod_floor(&(5 as $T)), (1 as $T, 0 as $T));
                assert_eq!((3 as $T).div_floor(&(7 as $T)), 0 as $T);
                assert_eq!((3 as $T).mod_floor(&(7 as $T)), 3 as $T);
                assert_eq!((3 as $T).div_mod_floor(&(7 as $T)), (0 as $T, 3 as $T));
            }

            #[test]
            fn test_gcd() {
                assert_eq!((10 as $T).gcd(&2), 2 as $T);
                assert_eq!((10 as $T).gcd(&3), 1 as $T);
                assert_eq!((0 as $T).gcd(&3), 3 as $T);
                assert_eq!((3 as $T).gcd(&3), 3 as $T);
                assert_eq!((56 as $T).gcd(&42), 14 as $T);
            }

            #[test]
            fn test_gcd_cmp_with_euclidean() {
                // Cross-check Stein's algorithm against a straightforward
                // Euclidean implementation over the full u8-sized range.
                fn euclidean_gcd(mut m: $T, mut n: $T) -> $T {
                    while m != 0 {
                        ::std::mem::swap(&mut m, &mut n);
                        m %= n;
                    }
                    n
                }
                for i in 0..255 {
                    for j in 0..255 {
                        assert_eq!(euclidean_gcd(i,j), i.gcd(&j));
                    }
                }
                // last value
                // FIXME: Use inclusive ranges for above loop when implemented
                let i = 255;
                for j in 0..255 {
                    assert_eq!(euclidean_gcd(i,j), i.gcd(&j));
                }
                assert_eq!(255.gcd(&255), 255);
            }

            #[test]
            fn test_lcm() {
                assert_eq!((1 as $T).lcm(&0), 0 as $T);
                assert_eq!((0 as $T).lcm(&1), 0 as $T);
                assert_eq!((1 as $T).lcm(&1), 1 as $T);
                assert_eq!((8 as $T).lcm(&9), 72 as $T);
                assert_eq!((11 as $T).lcm(&5), 55 as $T);
                // 255 is the maximum u8 value, so this would overflow the
                // intermediate product in the naive (a * b) / gcd form.
                assert_eq!((15 as $T).lcm(&17), 255 as $T);
            }

            #[test]
            fn test_is_multiple_of() {
                assert!((6 as $T).is_multiple_of(&(6 as $T)));
                assert!((6 as $T).is_multiple_of(&(3 as $T)));
                assert!((6 as $T).is_multiple_of(&(1 as $T)));
            }

            #[test]
            fn test_even() {
                assert_eq!((0 as $T).is_even(), true);
                assert_eq!((1 as $T).is_even(), false);
                assert_eq!((2 as $T).is_even(), true);
                assert_eq!((3 as $T).is_even(), false);
                assert_eq!((4 as $T).is_even(), true);
            }

            #[test]
            fn test_odd() {
                assert_eq!((0 as $T).is_odd(), false);
                assert_eq!((1 as $T).is_odd(), true);
                assert_eq!((2 as $T).is_odd(), false);
                assert_eq!((3 as $T).is_odd(), true);
                assert_eq!((4 as $T).is_odd(), false);
            }
        }
    )
}
// Instantiate the `Integer` impl (and its unit-test module) for every
// unsigned primitive integer type.
impl_integer_for_usize!(u8, test_integer_u8);
impl_integer_for_usize!(u16, test_integer_u16);
impl_integer_for_usize!(u32, test_integer_u32);
impl_integer_for_usize!(u64, test_integer_u64);
impl_integer_for_usize!(usize, test_integer_usize);
|
// Copyright (C) 2016, 2017, 2018 Élisabeth HENRY.
//
// This file is part of Crowbook.
//
// Crowbook is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation, either version 2.1 of the License, or
// (at your option) any later version.
//
// Crowbook is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Crowbook. If not, see <http://www.gnu.org/licenses/>.
//! Note: this documentation is relative to `crowbook` *as a library*.
//! For documentation regarding the *program* `crowbook`, see
//! [the Github page](https://github.com/lise-henry/crowbook).
//!
//! # Usage
//!
//! Just like any other library, just add a dependency on `crowbook`
//! in your `Cargo.toml` file. You will probably want to deactivate
//! default features that are mostly useful for the binary:
//!
//! ```ignore
//! crowbook = {version = "0.11", default-features = false}
//! ```
//!
//! # Book
//!
//! The central structure of Crowbook is `Book`, which coordinates everything.
//!
//! Its roles are:
//!
//! * read a book configuration file and setting the book options accordingly;
//! * read the chapters (written in Markdown) listed in this
//! configuration file and pass them to `Parser`, get back an AST and store it in memory
//! * call the various renderers according to the book's parameters
//! and generate the appropriate files.
//!
//! ## Example
//!
//! ```ignore
//! use crowbook::Book;
//! // Reads configuration file "foo.book" and render all formats according to this
//! // configuration file
//! Book::new()
//! .load_file("foo.book").unwrap()
//! .render_all().unwrap();
//! ```
//!
//! This is basically the code for the `crowbook` binary (though it contains a
//! bit more error handling, checking parameters from command line and so on).
//! This is, however, not very interesting for a library usage.
//!
//! The `Book` structure, however, exposes its `chapter` fields, which contains
//! a vector with an element by chapter. With it, you can access the Markdown
//! for all chapters represented as an Abstract Syntax Tree (i.e., a vector of `Token`s).
//! It is thus possible to create a new renderer (or manipulate this AST in other ways).
//!
//! # Parser
//!
//! It is also possible to directly use `Parser` to transform some markdown string or file
//! to this AST:
//!
//! ```
//! use crowbook::{Parser,Token};
//! let mut parser = Parser::new();
//! let result = parser.parse("Some *valid* Markdown").unwrap();
//! assert_eq!(format!("{:?}", result),
//! r#"[Paragraph([Str("Some "), Emphasis([Str("valid")]), Str(" Markdown")])]"#);
//! ```
//!
//! Of course, you probably want to do something else with this AST than display it.
//! Let's assume you want to count the number of links in a document.
//!
//! ```
//! use crowbook::{Parser,Token};
//! fn count(ast: &[Token]) -> u32 {
//! let mut n = 0;
//! for token in ast {
//! match *token {
//! // It's a link, increase counter
//! Token::Link(_,_,_) => n += 1,
//! // It's not a link, let's count the number of links
//! // inside of the inner element (if there is one)
//! _ => {
//! if let Some(sub_ast) = token.inner() {
//! n += count(sub_ast);
//! }
//! }
//! }
//! }
//! n
//! }
//!
//! let md = "# Here's a [link](http://foo.bar) #\n And *another [one](http://foo.bar)* !";
//!
//! let mut parser = Parser::new();
//! let ast = parser.parse(md).unwrap();
//! assert_eq!(count(&ast), 2);
//! ```
extern crate pulldown_cmark as cmark;
extern crate mustache;
extern crate yaml_rust;
extern crate mime_guess;
extern crate walkdir;
extern crate rustc_serialize;
extern crate rayon;
extern crate crowbook_text_processing;
extern crate crowbook_intl_runtime;
extern crate numerals;
extern crate epub_builder;
extern crate uuid;
extern crate hyphenation;
#[macro_use]
extern crate log;
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "indicatif")]
extern crate indicatif;
#[cfg(feature = "binary")]
extern crate console;
#[cfg(feature = "binary")]
extern crate textwrap;
#[cfg(feature = "proofread")]
extern crate hyper;
#[cfg(feature = "proofread")]
extern crate url;
#[cfg(feature = "proofread")]
extern crate caribon;
#[cfg(feature = "proofread")]
extern crate serde;
#[cfg(feature = "proofread")]
extern crate serde_json;
#[cfg(feature = "proofread")]
#[macro_use]
extern crate serde_derive;
#[cfg(feature = "syntect")]
extern crate syntect;
#[cfg(feature = "nightly")]
extern crate punkt;
pub use parser::Parser;
pub use book::Book;
pub use bookoption::BookOption;
pub use bookoptions::BookOptions;
pub use error::{Result, Error, Source};
pub use token::Token;
pub use token::Data;
pub use number::Number;
pub use resource_handler::ResourceHandler;
pub use renderer::Renderer;
pub use book_renderer::BookRenderer;
pub use chapter::Chapter;
pub use stats::Stats;
#[macro_use]
#[doc(hidden)]
mod localize_macros;
#[macro_use]
mod html;
mod html_dir;
mod error;
mod book;
mod epub;
mod latex;
mod odt;
mod parser;
mod token;
mod cleaner;
mod chapter;
mod number;
mod resource_handler;
mod bookoptions;
mod lang;
mod renderer;
mod book_renderer;
mod html_single;
mod html_if;
mod syntax;
mod stats;
#[cfg(feature = "binary")]
mod style;
#[cfg(not(feature = "binary"))]
mod style_stubs;
#[cfg(not(feature = "binary"))]
use style_stubs as style;
#[cfg(feature = "indicatif")]
mod book_bars;
#[cfg(not(feature = "indicatif"))]
mod book_bars_stubs;
#[cfg(not(feature = "indicatif"))]
use book_bars_stubs as book_bars;
mod zipper;
mod templates;
mod bookoption;
mod misc;
mod text_view;
#[cfg(feature = "proofread")]
mod grammar_check;
#[cfg(feature = "proofread")]
mod grammalecte;
#[cfg(feature = "proofread")]
mod repetition_check;
#[cfg(test)]
mod tests;
Bump crowbook version in lib.rs
// Copyright (C) 2016, 2017, 2018 Élisabeth HENRY.
//
// This file is part of Crowbook.
//
// Crowbook is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation, either version 2.1 of the License, or
// (at your option) any later version.
//
// Crowbook is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Crowbook. If not, see <http://www.gnu.org/licenses/>.
//! Note: this documentation is relative to `crowbook` *as a library*.
//! For documentation regarding the *program* `crowbook`, see
//! [the Github page](https://github.com/lise-henry/crowbook).
//!
//! # Usage
//!
//! Just like any other library, just add a dependency on `crowbook`
//! in your `Cargo.toml` file. You will probably want to deactivate
//! default features that are mostly useful for the binary:
//!
//! ```ignore
//! crowbook = {version = "0.14.1", default-features = false}
//! ```
//!
//! # Book
//!
//! The central structure of Crowbook is `Book`, which coordinates everything.
//!
//! Its roles are:
//!
//! * read a book configuration file and setting the book options accordingly;
//! * read the chapters (written in Markdown) listed in this
//! configuration file and pass them to `Parser`, get back an AST and store it in memory
//! * call the various renderers according to the book's parameters
//! and generate the appropriate files.
//!
//! ## Example
//!
//! ```ignore
//! use crowbook::Book;
//! // Reads configuration file "foo.book" and render all formats according to this
//! // configuration file
//! Book::new()
//! .load_file("foo.book").unwrap()
//! .render_all().unwrap();
//! ```
//!
//! This is basically the code for the `crowbook` binary (though it contains a
//! bit more error handling, checking parameters from command line and so on).
//! This is, however, not very interesting for a library usage.
//!
//! The `Book` structure, however, exposes its `chapter` fields, which contains
//! a vector with an element by chapter. With it, you can access the Markdown
//! for all chapters represented as an Abstract Syntax Tree (i.e., a vector of `Token`s).
//! It is thus possible to create a new renderer (or manipulate this AST in other ways).
//!
//! # Parser
//!
//! It is also possible to directly use `Parser` to transform some markdown string or file
//! to this AST:
//!
//! ```
//! use crowbook::{Parser,Token};
//! let mut parser = Parser::new();
//! let result = parser.parse("Some *valid* Markdown").unwrap();
//! assert_eq!(format!("{:?}", result),
//! r#"[Paragraph([Str("Some "), Emphasis([Str("valid")]), Str(" Markdown")])]"#);
//! ```
//!
//! Of course, you probably want to do something else with this AST than display it.
//! Let's assume you want to count the number of links in a document.
//!
//! ```
//! use crowbook::{Parser,Token};
//! fn count(ast: &[Token]) -> u32 {
//! let mut n = 0;
//! for token in ast {
//! match *token {
//! // It's a link, increase counter
//! Token::Link(_,_,_) => n += 1,
//! // It's not a link, let's count the number of links
//! // inside of the inner element (if there is one)
//! _ => {
//! if let Some(sub_ast) = token.inner() {
//! n += count(sub_ast);
//! }
//! }
//! }
//! }
//! n
//! }
//!
//! let md = "# Here's a [link](http://foo.bar) #\n And *another [one](http://foo.bar)* !";
//!
//! let mut parser = Parser::new();
//! let ast = parser.parse(md).unwrap();
//! assert_eq!(count(&ast), 2);
//! ```
extern crate pulldown_cmark as cmark;
extern crate mustache;
extern crate yaml_rust;
extern crate mime_guess;
extern crate walkdir;
extern crate rustc_serialize;
extern crate rayon;
extern crate crowbook_text_processing;
extern crate crowbook_intl_runtime;
extern crate numerals;
extern crate epub_builder;
extern crate uuid;
extern crate hyphenation;
#[macro_use]
extern crate log;
#[macro_use]
extern crate lazy_static;
#[cfg(feature = "indicatif")]
extern crate indicatif;
#[cfg(feature = "binary")]
extern crate console;
#[cfg(feature = "binary")]
extern crate textwrap;
#[cfg(feature = "proofread")]
extern crate hyper;
#[cfg(feature = "proofread")]
extern crate url;
#[cfg(feature = "proofread")]
extern crate caribon;
#[cfg(feature = "proofread")]
extern crate serde;
#[cfg(feature = "proofread")]
extern crate serde_json;
#[cfg(feature = "proofread")]
#[macro_use]
extern crate serde_derive;
#[cfg(feature = "syntect")]
extern crate syntect;
#[cfg(feature = "nightly")]
extern crate punkt;
pub use parser::Parser;
pub use book::Book;
pub use bookoption::BookOption;
pub use bookoptions::BookOptions;
pub use error::{Result, Error, Source};
pub use token::Token;
pub use token::Data;
pub use number::Number;
pub use resource_handler::ResourceHandler;
pub use renderer::Renderer;
pub use book_renderer::BookRenderer;
pub use chapter::Chapter;
pub use stats::Stats;
#[macro_use]
#[doc(hidden)]
mod localize_macros;
#[macro_use]
mod html;
mod html_dir;
mod error;
mod book;
mod epub;
mod latex;
mod odt;
mod parser;
mod token;
mod cleaner;
mod chapter;
mod number;
mod resource_handler;
mod bookoptions;
mod lang;
mod renderer;
mod book_renderer;
mod html_single;
mod html_if;
mod syntax;
mod stats;
#[cfg(feature = "binary")]
mod style;
#[cfg(not(feature = "binary"))]
mod style_stubs;
#[cfg(not(feature = "binary"))]
use style_stubs as style;
#[cfg(feature = "indicatif")]
mod book_bars;
#[cfg(not(feature = "indicatif"))]
mod book_bars_stubs;
#[cfg(not(feature = "indicatif"))]
use book_bars_stubs as book_bars;
mod zipper;
mod templates;
mod bookoption;
mod misc;
mod text_view;
#[cfg(feature = "proofread")]
mod grammar_check;
#[cfg(feature = "proofread")]
mod grammalecte;
#[cfg(feature = "proofread")]
mod repetition_check;
#[cfg(test)]
mod tests;
|
use cpu::{Cpu};
use std::old_io::{File, BytesReader, Seek};
use std::old_io;
/// Copies every byte of `file` (from its current position) into CPU memory,
/// starting at address 0. Panics on any read error.
pub fn load_bin(file: &mut File, cpu: &mut Cpu) -> () {
    // `enumerate` supplies the destination address, replacing a manual counter.
    for (addr, result) in file.bytes().enumerate() {
        match result {
            Ok(number) => cpu.memory.write_byte(addr, number as i8),
            Err(e) => panic!("{}", e.desc),
        }
    }
}
/// Loads a .c16 ROM image into CPU memory.
///
/// If the file starts with the expected magic number, the 16-byte header is
/// parsed (ROM size, start address, CRC-32 checksum) and validated before the
/// ROM body is loaded; otherwise the whole file is treated as a raw binary
/// and loaded from offset 0. Panics on any read/seek error.
pub fn load_c16(file: &mut File, cpu: &mut Cpu) -> () {
    let magic_number = match file.read_be_u32() {
        Ok(number) => number,
        Err(e) => panic!("{}", e.desc)
    };
    // 0x43483135 is ASCII "CH15".
    // NOTE(review): the Chip16 header magic is normally "CH16" (0x43483136) —
    // confirm this value is intentional.
    if magic_number == 0x43483135 {
        // Presumably the spec-version and reserved header bytes; the read
        // results (and any errors) are discarded — TODO confirm.
        file.read_u8();
        file.read_u8();
        // Size of the ROM body, excluding the 16-byte header.
        let rom_size: u32 = match file.read_be_u32() {
            Ok(rom) => rom,
            Err(e) => panic!("{}", e.desc)
        };
        // Initial program counter (start address) from the header.
        cpu.pc = match file.read_be_i16() {
            Ok(ip) => ip,
            Err(e) => panic!("{}", e.desc)
        };
        // CRC-32 checksum of the ROM body, as stored in the header.
        let checksum: u32 = match file.read_be_u32(){
            Ok(sum) => sum,
            Err(e) => panic!("{}", e.desc)
        };
        check_rom_size(file, rom_size);
        crc32_checksum(file, checksum);
        // Position the cursor just past the 16-byte header before loading.
        match file.seek(0x10, old_io::SeekSet){
            Ok(ok) => ok,
            Err(e) => panic!("{}", e.desc)
        };
        load_bin(file, cpu);
    } else {
        // No recognized header: rewind and load the whole file as raw binary.
        match file.seek(0, old_io::SeekSet){
            Ok(ok) => ok,
            Err(e) => panic!("{}", e.desc)
        };
        load_bin(file, cpu);
    }
}
/// Panics unless the file size minus the 16-byte header matches the ROM size
/// declared in the header.
fn check_rom_size(file: &mut File, rom_size: u32) -> () {
    match file.stat() {
        Ok(file_stat) => {
            // The header itself (0x10 bytes) is not counted in rom_size.
            if file_stat.size - 0x10 != rom_size as u64 {
                panic!("The ROM size is not what the header says");
            }
        }
        Err(e) => panic!("{}", e.desc),
    }
}
// Stub: checksum verification is not implemented yet, so every ROM is
// accepted regardless of the header checksum. Both arguments are unused.
fn crc32_checksum(file: &mut File, checksum: u32) -> () {
    ()
}
Added checking the checksum of a .c16 file
use cpu::{Cpu};
use std::old_io::{File, BytesReader, Seek};
use std::old_io;
/// Copies every byte of `file` (from its current position) into CPU memory,
/// starting at address 0. Panics on any read error.
pub fn load_bin(file: &mut File, cpu: &mut Cpu) -> () {
    // `enumerate` supplies the destination address, replacing a manual counter.
    for (addr, result) in file.bytes().enumerate() {
        match result {
            Ok(number) => cpu.memory.write_byte(addr, number as i8),
            Err(e) => panic!("{}", e.desc),
        }
    }
}
/// Loads a .c16 ROM image into CPU memory.
///
/// If the file starts with the expected magic number, the 16-byte header is
/// parsed (ROM size, start address, CRC-32 checksum) and both the size and
/// the checksum are verified before the ROM body is loaded; otherwise the
/// whole file is treated as a raw binary and loaded from offset 0. Panics on
/// any read/seek error.
pub fn load_c16(file: &mut File, cpu: &mut Cpu) -> () {
    let magic_number = match file.read_be_u32() {
        Ok(number) => number,
        Err(e) => panic!("{}", e.desc)
    };
    // 0x43483135 is ASCII "CH15".
    // NOTE(review): the Chip16 header magic is normally "CH16" (0x43483136) —
    // confirm this value is intentional.
    if magic_number == 0x43483135 {
        // Presumably the spec-version and reserved header bytes; the read
        // results (and any errors) are discarded — TODO confirm.
        file.read_u8();
        file.read_u8();
        // Size of the ROM body, excluding the 16-byte header.
        let rom_size: u32 = match file.read_be_u32() {
            Ok(rom) => rom,
            Err(e) => panic!("{}", e.desc)
        };
        // Initial program counter (start address) from the header.
        cpu.pc = match file.read_be_i16() {
            Ok(ip) => ip,
            Err(e) => panic!("{}", e.desc)
        };
        // CRC-32 checksum of the ROM body, as stored in the header.
        let checksum: u32 = match file.read_be_u32(){
            Ok(sum) => sum,
            Err(e) => panic!("{}", e.desc)
        };
        check_rom_size(file, rom_size);
        crc32_checksum(file, checksum);
        // Position the cursor just past the 16-byte header before loading
        // (crc32_checksum consumed the file to its end).
        match file.seek(0x10, old_io::SeekSet){
            Ok(ok) => ok,
            Err(e) => panic!("{}", e.desc)
        };
        load_bin(file, cpu);
    } else {
        // No recognized header: rewind and load the whole file as raw binary.
        match file.seek(0, old_io::SeekSet){
            Ok(ok) => ok,
            Err(e) => panic!("{}", e.desc)
        };
        load_bin(file, cpu);
    }
}
/// Panics unless the file size minus the 16-byte header matches the ROM size
/// declared in the header.
fn check_rom_size(file: &mut File, rom_size: u32) -> () {
    match file.stat() {
        Ok(file_stat) => {
            // The header itself (0x10 bytes) is not counted in rom_size.
            if file_stat.size - 0x10 != rom_size as u64 {
                panic!("The ROM size is not what the header says");
            }
        }
        Err(e) => panic!("{}", e.desc),
    }
}
/// Computes the CRC-32 of the ROM body (everything after the 16-byte header)
/// and panics if it does not match the checksum stored in the header.
///
/// The table values match the standard reflected CRC-32 (polynomial
/// 0xEDB88320, as used by zlib/IEEE 802.3).
fn crc32_checksum(file: &mut File, checksum: u32) -> () {
    let table: [u32; 256] =[ //Precalculated table
        0x00000000, 0x77073096, 0xee0e612c, 0x990951ba,
        0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3,
        0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988,
        0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91,
        0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
        0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7,
        0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec,
        0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5,
        0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172,
        0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
        0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940,
        0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59,
        0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116,
        0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f,
        0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
        0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d,
        0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a,
        0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433,
        0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818,
        0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
        0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e,
        0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457,
        0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c,
        0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65,
        0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
        0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb,
        0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0,
        0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9,
        0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086,
        0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
        0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4,
        0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad,
        0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a,
        0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683,
        0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
        0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1,
        0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe,
        0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7,
        0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc,
        0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
        0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252,
        0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b,
        0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60,
        0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79,
        0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
        0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f,
        0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04,
        0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d,
        0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a,
        0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
        0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38,
        0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21,
        0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e,
        0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777,
        0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
        0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45,
        0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2,
        0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db,
        0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0,
        0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
        0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6,
        0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf,
        0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94,
        0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d];
    // CRC starts from all-ones, per the standard algorithm.
    let mut crc: u32 = 0xFFFFFFFF;
    // The checksum covers only the ROM body, so skip the 16-byte header.
    match file.seek(0x10, old_io::SeekSet){
        Ok(ok) => ok,
        Err(e) => panic!("{}", e.desc)
    };
    // Table-driven update: one table lookup per input byte.
    for byte in file.bytes() {
        let byte = match byte {
            Ok(number) => number as u8,
            Err(e) => panic!("{}", e.desc),
        };
        crc = (crc >> 8) ^ table[(crc as u8 ^ byte) as usize];
    }
    // Final inversion, per the standard algorithm.
    crc = !crc;
    if checksum != crc {
        panic!("The ROM is corrupted.");
    };
    ()
}
//! TIS-100 emulator implementations.
use core::Port::*;
use io::IoBus;
use node::{Node, BasicExecutionNode};
use save::Save;
/// Implements the *Simple Sandbox* puzzle from the game.
///
/// Example:
///
/// ```
/// use tis_100::save::parse_save;
/// use tis_100::machine::Sandbox;
///
/// // This program reads the value from the console and simply passes it to the console output.
/// let src = "@1\nMOV UP DOWN\n@5\nMOV UP DOWN\n@9\nMOV UP RIGHT\n@10\nMOV LEFT DOWN\n";
///
/// let save = parse_save(src).unwrap();
/// let mut sandbox = Sandbox::with_save(&save);
///
/// sandbox.write_console(42);
///
/// for _ in 0..5 {
/// sandbox.step();
/// }
///
/// assert_eq!(sandbox.read_console(), Some(42));
/// ```
#[derive(Debug)]
pub struct Sandbox {
    // One BasicExecutionNode per grid position (indices 0-11; the connect
    // calls in `setup` imply a 4-column by 3-row layout).
    nodes: Vec<BasicExecutionNode>,
    // Shared IO bus carrying all port connections between nodes.
    bus: IoBus,
}
impl Sandbox {
    /// Construct a new `Sandbox` with programs from the `Save`.
    pub fn with_save(save: &Save) -> Sandbox {
        let mut sandbox = Sandbox {
            nodes: Vec::new(),
            bus: IoBus::new(),
        };
        sandbox.setup(save);
        sandbox
    }
    /// Setup the connections between nodes. Each node is fully connected to its neighbors.
    fn setup(&mut self, save: &Save) {
        // Populate all 12 grid positions; positions missing from the save
        // get an empty (no-program) node.
        for node_num in 0..12 {
            match save.get(&node_num) {
                Some(prog) => self.nodes.push(BasicExecutionNode::with_program(prog.clone())),
                None => self.nodes.push(BasicExecutionNode::new()),
            };
        }
        // Horizontal (RIGHT/LEFT) links within each of the three rows.
        self.bus.connect_full(0, 1, RIGHT)
            .connect_full(1, 2, RIGHT)
            .connect_full(2, 3, RIGHT)
            .connect_full(4, 5, RIGHT)
            .connect_full(5, 6, RIGHT)
            .connect_full(6, 7, RIGHT)
            .connect_full(8, 9, RIGHT)
            .connect_full(9, 10, RIGHT)
            .connect_full(10, 11, RIGHT);
        // Vertical (DOWN/UP) links between rows. Node id 12 is a console
        // pseudo-node with one-way (half) connections.
        self.bus.connect_full(0, 4, DOWN)
            .connect_full(1, 5, DOWN)
            .connect_full(2, 6, DOWN)
            .connect_full(3, 7, DOWN)
            .connect_full(4, 8, DOWN)
            .connect_full(5, 9, DOWN)
            .connect_full(6, 10, DOWN)
            .connect_full(7, 11, DOWN)
            // The console output is connected to node 10.
            .connect_half(10, 12, DOWN)
            // The console input is connected to node 1.
            .connect_half(12, 1, DOWN);
    }
    /// Step each node through one instruction.
    pub fn step(&mut self) {
        // Step each node
        for (id, node) in self.nodes.iter_mut().enumerate() {
            let mut view = self.bus.view(id);
            node.step(&mut view);
        }
        // Synchronize writes and reads on each node
        for (id, node) in self.nodes.iter_mut().enumerate() {
            let mut view = self.bus.view(id);
            node.sync(&mut view);
        }
        // Commit writes so they are available on the next cycle.
        self.bus.commit();
    }
    /// Send a value from the console to node 1.
    pub fn write_console(&mut self, value: isize) {
        self.bus.view(12).write(DOWN, value);
    }
    /// Read a value from node 10 and send it to the console.
    pub fn read_console(&mut self) -> Option<isize> {
        self.bus.view(12).read(UP)
    }
}
Refactor Sandbox to use a lower-level Tis100 CPU.
//! TIS-100 emulator implementations.
use std::collections::VecMap;
use core::Port::*;
use io::IoBus;
use node::{Node, BasicExecutionNode};
use save::Save;
/// Implements the *Simple Sandbox* puzzle from the game.
///
/// Example:
///
/// ```
/// use tis_100::save::parse_save;
/// use tis_100::machine::Sandbox;
///
/// // This program reads the value from the console and simply passes it to the console output.
/// let src = "@1\nMOV UP DOWN\n@5\nMOV UP DOWN\n@9\nMOV UP RIGHT\n@10\nMOV LEFT DOWN\n";
///
/// let save = parse_save(src).unwrap();
/// let mut sandbox = Sandbox::with_save(&save);
///
/// sandbox.write_console(42);
///
/// for _ in 0..5 {
/// sandbox.step();
/// }
///
/// assert_eq!(sandbox.read_console(), Some(42));
/// ```
#[derive(Debug)]
pub struct Sandbox {
    // The underlying TIS-100 CPU holding the node grid and IO bus.
    cpu: Tis100,
    // Pseudo-node id returned by `Tis100::add_input`; used for console input.
    write_node: usize,
    // Pseudo-node id returned by `Tis100::add_output`; used for console output.
    read_node: usize,
}
impl Sandbox {
    /// Construct a new `Sandbox` with programs from the `Save`.
    pub fn with_save(save: &Save) -> Sandbox {
        let mut sandbox = Sandbox {
            cpu: Tis100::new(),
            // write_node and read_node will both be set during setup.
            write_node: 0,
            read_node: 0,
        };
        sandbox.setup(save);
        sandbox
    }
    /// Setup the connections between nodes. Each node is fully connected to its neighbors.
    fn setup(&mut self, save: &Save) {
        // Populate all 12 grid positions; positions missing from the save
        // get an empty (no-program) node.
        for node_num in 0..12 {
            match save.get(&node_num) {
                Some(prog) => self.cpu.add_node(node_num, Box::new(BasicExecutionNode::with_program(prog.clone()))),
                None => self.cpu.add_node(node_num, Box::new(BasicExecutionNode::new())),
            };
        }
        // Console input feeds node 1; console output drains node 10.
        self.write_node = self.cpu.add_input(1);
        self.read_node = self.cpu.add_output(10);
    }
    /// Step each node through one instruction.
    pub fn step(&mut self) {
        self.cpu.step();
    }
    /// Send a value from the console to node 1.
    pub fn write_console(&mut self, value: isize) {
        self.cpu.write(self.write_node, value);
    }
    /// Read a value from node 10 and send it to the console.
    pub fn read_console(&mut self) -> Option<isize> {
        self.cpu.read(self.read_node)
    }
}
/// An empty TIS-100 CPU.
#[derive(Debug)]
pub struct Tis100 {
    // Grid nodes keyed by id 0-11; boxed so heterogeneous Node impls fit.
    nodes: VecMap<Box<Node>>,
    // Used to track available node IDs for input and output ports
    next_node: usize,
    bus: IoBus,
}
impl Tis100 {
    /// Construct a new, empty `Tis100`.
    pub fn new() -> Tis100 {
        let mut tis100 = Tis100 {
            nodes: VecMap::new(),
            // Ids 0-11 are reserved for grid nodes; pseudo-node ids for
            // input/output ports are handed out starting at 12.
            next_node: 12,
            bus: IoBus::new(),
        };
        tis100.setup();
        tis100
    }
    /// Setup the IO connections between nodes.
    fn setup(&mut self) {
        // Horizontal (RIGHT/LEFT) links within each of the three rows of the
        // 4-column grid.
        self.bus.connect_full(0, 1, RIGHT)
            .connect_full(1, 2, RIGHT)
            .connect_full(2, 3, RIGHT)
            .connect_full(4, 5, RIGHT)
            .connect_full(5, 6, RIGHT)
            .connect_full(6, 7, RIGHT)
            .connect_full(8, 9, RIGHT)
            .connect_full(9, 10, RIGHT)
            .connect_full(10, 11, RIGHT);
        // Vertical (DOWN/UP) links between rows.
        self.bus.connect_full(0, 4, DOWN)
            .connect_full(1, 5, DOWN)
            .connect_full(2, 6, DOWN)
            .connect_full(3, 7, DOWN)
            .connect_full(4, 8, DOWN)
            .connect_full(5, 9, DOWN)
            .connect_full(6, 10, DOWN)
            .connect_full(7, 11, DOWN);
    }
    /// Add a new node with the given ID to the system.
    pub fn add_node(&mut self, index: usize, node: Box<Node>) {
        self.nodes.insert(index, node);
    }
    /// Create a new input port to a CPU node. Returns a node ID that can be used to write values on
    /// the port. Only nodes 0 through 3 can receive inputs. All inputs are connected `UP` from the
    /// receiving nodes.
    pub fn add_input(&mut self, node: usize) -> usize {
        assert!(node < 4);
        // Allocate a fresh pseudo-node id for the port.
        let input_node = self.next_node;
        self.next_node += 1;
        self.bus.connect_half(input_node, node, DOWN);
        input_node
    }
    /// Create a new output port from a CPU node. Returns a node ID that can be used to read values
    /// from the port. Only nodes 8 through 11 can send outputs. All outputs are connected `DOWN`
    /// from the sending nodes.
    pub fn add_output(&mut self, node: usize) -> usize {
        assert!(node > 7 && node < 12);
        // Allocate a fresh pseudo-node id for the port.
        let output_node = self.next_node;
        self.next_node += 1;
        self.bus.connect_half(node, output_node, DOWN);
        output_node
    }
    /// Read a value from an output node.
    pub fn read(&mut self, node: usize) -> Option<isize> {
        self.bus.view(node).read(UP)
    }
    /// Send a value to an input node.
    pub fn write(&mut self, node: usize, value: isize) {
        self.bus.view(node).write(DOWN, value);
    }
    /// Execute one instruction cycle on all nodes in the system.
    pub fn step(&mut self) {
        // Step each node
        for (id, node) in self.nodes.iter_mut() {
            let mut view = self.bus.view(id);
            node.step(&mut view);
        }
        // Synchronize writes and reads on each node
        for (id, node) in self.nodes.iter_mut() {
            let mut view = self.bus.view(id);
            node.sync(&mut view);
        }
        // Commit writes so they are available on the next cycle.
        self.bus.commit();
    }
}
|
use std::collections::BTreeMap;
use std::string::ToString;
use std::default::Default;
use rustc_serialize::json::{Json,ToJson};
use parse::*;
use parse::Presence::*;
use record;
use record::{Record, PartialRecord};
/// The role a mailbox plays (inbox, sent, trash, ...), presumably per the
/// JMAP mail spec given the surrounding field names — TODO confirm.
///
/// `Custom` holds a user-defined role name; on the wire it carries an "x-"
/// prefix (see the `ToString` and `FromJson` impls).
#[derive(Clone, PartialEq, Debug)]
pub enum MailboxRole {
    Inbox,
    Archive,
    Drafts,
    Outbox,
    Sent,
    Trash,
    Spam,
    Templates,
    Custom(String),
}
impl ToString for MailboxRole {
    /// Serialize the role to its wire form; custom roles get an "x-" prefix.
    fn to_string(&self) -> String {
        let fixed = match *self {
            MailboxRole::Inbox => "inbox",
            MailboxRole::Archive => "archive",
            MailboxRole::Drafts => "drafts",
            MailboxRole::Outbox => "outbox",
            MailboxRole::Sent => "sent",
            MailboxRole::Trash => "trash",
            MailboxRole::Spam => "spam",
            MailboxRole::Templates => "templates",
            MailboxRole::Custom(ref r) => return format!("x-{}", r),
        };
        fixed.to_string()
    }
}
impl ToJson for MailboxRole {
    /// The JSON form is simply the string representation.
    fn to_json(&self) -> Json {
        Json::String(self.to_string())
    }
}
impl FromJson for MailboxRole {
    /// Parse a role from its JSON string form.
    ///
    /// Known role names map to their variants; anything else must carry the
    /// "x-" custom-role prefix produced by `to_string`. Previously any string
    /// containing a '-' was accepted (e.g. "foo-bar" parsed as Custom("bar")),
    /// which broke the round-trip with `to_string`; now only "x-..." parses.
    fn from_json(json: &Json) -> Result<MailboxRole,ParseError> {
        match *json {
            Json::String(ref v) => match v.as_ref() {
                "inbox" => Ok(MailboxRole::Inbox),
                "archive" => Ok(MailboxRole::Archive),
                "drafts" => Ok(MailboxRole::Drafts),
                "outbox" => Ok(MailboxRole::Outbox),
                "sent" => Ok(MailboxRole::Sent),
                "trash" => Ok(MailboxRole::Trash),
                "spam" => Ok(MailboxRole::Spam),
                "templates" => Ok(MailboxRole::Templates),
                r => {
                    // Custom roles are serialized as "x-<name>"; require that
                    // exact prefix rather than splitting on the first '-'.
                    if r.starts_with("x-") {
                        Ok(MailboxRole::Custom(r[2..].to_string()))
                    } else {
                        Err(ParseError::InvalidStructure("MailboxRole".to_string()))
                    }
                }
            },
            _ => Err(ParseError::InvalidJsonType("MailboxRole".to_string())),
        }
    }
}
// Declares the `Mailbox` record (and its `PartialMailbox` companion), mapping
// each Rust field to its JSON key.
// NOTE(review): `precedence` is renamed to "sortOrder" in a later spec
// revision; this version still uses the old key.
make_record_type!(Mailbox, PartialMailbox, "Mailbox",
    name: String => "name",
    parent_id: Option<String> => "parentId",
    role: Option<MailboxRole> => "role",
    precedence: i32 => "precedence",
    must_be_only_mailbox: bool => "mustBeOnlyMailbox",
    may_read_items: bool => "mayReadItems",
    may_add_items: bool => "mayAddItems",
    may_remove_items: bool => "mayRemoveItems",
    may_create_child: bool => "mayCreateChild",
    may_rename: bool => "mayRename",
    may_delete: bool => "mayDelete",
    total_messages: usize => "totalMessages",
    unread_messages: usize => "unreadMessages",
    total_threads: usize => "totalThreads",
    unread_threads: usize => "unreadThreads"
);
Rename Mailbox precedence to sortOrder (latest spec)
use std::collections::BTreeMap;
use std::string::ToString;
use std::default::Default;
use rustc_serialize::json::{Json,ToJson};
use parse::*;
use parse::Presence::*;
use record;
use record::{Record, PartialRecord};
/// The role a mailbox plays (inbox, sent, trash, ...), presumably per the
/// JMAP mail spec given the surrounding field names — TODO confirm.
///
/// `Custom` holds a user-defined role name; on the wire it carries an "x-"
/// prefix (see the `ToString` and `FromJson` impls).
#[derive(Clone, PartialEq, Debug)]
pub enum MailboxRole {
    Inbox,
    Archive,
    Drafts,
    Outbox,
    Sent,
    Trash,
    Spam,
    Templates,
    Custom(String),
}
impl ToString for MailboxRole {
    /// Serialize the role to its wire form; custom roles get an "x-" prefix.
    fn to_string(&self) -> String {
        let fixed = match *self {
            MailboxRole::Inbox => "inbox",
            MailboxRole::Archive => "archive",
            MailboxRole::Drafts => "drafts",
            MailboxRole::Outbox => "outbox",
            MailboxRole::Sent => "sent",
            MailboxRole::Trash => "trash",
            MailboxRole::Spam => "spam",
            MailboxRole::Templates => "templates",
            MailboxRole::Custom(ref r) => return format!("x-{}", r),
        };
        fixed.to_string()
    }
}
impl ToJson for MailboxRole {
    /// The JSON form is simply the string representation.
    fn to_json(&self) -> Json {
        Json::String(self.to_string())
    }
}
impl FromJson for MailboxRole {
    /// Parse a role from its JSON string form.
    ///
    /// Known role names map to their variants; anything else must carry the
    /// "x-" custom-role prefix produced by `to_string`. Previously any string
    /// containing a '-' was accepted (e.g. "foo-bar" parsed as Custom("bar")),
    /// which broke the round-trip with `to_string`; now only "x-..." parses.
    fn from_json(json: &Json) -> Result<MailboxRole,ParseError> {
        match *json {
            Json::String(ref v) => match v.as_ref() {
                "inbox" => Ok(MailboxRole::Inbox),
                "archive" => Ok(MailboxRole::Archive),
                "drafts" => Ok(MailboxRole::Drafts),
                "outbox" => Ok(MailboxRole::Outbox),
                "sent" => Ok(MailboxRole::Sent),
                "trash" => Ok(MailboxRole::Trash),
                "spam" => Ok(MailboxRole::Spam),
                "templates" => Ok(MailboxRole::Templates),
                r => {
                    // Custom roles are serialized as "x-<name>"; require that
                    // exact prefix rather than splitting on the first '-'.
                    if r.starts_with("x-") {
                        Ok(MailboxRole::Custom(r[2..].to_string()))
                    } else {
                        Err(ParseError::InvalidStructure("MailboxRole".to_string()))
                    }
                }
            },
            _ => Err(ParseError::InvalidJsonType("MailboxRole".to_string())),
        }
    }
}
// Declares the `Mailbox` record (and its `PartialMailbox` companion), mapping
// each Rust field to its JSON key. Uses "sortOrder" per the latest spec
// (renamed from the older "precedence").
make_record_type!(Mailbox, PartialMailbox, "Mailbox",
    name: String => "name",
    parent_id: Option<String> => "parentId",
    role: Option<MailboxRole> => "role",
    sort_order: i32 => "sortOrder",
    must_be_only_mailbox: bool => "mustBeOnlyMailbox",
    may_read_items: bool => "mayReadItems",
    may_add_items: bool => "mayAddItems",
    may_remove_items: bool => "mayRemoveItems",
    may_create_child: bool => "mayCreateChild",
    may_rename: bool => "mayRename",
    may_delete: bool => "mayDelete",
    total_messages: usize => "totalMessages",
    unread_messages: usize => "unreadMessages",
    total_threads: usize => "totalThreads",
    unread_threads: usize => "unreadThreads"
);
|
/// Given a list of entries `items` and the query string, filter out the
/// matched entries using fuzzy search algorithm.
use std::sync::{Arc, RwLock};
use std::sync::mpsc::Sender;
use std::collections::HashMap;
use event::Event;
use item::{Item, MatchedItem, MatchedRange};
use util::eventbox::EventBox;
use score;
use orderedvec::OrderedVec;
/// Drives fuzzy matching of the shared item list against the current query
/// and streams the results downstream.
pub struct Matcher {
    tx_output: Sender<MatchedItem>, // channel to send output to
    eb_req: Arc<EventBox<Event>>, // event box that receives requests
    eb_notify: Arc<EventBox<Event>>, // event box that sends out notifications
    items: Arc<RwLock<Vec<Item>>>, // shared source items to match against
    item_pos: usize, // index of the next item to process
    num_matched: u64, // running count of matches for the current query
    query: String, // the query currently being matched
    cache: HashMap<String, MatcherCache>, // per-query cache of matched items
}
impl Matcher {
    /// Build a matcher over the shared `items`; matches go to `tx_output`,
    /// control events arrive on `eb_req`, progress is reported on `eb_notify`.
    pub fn new(items: Arc<RwLock<Vec<Item>>>, tx_output: Sender<MatchedItem>,
               eb_req: Arc<EventBox<Event>>, eb_notify: Arc<EventBox<Event>>) -> Self {
        Matcher {
            tx_output: tx_output,
            eb_req: eb_req,
            eb_notify: eb_notify,
            items: items,
            item_pos: 0,
            num_matched: 0,
            query: String::new(),
            cache: HashMap::new(),
        }
    }
    /// Match items starting at `self.item_pos` against the current query,
    /// sending each hit downstream and recording it in the query's cache.
    ///
    /// NOTE(review): assumes `reset_query` already created the cache entry
    /// for `self.query` — the `unwrap` panics otherwise. Also, after a cache
    /// replay `item_pos` counts cached matches rather than scanned source
    /// items, so items may be skipped or re-scanned — confirm.
    pub fn process(&mut self) {
        let ref mut cache = self.cache.get_mut(&self.query).unwrap().matched_items;
        loop {
            let items = self.items.read().unwrap();
            if let Some(item) = items.get(self.item_pos) {
                if let Some(matched) = match_item(self.item_pos, &item.text, &self.query) {
                    self.num_matched += 1;
                    cache.push(matched.clone());
                    let _ = self.tx_output.send(matched);
                }
            } else {
                // Ran past the end of the item list: matching is complete.
                (*self.eb_notify).set(Event::EvMatcherEnd, Box::new(true));
                break;
            }
            self.item_pos += 1;
            (*self.eb_notify).set(Event::EvMatcherUpdateProcess, Box::new((self.num_matched, items.len() as u64, self.item_pos as u64)));
            // check if the current process need to be stopped
            if !self.eb_req.is_empty() {
                break;
            }
        }
    }
    /// Replay previously-computed matches for the current query from its
    /// cache, starting at `self.item_pos`.
    pub fn output_from_cache(&mut self) {
        if !self.cache.contains_key(&self.query) {
            return;
        }
        let ref mut matched_items = self.cache.get_mut(&self.query).unwrap().matched_items;
        let total = matched_items.len();
        loop {
            if let Some(ref matched) = matched_items.get(self.item_pos) {
                self.num_matched += 1;
                let _ = self.tx_output.send((*matched).clone());
                self.item_pos += 1;
                (*self.eb_notify).set(Event::EvMatcherUpdateProcess, Box::new((self.num_matched, total as u64, self.item_pos as u64)));
            } else {
                break;
            }
            // Abort the replay as soon as a new request is pending.
            if !self.eb_req.is_empty() {
                break;
            }
        }
    }
    /// Install `query` as the active query, reset the progress counters and
    /// create its cache entry if this query has not been seen before.
    fn reset_query(&mut self, query: &str) {
        self.query.clear();
        self.query.push_str(query);
        self.num_matched = 0;
        self.item_pos = 0;
        self.cache.entry(query.to_string()).or_insert_with(|| MatcherCache::new());
    }
    /// Event loop: wait for requests, then serve cached results and continue
    /// matching until interrupted by the next request.
    pub fn run(&mut self) {
        loop {
            for (e, val) in (*self.eb_req).wait() {
                match e {
                    Event::EvMatcherNewItem => {}
                    Event::EvMatcherResetQuery => {
                        self.reset_query(&val.downcast::<String>().unwrap());
                        (*self.eb_notify).set(Event::EvMatcherStart, Box::new(true));
                    }
                    _ => {}
                }
            }
            self.output_from_cache();
            self.process();
        }
    }
}
/// Run the fuzzy matcher for `item` against `query`; returns the populated
/// `MatchedItem` (index, matched character range, score), or `None` when the
/// item does not match.
fn match_item(index: usize, item: &str, query: &str) -> Option<MatchedItem> {
    // Idiom: map over the Option instead of comparing to None and unwrapping.
    score::fuzzy_match(item, query).map(|(score, matched_range)| {
        let mut matched = MatchedItem::new(index);
        matched.set_matched_range(MatchedRange::Chars(matched_range));
        matched.set_score(score);
        matched
    })
}
/// Per-query cache of the matches produced so far.
struct MatcherCache {
    matched_items: OrderedVec<MatchedItem>, // matches in score order
}
impl MatcherCache {
    /// Empty cache with no recorded matches.
    pub fn new() -> Self {
        MatcherCache {
            matched_items: OrderedVec::new(),
        }
    }
    /// Record one matched item.
    pub fn push(&mut self, matched_item: MatchedItem) {
        self.matched_items.push(matched_item);
    }
}
[matcher] add cache
/// Given a list of entries `items` and the query string, filter out the
/// matched entries using fuzzy search algorithm.
use std::sync::{Arc, RwLock};
use std::sync::mpsc::Sender;
use std::collections::HashMap;
use event::Event;
use item::{Item, MatchedItem, MatchedRange};
use util::eventbox::EventBox;
use score;
use orderedvec::OrderedVec;
/// Drives fuzzy matching of the shared item list against the current query
/// and streams the results downstream.
pub struct Matcher {
    tx_output: Sender<MatchedItem>, // channel to send output to
    eb_req: Arc<EventBox<Event>>, // event box that receives requests
    eb_notify: Arc<EventBox<Event>>, // event box that sends out notifications
    items: Arc<RwLock<Vec<Item>>>, // shared source items to match against
    item_pos: usize, // index of the next item to process
    num_matched: u64, // running count of matches for the current query
    query: String, // the query currently being matched
    cache: HashMap<String, MatcherCache>, // per-query cache of matched items
}
impl Matcher {
    /// Build a matcher over the shared `items`; matches go to `tx_output`,
    /// control events arrive on `eb_req`, progress is reported on `eb_notify`.
    /// The cache entry for the empty query is pre-created so `process` can
    /// run before any `reset_query`.
    pub fn new(items: Arc<RwLock<Vec<Item>>>, tx_output: Sender<MatchedItem>,
               eb_req: Arc<EventBox<Event>>, eb_notify: Arc<EventBox<Event>>) -> Self {
        let mut cache = HashMap::new();
        cache.entry("".to_string()).or_insert(MatcherCache::new());
        Matcher {
            tx_output: tx_output,
            eb_req: eb_req,
            eb_notify: eb_notify,
            items: items,
            item_pos: 0,
            num_matched: 0,
            query: String::new(),
            cache: cache,
        }
    }
    /// Match source items against the current query, resuming from the
    /// per-query position saved in the cache, sending each hit downstream
    /// and recording it (plus the new position) in the cache.
    ///
    /// NOTE(review): assumes the cache entry for `self.query` exists — the
    /// `unwrap` panics otherwise.
    pub fn process(&mut self) {
        let ref mut cache = self.cache.get_mut(&self.query).unwrap();
        // Resume scanning where this query previously stopped.
        self.item_pos = cache.item_pos;
        loop {
            let items = self.items.read().unwrap();
            if let Some(item) = items.get(self.item_pos) {
                if let Some(matched) = match_item(self.item_pos, &item.text, &self.query) {
                    self.num_matched += 1;
                    cache.matched_items.push(matched.clone());
                    let _ = self.tx_output.send(matched);
                }
            } else {
                // Ran past the end of the item list: matching is complete.
                (*self.eb_notify).set(Event::EvMatcherEnd, Box::new(true));
                break;
            }
            self.item_pos += 1;
            // Persist the scan position so the next run resumes here.
            cache.item_pos = self.item_pos;
            (*self.eb_notify).set(Event::EvMatcherUpdateProcess, Box::new((self.num_matched, items.len() as u64, self.item_pos as u64)));
            // check if the current process need to be stopped
            if !self.eb_req.is_empty() {
                break;
            }
        }
    }
    /// Replay previously-computed matches for the current query from its
    /// cache, starting at `self.item_pos`.
    pub fn output_from_cache(&mut self) {
        if !self.cache.contains_key(&self.query) {
            return;
        }
        let ref mut matched_items = self.cache.get_mut(&self.query).unwrap().matched_items;
        let total = matched_items.len();
        loop {
            if let Some(ref matched) = matched_items.get(self.item_pos) {
                self.num_matched += 1;
                let _ = self.tx_output.send((*matched).clone());
                self.item_pos += 1;
                (*self.eb_notify).set(Event::EvMatcherUpdateProcess, Box::new((self.num_matched, total as u64, self.item_pos as u64)));
            } else {
                break;
            }
            // Abort the replay as soon as a new request is pending.
            if !self.eb_req.is_empty() {
                break;
            }
        }
    }
    /// Install `query` as the active query, reset the progress counters and
    /// create its cache entry if this query has not been seen before.
    fn reset_query(&mut self, query: &str) {
        self.query.clear();
        self.query.push_str(query);
        self.num_matched = 0;
        self.item_pos = 0;
        self.cache.entry(query.to_string()).or_insert(MatcherCache::new());
    }
    /// Event loop: wait for requests, then serve cached results and continue
    /// matching until interrupted by the next request.
    pub fn run(&mut self) {
        loop {
            for (e, val) in (*self.eb_req).wait() {
                match e {
                    Event::EvMatcherNewItem => {}
                    Event::EvMatcherResetQuery => {
                        self.reset_query(&val.downcast::<String>().unwrap());
                        (*self.eb_notify).set(Event::EvMatcherStart, Box::new(true));
                    }
                    _ => {}
                }
            }
            self.output_from_cache();
            self.process();
        }
    }
}
/// Run the fuzzy matcher for `item` against `query`; returns the populated
/// `MatchedItem` (index, matched character range, score), or `None` when the
/// item does not match.
fn match_item(index: usize, item: &str, query: &str) -> Option<MatchedItem> {
    // Idiom: map over the Option instead of comparing to None and unwrapping.
    score::fuzzy_match(item, query).map(|(score, matched_range)| {
        let mut matched = MatchedItem::new(index);
        matched.set_matched_range(MatchedRange::Chars(matched_range));
        matched.set_score(score);
        matched
    })
}
/// Per-query cache: the matches produced so far plus the position of the
/// next source item still to be scanned for this query.
struct MatcherCache {
    matched_items: OrderedVec<MatchedItem>, // matches in score order
    pub item_pos: usize, // next source-item index to scan for this query
}
impl MatcherCache {
    /// Empty cache with no recorded matches, starting at item 0.
    pub fn new() -> Self {
        MatcherCache {
            item_pos: 0,
            matched_items: OrderedVec::new(),
        }
    }
    /// Record one matched item.
    pub fn push(&mut self, matched_item: MatchedItem) {
        self.matched_items.push(matched_item);
    }
}
|
//! MiniDfs Cluster module
//!
//! MiniDFS launches a embedded HDFS cluster, which is a full feature of a real HDFS cluster
//! It is usually for testing.
//!
//! ## Example
//!
//! ```ignore
//! let mut conf = MiniDfsConf::new();
//! let dfs = MiniDFS::start(&mut conf).unwrap();
//! let port = dfs.namenode_port();
//! ...
//! dfs.stop()
//! ```
use libc::{c_char, c_int};
use std::ffi;
use std::mem;
use std::str;
use native::*;
/// Handle to an embedded (in-process) HDFS mini cluster.
pub struct MiniDFS
{
    // Raw pointer to the native cluster object, owned by the C library.
    cluster: *const NativeMiniDfsCluster
}
impl MiniDFS
{
pub fn start(conf: &MiniDfsConf) -> Option<MiniDFS>
{
match unsafe { nmdCreate(conf) } {
val if !val.is_null() => Some(MiniDFS { cluster: val }),
_ => None
}
}
pub fn stop(&self)
{
unsafe {
nmdShutdown(self.cluster);
nmdFree(self.cluster);
}
}
pub fn wait_for_clusterup(&self) -> bool
{
if unsafe { nmdWaitClusterUp(self.cluster) } == 0 { true } else { false }
}
pub fn namenode_port(&self) -> Option<i32>
{
match unsafe { nmdGetNameNodePort(self.cluster) as i32 } {
val if val > 0 => Some(val),
_ => None
}
}
pub fn namenode_http_addr(&self) -> Option<(&str, i32)>
{
let mut hostname: *mut c_char = unsafe {mem::zeroed()};
let mut port: c_int = 0;
match unsafe {
nmdGetNameNodeHttpAddress(self.cluster, &mut port, &mut hostname)
} {
0 => {
let slice = unsafe { ffi::CStr::from_ptr(hostname) }.to_bytes();
let str = str::from_utf8(slice).unwrap();
Some((str, port as i32))
},
_ => None
}
}
pub fn set_hdfs_builder(&self, builder: *mut hdfsBuilder) -> bool
{
if unsafe { nmdConfigureHdfsBuilder(self.cluster, builder) } == 0
{ true } else { false }
}
}
Changed the comments in minidfs.
//! MiniDfs Cluster
//!
//! MiniDFS provides a embedded HDFS cluster. It is usually for testing.
//!
//! ## Example
//!
//! ```ignore
//! let mut conf = MiniDfsConf::new();
//! let dfs = MiniDFS::start(&mut conf).unwrap();
//! let port = dfs.namenode_port();
//! ...
//! dfs.stop()
//! ```
use libc::{c_char, c_int};
use std::ffi;
use std::mem;
use std::str;
use native::*;
/// Handle to an embedded (in-process) HDFS mini cluster.
pub struct MiniDFS
{
    // Raw pointer to the native cluster object, owned by the C library.
    cluster: *const NativeMiniDfsCluster
}
impl MiniDFS
{
pub fn start(conf: &MiniDfsConf) -> Option<MiniDFS>
{
match unsafe { nmdCreate(conf) } {
val if !val.is_null() => Some(MiniDFS { cluster: val }),
_ => None
}
}
pub fn stop(&self)
{
unsafe {
nmdShutdown(self.cluster);
nmdFree(self.cluster);
}
}
pub fn wait_for_clusterup(&self) -> bool
{
if unsafe { nmdWaitClusterUp(self.cluster) } == 0 { true } else { false }
}
pub fn namenode_port(&self) -> Option<i32>
{
match unsafe { nmdGetNameNodePort(self.cluster) as i32 } {
val if val > 0 => Some(val),
_ => None
}
}
pub fn namenode_http_addr(&self) -> Option<(&str, i32)>
{
let mut hostname: *mut c_char = unsafe {mem::zeroed()};
let mut port: c_int = 0;
match unsafe {
nmdGetNameNodeHttpAddress(self.cluster, &mut port, &mut hostname)
} {
0 => {
let slice = unsafe { ffi::CStr::from_ptr(hostname) }.to_bytes();
let str = str::from_utf8(slice).unwrap();
Some((str, port as i32))
},
_ => None
}
}
pub fn set_hdfs_builder(&self, builder: *mut hdfsBuilder) -> bool
{
if unsafe { nmdConfigureHdfsBuilder(self.cluster, builder) } == 0
{ true } else { false }
}
} |
use std::{iter, thread};
use std::sync::atomic::{AtomicIsize, AtomicUsize, AtomicBool, Ordering};
use comm;
use config;
use player::Player;
use field::{Pos, Field};
use trajectories_pruning::TrajectoriesPruning;
use common;
const MINIMAX_STR: &'static str = "minimax";
/// Sequential negamax search with alpha-beta pruning.
///
/// `last_pos` is the move that produced the current position; `alpha`/`beta`
/// bound the window from `player`'s point of view. Returns the position
/// estimation for `player` (higher is better).
fn alpha_beta(field: &mut Field, depth: u32, last_pos: Pos, player: Player, trajectories_pruning: &TrajectoriesPruning, mut alpha: i32, beta: i32, empty_board: &mut Vec<u32>) -> i32 {
    let enemy = player.next();
    // A "stupid" last move by the enemy means this line is already won.
    if common::is_last_move_stupid(field, last_pos, enemy) {
        return i32::max_value();
    }
    if depth == 0 {
        return field.score(player);
    }
    let moves = trajectories_pruning.calculate_moves(empty_board);
    if moves.is_empty() {
        return field.score(player);
    }
    for pos in moves {
        field.put_point(pos, player);
        if common::is_penult_move_stuped(field) {
            field.undo();
            return i32::max_value();
        }
        let next_trajectories_pruning = TrajectoriesPruning::from_last(field, enemy, depth - 1, empty_board, trajectories_pruning, pos);
        // Null-window probe first; re-search with the full window only when
        // the probe lands strictly inside (alpha, beta).
        let mut cur_estimation = -alpha_beta(field, depth - 1, pos, enemy, &next_trajectories_pruning, -alpha - 1, -alpha, empty_board);
        if cur_estimation > alpha && cur_estimation < beta {
            cur_estimation = -alpha_beta(field, depth - 1, pos, enemy, &next_trajectories_pruning, -beta, -cur_estimation, empty_board);
        }
        field.undo();
        if cur_estimation > alpha {
            alpha = cur_estimation;
            if alpha >= beta {
                break;
            }
        }
    }
    alpha
}
/// Root-level alpha-beta search that fans candidate moves out to a pool of
/// worker threads over an SPMC channel. Writes the winning move (if any)
/// into `best_move` and returns the final alpha value.
fn alpha_beta_parallel(field: &mut Field, player: Player, depth: u32, alpha: i32, beta: i32, trajectories_pruning: &TrajectoriesPruning, empty_board: &mut Vec<u32>, best_move: &mut Option<Pos>, should_stop: &AtomicBool) -> i32 {
    info!(target: MINIMAX_STR, "Starting parellel alpha beta with depth {}, player {} and beta {}.", depth, player, beta);
    if depth == 0 || should_stop.load(Ordering::Relaxed) {
        *best_move = None;
        return field.score(player);
    }
    let moves = trajectories_pruning.calculate_moves(empty_board);
    debug!(target: MINIMAX_STR, "Moves in consideration: {:?}.", moves.iter().map(|&pos| (field.to_x(pos), field.to_y(pos))).collect::<Vec<(u32, u32)>>());
    if moves.is_empty() || should_stop.load(Ordering::Relaxed) {
        *best_move = None;
        return field.score(player);
    }
    // Queue every candidate move; workers pull from the shared consumer.
    let (producer, consumer) = comm::spmc::unbounded::new();
    for pos in moves {
        producer.send(pos).ok();
    }
    let threads_count = config::threads_count();
    // Shared state: the current alpha and the move that produced it.
    // 0 serves as the "no best move yet" sentinel for atomic_best_move.
    let atomic_alpha = AtomicIsize::new(alpha as isize);
    let atomic_best_move = AtomicUsize::new(0);
    let mut guards = Vec::with_capacity(threads_count);
    for _ in 0 .. threads_count {
        guards.push(thread::scoped(|| {
            let local_consumer = consumer.clone();
            // Each worker searches on its own copy of the field.
            let mut local_field = field.clone();
            let mut local_empty_board = iter::repeat(0u32).take(field.length()).collect();
            let enemy = player.next();
            while let Some(pos) = local_consumer.recv_async().ok() {
                if should_stop.load(Ordering::Relaxed) {
                    debug!(target: MINIMAX_STR, "Time-out!");
                    break;
                }
                local_field.put_point(pos, player);
                let next_trajectories_pruning = TrajectoriesPruning::from_last(&mut local_field, enemy, depth - 1, &mut local_empty_board, &trajectories_pruning, pos);
                if should_stop.load(Ordering::Relaxed) {
                    debug!(target: MINIMAX_STR, "Time-out!");
                    break;
                }
                let cur_alpha = atomic_alpha.load(Ordering::SeqCst) as i32;
                if cur_alpha >= beta {
                    break;
                }
                // Null-window probe at the shared alpha; re-search with the
                // full window only when the probe beats it.
                let mut cur_estimation = -alpha_beta(&mut local_field, depth - 1, pos, enemy, &next_trajectories_pruning, -cur_alpha - 1, -cur_alpha, &mut local_empty_board);
                if cur_estimation > cur_alpha {
                    if !should_stop.load(Ordering::Relaxed) {
                        cur_estimation = -alpha_beta(&mut local_field, depth - 1, pos, enemy, &next_trajectories_pruning, -beta, -cur_estimation, &mut local_empty_board);
                    } else {
                        debug!(target: MINIMAX_STR, "Time-out! Next estimation ma be approximated.");
                    }
                }
                local_field.undo();
                // CAS loop publishing (alpha, best_move) when alpha improves.
                // NOTE(review): the two compare_and_swap calls are not one
                // atomic unit — a racing thread could interleave; confirm
                // acceptable for this search.
                loop {
                    let last_pos = atomic_best_move.load(Ordering::SeqCst);
                    let last_alpha = atomic_alpha.load(Ordering::SeqCst);
                    if cur_estimation > last_alpha as i32 {
                        if atomic_alpha.compare_and_swap(last_alpha, cur_estimation as isize, Ordering::SeqCst) == last_alpha && atomic_best_move.compare_and_swap(last_pos, pos, Ordering::SeqCst) == last_pos {
                            debug!(target: MINIMAX_STR, "{} for move ({}, {}) is {}.", if cur_estimation < beta { "Estimation" } else { "Lower bound of estimation" }, field.to_x(pos), field.to_y(pos), cur_estimation);
                            break;
                        }
                    } else {
                        debug!(target: MINIMAX_STR, "{} for move ({}, {}) is {}.", if cur_estimation > cur_alpha { if cur_estimation < beta { "Estimation" } else { "Lower bound of estimation" } } else { "Upper bound of estimation" }, field.to_x(pos), field.to_y(pos), cur_estimation);
                        break;
                    }
                }
            }
        }));
    }
    // Dropping the scoped guards joins all worker threads.
    drop(guards);
    let result = atomic_best_move.load(Ordering::SeqCst);
    if result != 0 {
        info!(target: MINIMAX_STR, "Best move is ({}, {}).", field.to_x(result), field.to_y(result));
        *best_move = Some(result);
    } else {
        info!(target: MINIMAX_STR, "Best move is not found.");
        *best_move = None;
    }
    let cur_alpha = atomic_alpha.load(Ordering::SeqCst);
    info!(target: MINIMAX_STR, "Estimation is {}.", cur_alpha);
    cur_alpha as i32
}
/// Fixed-depth search entry point: returns the best move for `player`, or
/// `None` when no move is better than the others (or `depth` is 0).
pub fn minimax(field: &mut Field, player: Player, depth: u32) -> Option<Pos> {
    info!(target: MINIMAX_STR, "Starting minimax with depth {} and player {}.", depth, player);
    if depth == 0 {
        return None;
    }
    // This entry point has no time limit, so the stop flag stays false.
    let should_stop = AtomicBool::new(false);
    let mut empty_board = iter::repeat(0u32).take(field.length()).collect();
    let trajectories_pruning = TrajectoriesPruning::new(field, player, depth, &mut empty_board);
    let mut best_move = None;
    info!(target: MINIMAX_STR, "Calculating of our estimation. Player is {}", player);
    let estimation = alpha_beta_parallel(field, player, depth, i32::min_value() + 1, i32::max_value(), &trajectories_pruning, &mut empty_board, &mut best_move, &should_stop);
    let enemy = player.next();
    let mut enemy_best_move = best_move;
    let enemy_trajectories_pruning = TrajectoriesPruning::dec_exists(&field, enemy, depth - 1, &mut empty_board, &trajectories_pruning);
    info!(target: MINIMAX_STR, "Calculating of enemy estimation with upper bound {}. Player is {}", -estimation + 1, enemy);
    // Verify with a null-window search at depth-1 that the enemy cannot
    // reach the same estimation; if they can, all moves are equivalent.
    if -alpha_beta_parallel(field, enemy, depth - 1, -estimation, -estimation + 1, &enemy_trajectories_pruning, &mut empty_board, &mut enemy_best_move, &should_stop) < estimation {
        info!(target: MINIMAX_STR, "Estimation is greater than enemy estimation. So the best move is {:?}, estimation is {}.", best_move.map(|pos| (field.to_x(pos), field.to_y(pos))), estimation);
        best_move
    } else {
        info!(target: MINIMAX_STR, "Estimation is less than or equal enemy estimation. So all moves have the same estimation {}.", estimation);
        None
    }
}
/// Iterative-deepening minimax bounded by wall-clock `time` (milliseconds).
///
/// A timer thread raises `should_stop`; each deepening iteration runs the
/// parallel search and then verifies the candidate against the enemy's
/// reply before accepting it.
pub fn minimax_with_time(field: &mut Field, player: Player, time: u32) -> Option<Pos> {
    let should_stop = AtomicBool::new(false);
    // Timer thread: flips the stop flag once the budget is exhausted.
    let guard = thread::scoped(|| {
        thread::sleep_ms(time);
        should_stop.store(true, Ordering::Relaxed);
    });
    let enemy = player.next();
    let mut depth = 1;
    let mut best_move = None;
    let mut cur_best_move = None;
    let mut enemy_best_move = None;
    let mut empty_board = iter::repeat(0u32).take(field.length()).collect();
    let mut trajectories_pruning = TrajectoriesPruning::new(field, player, depth, &mut empty_board);
    while !should_stop.load(Ordering::Relaxed) {
        let estimation = alpha_beta_parallel(field, player, depth, i32::min_value() + 1, i32::max_value(), &trajectories_pruning, &mut empty_board, &mut cur_best_move, &should_stop);
        if should_stop.load(Ordering::Relaxed) {
            break;
        }
        let enemy_trajectories_pruning = TrajectoriesPruning::dec_exists(&field, enemy, depth - 1, &mut empty_board, &trajectories_pruning);
        if should_stop.load(Ordering::Relaxed) {
            break;
        }
        // Accept the candidate when the enemy's best reply cannot match our
        // estimation. If the timer fired during this verification the enemy
        // search is only approximate, so keep the candidate instead of
        // discarding the whole iteration (bug fix: a timeout here previously
        // threw away the completed best move).
        best_move = if -alpha_beta_parallel(field, enemy, depth - 1, -estimation, -estimation + 1, &enemy_trajectories_pruning, &mut empty_board, &mut enemy_best_move, &should_stop) < estimation || should_stop.load(Ordering::Relaxed) {
            cur_best_move
        } else {
            None
        };
        if should_stop.load(Ordering::Relaxed) {
            break;
        }
        depth += 1;
        trajectories_pruning = TrajectoriesPruning::inc_exists(field, player, depth, &mut empty_board, &trajectories_pruning);
    }
    // Joins the timer thread before returning.
    drop(guard);
    best_move
}
Keep the current best move when the time limit expires during the enemy verification search, instead of discarding the whole iteration.
use std::{iter, thread};
use std::sync::atomic::{AtomicIsize, AtomicUsize, AtomicBool, Ordering};
use comm;
use config;
use player::Player;
use field::{Pos, Field};
use trajectories_pruning::TrajectoriesPruning;
use common;
const MINIMAX_STR: &'static str = "minimax";
/// Sequential negamax search with alpha-beta pruning.
///
/// `last_pos` is the move that produced the current position; `alpha`/`beta`
/// bound the window from `player`'s point of view. Returns the position
/// estimation for `player` (higher is better).
fn alpha_beta(field: &mut Field, depth: u32, last_pos: Pos, player: Player, trajectories_pruning: &TrajectoriesPruning, mut alpha: i32, beta: i32, empty_board: &mut Vec<u32>) -> i32 {
    let enemy = player.next();
    // A "stupid" last move by the enemy means this line is already won.
    if common::is_last_move_stupid(field, last_pos, enemy) {
        return i32::max_value();
    }
    if depth == 0 {
        return field.score(player);
    }
    let moves = trajectories_pruning.calculate_moves(empty_board);
    if moves.is_empty() {
        return field.score(player);
    }
    for pos in moves {
        field.put_point(pos, player);
        if common::is_penult_move_stuped(field) {
            field.undo();
            return i32::max_value();
        }
        let next_trajectories_pruning = TrajectoriesPruning::from_last(field, enemy, depth - 1, empty_board, trajectories_pruning, pos);
        // Null-window probe first; re-search with the full window only when
        // the probe lands strictly inside (alpha, beta).
        let mut cur_estimation = -alpha_beta(field, depth - 1, pos, enemy, &next_trajectories_pruning, -alpha - 1, -alpha, empty_board);
        if cur_estimation > alpha && cur_estimation < beta {
            cur_estimation = -alpha_beta(field, depth - 1, pos, enemy, &next_trajectories_pruning, -beta, -cur_estimation, empty_board);
        }
        field.undo();
        if cur_estimation > alpha {
            alpha = cur_estimation;
            if alpha >= beta {
                break;
            }
        }
    }
    alpha
}
/// Root-level alpha-beta search that fans candidate moves out to a pool of
/// worker threads over an SPMC channel. Writes the winning move (if any)
/// into `best_move` and returns the final alpha value.
fn alpha_beta_parallel(field: &mut Field, player: Player, depth: u32, alpha: i32, beta: i32, trajectories_pruning: &TrajectoriesPruning, empty_board: &mut Vec<u32>, best_move: &mut Option<Pos>, should_stop: &AtomicBool) -> i32 {
    info!(target: MINIMAX_STR, "Starting parellel alpha beta with depth {}, player {} and beta {}.", depth, player, beta);
    if depth == 0 || should_stop.load(Ordering::Relaxed) {
        *best_move = None;
        return field.score(player);
    }
    let moves = trajectories_pruning.calculate_moves(empty_board);
    debug!(target: MINIMAX_STR, "Moves in consideration: {:?}.", moves.iter().map(|&pos| (field.to_x(pos), field.to_y(pos))).collect::<Vec<(u32, u32)>>());
    if moves.is_empty() || should_stop.load(Ordering::Relaxed) {
        *best_move = None;
        return field.score(player);
    }
    // Queue every candidate move; workers pull from the shared consumer.
    let (producer, consumer) = comm::spmc::unbounded::new();
    for pos in moves {
        producer.send(pos).ok();
    }
    let threads_count = config::threads_count();
    // Shared state: the current alpha and the move that produced it.
    // 0 serves as the "no best move yet" sentinel for atomic_best_move.
    let atomic_alpha = AtomicIsize::new(alpha as isize);
    let atomic_best_move = AtomicUsize::new(0);
    let mut guards = Vec::with_capacity(threads_count);
    for _ in 0 .. threads_count {
        guards.push(thread::scoped(|| {
            let local_consumer = consumer.clone();
            // Each worker searches on its own copy of the field.
            let mut local_field = field.clone();
            let mut local_empty_board = iter::repeat(0u32).take(field.length()).collect();
            let enemy = player.next();
            while let Some(pos) = local_consumer.recv_async().ok() {
                if should_stop.load(Ordering::Relaxed) {
                    debug!(target: MINIMAX_STR, "Time-out!");
                    break;
                }
                local_field.put_point(pos, player);
                let next_trajectories_pruning = TrajectoriesPruning::from_last(&mut local_field, enemy, depth - 1, &mut local_empty_board, &trajectories_pruning, pos);
                if should_stop.load(Ordering::Relaxed) {
                    debug!(target: MINIMAX_STR, "Time-out!");
                    break;
                }
                let cur_alpha = atomic_alpha.load(Ordering::SeqCst) as i32;
                if cur_alpha >= beta {
                    break;
                }
                // Null-window probe at the shared alpha; re-search with the
                // full window only when the probe beats it.
                let mut cur_estimation = -alpha_beta(&mut local_field, depth - 1, pos, enemy, &next_trajectories_pruning, -cur_alpha - 1, -cur_alpha, &mut local_empty_board);
                if cur_estimation > cur_alpha {
                    if !should_stop.load(Ordering::Relaxed) {
                        cur_estimation = -alpha_beta(&mut local_field, depth - 1, pos, enemy, &next_trajectories_pruning, -beta, -cur_estimation, &mut local_empty_board);
                    } else {
                        debug!(target: MINIMAX_STR, "Time-out! Next estimation ma be approximated.");
                    }
                }
                local_field.undo();
                // CAS loop publishing (alpha, best_move) when alpha improves.
                // NOTE(review): the two compare_and_swap calls are not one
                // atomic unit — a racing thread could interleave; confirm
                // acceptable for this search.
                loop {
                    let last_pos = atomic_best_move.load(Ordering::SeqCst);
                    let last_alpha = atomic_alpha.load(Ordering::SeqCst);
                    if cur_estimation > last_alpha as i32 {
                        if atomic_alpha.compare_and_swap(last_alpha, cur_estimation as isize, Ordering::SeqCst) == last_alpha && atomic_best_move.compare_and_swap(last_pos, pos, Ordering::SeqCst) == last_pos {
                            debug!(target: MINIMAX_STR, "{} for move ({}, {}) is {}.", if cur_estimation < beta { "Estimation" } else { "Lower bound of estimation" }, field.to_x(pos), field.to_y(pos), cur_estimation);
                            break;
                        }
                    } else {
                        debug!(target: MINIMAX_STR, "{} for move ({}, {}) is {}.", if cur_estimation > cur_alpha { if cur_estimation < beta { "Estimation" } else { "Lower bound of estimation" } } else { "Upper bound of estimation" }, field.to_x(pos), field.to_y(pos), cur_estimation);
                        break;
                    }
                }
            }
        }));
    }
    // Dropping the scoped guards joins all worker threads.
    drop(guards);
    let result = atomic_best_move.load(Ordering::SeqCst);
    if result != 0 {
        info!(target: MINIMAX_STR, "Best move is ({}, {}).", field.to_x(result), field.to_y(result));
        *best_move = Some(result);
    } else {
        info!(target: MINIMAX_STR, "Best move is not found.");
        *best_move = None;
    }
    let cur_alpha = atomic_alpha.load(Ordering::SeqCst);
    info!(target: MINIMAX_STR, "Estimation is {}.", cur_alpha);
    cur_alpha as i32
}
/// Fixed-depth search entry point: returns the best move for `player`, or
/// `None` when no move is better than the others (or `depth` is 0).
pub fn minimax(field: &mut Field, player: Player, depth: u32) -> Option<Pos> {
    info!(target: MINIMAX_STR, "Starting minimax with depth {} and player {}.", depth, player);
    if depth == 0 {
        return None;
    }
    // This entry point has no time limit, so the stop flag stays false.
    let should_stop = AtomicBool::new(false);
    let mut empty_board = iter::repeat(0u32).take(field.length()).collect();
    let trajectories_pruning = TrajectoriesPruning::new(field, player, depth, &mut empty_board);
    let mut best_move = None;
    info!(target: MINIMAX_STR, "Calculating of our estimation. Player is {}", player);
    let estimation = alpha_beta_parallel(field, player, depth, i32::min_value() + 1, i32::max_value(), &trajectories_pruning, &mut empty_board, &mut best_move, &should_stop);
    let enemy = player.next();
    let mut enemy_best_move = best_move;
    let enemy_trajectories_pruning = TrajectoriesPruning::dec_exists(&field, enemy, depth - 1, &mut empty_board, &trajectories_pruning);
    info!(target: MINIMAX_STR, "Calculating of enemy estimation with upper bound {}. Player is {}", -estimation + 1, enemy);
    // Verify with a null-window search at depth-1 that the enemy cannot
    // reach the same estimation; if they can, all moves are equivalent.
    if -alpha_beta_parallel(field, enemy, depth - 1, -estimation, -estimation + 1, &enemy_trajectories_pruning, &mut empty_board, &mut enemy_best_move, &should_stop) < estimation {
        info!(target: MINIMAX_STR, "Estimation is greater than enemy estimation. So the best move is {:?}, estimation is {}.", best_move.map(|pos| (field.to_x(pos), field.to_y(pos))), estimation);
        best_move
    } else {
        info!(target: MINIMAX_STR, "Estimation is less than or equal enemy estimation. So all moves have the same estimation {}.", estimation);
        None
    }
}
/// Iterative-deepening minimax bounded by wall-clock `time` (milliseconds).
///
/// A timer thread raises `should_stop`; each deepening iteration runs the
/// parallel search and then verifies the candidate against the enemy's
/// reply before accepting it.
pub fn minimax_with_time(field: &mut Field, player: Player, time: u32) -> Option<Pos> {
    let should_stop = AtomicBool::new(false);
    // Timer thread: flips the stop flag once the budget is exhausted.
    let guard = thread::scoped(|| {
        thread::sleep_ms(time);
        should_stop.store(true, Ordering::Relaxed);
    });
    let enemy = player.next();
    let mut depth = 1;
    let mut best_move = None;
    let mut cur_best_move = None;
    let mut enemy_best_move = None;
    let mut empty_board = iter::repeat(0u32).take(field.length()).collect();
    let mut trajectories_pruning = TrajectoriesPruning::new(field, player, depth, &mut empty_board);
    while !should_stop.load(Ordering::Relaxed) {
        let estimation = alpha_beta_parallel(field, player, depth, i32::min_value() + 1, i32::max_value(), &trajectories_pruning, &mut empty_board, &mut cur_best_move, &should_stop);
        if should_stop.load(Ordering::Relaxed) {
            break;
        }
        let enemy_trajectories_pruning = TrajectoriesPruning::dec_exists(&field, enemy, depth - 1, &mut empty_board, &trajectories_pruning);
        if should_stop.load(Ordering::Relaxed) {
            break;
        }
        // Accept the candidate when the enemy's best reply cannot match our
        // estimation; if the timer fired mid-verification the enemy search
        // is only approximate, so keep the candidate in that case too.
        best_move = if -alpha_beta_parallel(field, enemy, depth - 1, -estimation, -estimation + 1, &enemy_trajectories_pruning, &mut empty_board, &mut enemy_best_move, &should_stop) < estimation || should_stop.load(Ordering::Relaxed) {
            cur_best_move
        } else {
            None
        };
        if should_stop.load(Ordering::Relaxed) {
            break;
        }
        depth += 1;
        trajectories_pruning = TrajectoriesPruning::inc_exists(field, player, depth, &mut empty_board, &trajectories_pruning);
    }
    // Joins the timer thread before returning.
    drop(guard);
    best_move
}
|
// Copyright 2017 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! network deals abstracts away the manipulation of network device and
//! interface unit files. All that is left is to write the resulting string to
//! the necessary unit.
use std::net::IpAddr;
use pnet::util::MacAddr;
use std::string::String;
use std::string::ToString;
use ipnetwork::IpNetwork;
use errors::*;
// Numeric bonding modes. NOTE(review): presumably the Linux bonding driver's
// mode numbers — confirm against the kernel bonding documentation.
pub const BONDING_MODE_BALANCE_RR: u32 = 0;
pub const BONDING_MODE_ACTIVE_BACKUP: u32 = 1;
pub const BONDING_MODE_BALANCE_XOR: u32 = 2;
pub const BONDING_MODE_BROADCAST: u32 = 3;
pub const BONDING_MODE_LACP: u32 = 4;
pub const BONDING_MODE_BALANCE_TLB: u32 = 5;
pub const BONDING_MODE_BALANCE_ALB: u32 = 6;
// Mode-number to mode-name table used by `bonding_mode_to_string`.
const BONDING_MODES: [(u32,&str); 7] = [
    (BONDING_MODE_BALANCE_RR,"balance-rr"),
    (BONDING_MODE_ACTIVE_BACKUP,"active-backup"),
    (BONDING_MODE_BALANCE_XOR,"balance-xor"),
    (BONDING_MODE_BROADCAST,"broadcast"),
    (BONDING_MODE_LACP,"802.3ad"),
    (BONDING_MODE_BALANCE_TLB,"balance-tlb"),
    (BONDING_MODE_BALANCE_ALB,"balance-alb"),
];
/// Looks up the string name for a numeric bonding mode.
///
/// Returns an error when `mode` is not one of the known bonding modes.
pub fn bonding_mode_to_string(mode: &u32) -> Result<String> {
    match BONDING_MODES.iter().find(|&&(m, _)| m == *mode) {
        Some(&(_, name)) => Ok(name.to_owned()),
        None => Err(format!("no such bonding mode: {}", mode).into()),
    }
}
/// A single static route: traffic for `destination` is sent via `gateway`.
// These structs are plain comparable data; derive PartialEq/Eq so callers
// (and tests) can compare them directly.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct NetworkRoute {
    pub destination: IpNetwork,
    pub gateway: IpAddr,
}

/// for naming purposes an interface needs either a name or an address.
/// it can have both. but it can't have neither.
/// there isn't really a way to express this in the type system
/// so we just panic! if it's not what we expected.
/// I guess that there aren't really type systems with inclusive disjunction
/// so it's not really that big of a deal.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Interface {
    pub name: Option<String>,
    pub mac_address: Option<MacAddr>,
    /// Unit-file ordering prefix; defaults to 10 when `None` (see `unit_name`).
    pub priority: Option<u32>,
    pub nameservers: Vec<IpAddr>,
    pub ip_addresses: Vec<IpNetwork>,
    pub routes: Vec<NetworkRoute>,
    /// Value for the `Bond=` option, if this interface is part of a bond.
    pub bond: Option<String>,
}

/// A named key/value section of a unit file.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Section {
    pub name: String,
    pub attributes: Vec<(String, String)>,
}

/// A virtual network device, rendered as a .netdev unit.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Device {
    pub name: String,
    pub kind: String,
    pub mac_address: MacAddr,
    pub priority: Option<u32>,
    pub sections: Vec<Section>,
}
impl Interface {
    /// Returns the network unit file name for this interface, e.g.
    /// "10-eth0.network": priority (default 10), then the name, falling
    /// back to the MAC address when no name is set.
    ///
    /// # Panics
    ///
    /// Panics if the interface has neither a name nor a MAC address.
    pub fn unit_name(&self) -> String {
        format!("{:02}-{}.network",
                self.priority.unwrap_or(10),
                self.name.clone().unwrap_or_else(
                    // needs to be a lambda or we panic immediately
                    // yay, manual thunking!
                    ||self.mac_address.unwrap_or_else(
                        ||panic!("interface needs either name or mac address (or both)")
                    ).to_string()
                ))
    }
    /// Renders the contents of the .network unit file for this interface.
    pub fn config(&self) -> String {
        let mut config = String::new();
        // [Match] section
        config.push_str("[Match]\n");
        // `if let` instead of `Option::map` for side effects; also avoids
        // cloning `name`/`bond` just to check for presence.
        if let Some(ref name) = self.name {
            config.push_str(&format!("Name={}\n", name));
        }
        if let Some(mac) = self.mac_address {
            config.push_str(&format!("MACAddress={}\n", mac));
        }
        // [Network] section
        config.push_str("\n[Network]\n");
        for ns in &self.nameservers {
            config.push_str(&format!("DNS={}\n", ns))
        }
        if let Some(ref bond) = self.bond {
            config.push_str(&format!("Bond={}\n", bond));
        }
        // one [Address] section per address
        for addr in &self.ip_addresses {
            config.push_str(&format!("\n[Address]\nAddress={}\n", addr));
        }
        // one [Route] section per route
        for route in &self.routes {
            config.push_str(&format!("\n[Route]\nDestination={}\nGateway={}\n", route.destination, route.gateway));
        }
        config
    }
}
impl Device {
pub fn unit_name(&self) -> String {
format!("{:02}-{}.netdev", self.priority.unwrap_or(10), self.name)
}
pub fn config(&self) -> String {
let mut config = String::new();
// [NetDev] section
config.push_str("[NetDev]\n");
config.push_str(&format!("Name={}\n", self.name));
config.push_str(&format!("Kind={}\n", self.kind));
config.push_str(&format!("MACAddress={}\n", self.mac_address));
// custom sections
for section in &self.sections {
config.push_str(&format!("\n[{}]\n", section.name));
for attr in §ion.attributes {
config.push_str(&format!("{}={}\n", attr.0, attr.1));
}
}
config
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::net::{Ipv4Addr, Ipv6Addr};
    use ipnetwork::{Ipv4Network,Ipv6Network};

    // Sanity-check that MacAddr renders in the colon-separated form we
    // embed in unit names and MACAddress= lines.
    #[test]
    fn mac_addr_display() {
        let m = MacAddr(0xf4,0x00,0x34,0x09,0x73,0xee);
        assert_eq!(m.to_string(), "f4:00:34:09:73:ee");
    }

    #[test]
    fn interface_unit_name() {
        let is = vec![
            // explicit priority and name
            (Interface {
                name: Some(String::from("lo")),
                mac_address: Some(MacAddr(0,0,0,0,0,0)),
                priority: Some(20),
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "20-lo.network"),
            // priority defaults to 10
            (Interface {
                name: Some(String::from("lo")),
                mac_address: Some(MacAddr(0,0,0,0,0,0)),
                priority: None,
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "10-lo.network"),
            // no name: fall back to the MAC address
            (Interface {
                name: None,
                mac_address: Some(MacAddr(0,0,0,0,0,0)),
                priority: Some(20),
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "20-00:00:00:00:00:00.network"),
            // name wins over the MAC address
            (Interface {
                name: Some(String::from("lo")),
                mac_address: None,
                priority: Some(20),
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "20-lo.network"),
        ];
        for (i, s) in is {
            assert_eq!(i.unit_name(), s);
        }
    }

    #[test]
    #[should_panic]
    fn interface_unit_name_no_name_no_mac() {
        let i = Interface {
            name: None,
            mac_address: None,
            priority: Some(20),
            nameservers: vec![],
            ip_addresses: vec![],
            routes: vec![],
            bond: None,
        };
        let _name = i.unit_name();
    }

    #[test]
    fn device_unit_name() {
        let ds = vec![
            (Device {
                name: String::from("vlan0"),
                kind: String::from("vlan"),
                mac_address: MacAddr(0,0,0,0,0,0),
                priority: Some(20),
                sections: vec![],
            }, "20-vlan0.netdev"),
            (Device {
                name: String::from("vlan0"),
                kind: String::from("vlan"),
                mac_address: MacAddr(0,0,0,0,0,0),
                priority: None,
                sections: vec![],
            }, "10-vlan0.netdev"),
        ];
        for (d, s) in ds {
            assert_eq!(d.unit_name(), s);
        }
    }

    // NOTE(review): the expected strings below contain the blank lines
    // that config() emits before each section ("\n[Network]\n", etc.);
    // they had been lost in a whitespace-mangled copy of this file.
    #[test]
    fn interface_config() {
        let is = vec![
            (Interface {
                name: Some(String::from("lo")),
                mac_address: Some(MacAddr(0,0,0,0,0,0)),
                priority: Some(20),
                nameservers: vec![
                    IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
                    IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)),
                ],
                ip_addresses: vec![
                    IpNetwork::V4(Ipv4Network::new(
                        Ipv4Addr::new(127, 0, 0, 1),
                        8
                    ).unwrap()),
                    IpNetwork::V6(Ipv6Network::new(
                        Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1),
                        128
                    ).unwrap()),
                ],
                routes: vec![
                    NetworkRoute {
                        destination: IpNetwork::V4(Ipv4Network::new(
                            Ipv4Addr::new(127, 0, 0, 1),
                            8
                        ).unwrap()),
                        gateway: IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
                    }
                ],
                bond: Some(String::from("james")),
            }, "[Match]
Name=lo
MACAddress=00:00:00:00:00:00

[Network]
DNS=127.0.0.1
DNS=::1
Bond=james

[Address]
Address=127.0.0.1/8

[Address]
Address=::1/128

[Route]
Destination=127.0.0.1/8
Gateway=127.0.0.1
"),
            // this isn't really a valid interface object, but it's testing
            // the minimum possible configuration for all pieces at the same
            // time, so I'll allow it. (sdemos)
            (Interface {
                name: None,
                mac_address: None,
                priority: None,
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "[Match]

[Network]
")
        ];
        for (i, s) in is {
            assert_eq!(i.config(), s);
        }
    }

    #[test]
    fn device_config() {
        let ds = vec![
            (Device {
                name: String::from("vlan0"),
                kind: String::from("vlan"),
                mac_address: MacAddr(0,0,0,0,0,0),
                priority: Some(20),
                sections: vec![
                    Section {
                        name: String::from("Test"),
                        attributes: vec![
                            (String::from("foo"), String::from("bar")),
                            (String::from("oingo"), String::from("boingo")),
                        ]
                    },
                    Section {
                        name: String::from("Empty"),
                        attributes: vec![],
                    }
                ],
            }, "[NetDev]
Name=vlan0
Kind=vlan
MACAddress=00:00:00:00:00:00

[Test]
foo=bar
oingo=boingo

[Empty]
"),
            (Device {
                name: String::from("vlan0"),
                kind: String::from("vlan"),
                mac_address: MacAddr(0,0,0,0,0,0),
                priority: Some(20),
                sections: vec![],
            }, "[NetDev]
Name=vlan0
Kind=vlan
MACAddress=00:00:00:00:00:00
")
        ];
        for (d, s) in ds {
            assert_eq!(d.config(), s);
        }
    }
}
network: add Eq and PartialEq derives to structs
These structs are trivially comparable, so let's provide the
implementations.
// Copyright 2017 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The network module abstracts away the manipulation of network device and
//! interface unit files. All that is left is to write the resulting string to
//! the necessary unit.
use std::net::IpAddr;
use pnet::util::MacAddr;
use std::string::String;
use std::string::ToString;
use ipnetwork::IpNetwork;
use errors::*;
// Numeric bonding-mode identifiers; the values correspond to the mode
// numbers used by the Linux bonding driver.
pub const BONDING_MODE_BALANCE_RR: u32 = 0;
pub const BONDING_MODE_ACTIVE_BACKUP: u32 = 1;
pub const BONDING_MODE_BALANCE_XOR: u32 = 2;
pub const BONDING_MODE_BROADCAST: u32 = 3;
pub const BONDING_MODE_LACP: u32 = 4;
pub const BONDING_MODE_BALANCE_TLB: u32 = 5;
pub const BONDING_MODE_BALANCE_ALB: u32 = 6;

// Lookup table from bonding-mode number to the string name used in the
// generated configuration (see `bonding_mode_to_string`).
const BONDING_MODES: [(u32,&str); 7] = [
    (BONDING_MODE_BALANCE_RR,"balance-rr"),
    (BONDING_MODE_ACTIVE_BACKUP,"active-backup"),
    (BONDING_MODE_BALANCE_XOR,"balance-xor"),
    (BONDING_MODE_BROADCAST,"broadcast"),
    (BONDING_MODE_LACP,"802.3ad"),
    (BONDING_MODE_BALANCE_TLB,"balance-tlb"),
    (BONDING_MODE_BALANCE_ALB,"balance-alb"),
];
/// Looks up the string name for a numeric bonding mode.
///
/// Returns an error when `mode` is not one of the known bonding modes.
pub fn bonding_mode_to_string(mode: &u32) -> Result<String> {
    match BONDING_MODES.iter().find(|&&(m, _)| m == *mode) {
        Some(&(_, name)) => Ok(name.to_owned()),
        None => Err(format!("no such bonding mode: {}", mode).into()),
    }
}
/// A single static route: traffic for `destination` is sent via `gateway`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct NetworkRoute {
    pub destination: IpNetwork,
    pub gateway: IpAddr,
}

/// for naming purposes an interface needs either a name or an address.
/// it can have both. but it can't have neither.
/// there isn't really a way to express this in the type system
/// so we just panic! if it's not what we expected.
/// I guess that there aren't really type systems with inclusive disjunction
/// so it's not really that big of a deal.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Interface {
    pub name: Option<String>,
    pub mac_address: Option<MacAddr>,
    // Unit-file ordering prefix; defaults to 10 when `None` (see `unit_name`).
    pub priority: Option<u32>,
    pub nameservers: Vec<IpAddr>,
    pub ip_addresses: Vec<IpNetwork>,
    pub routes: Vec<NetworkRoute>,
    // Value for the `Bond=` option, if this interface is part of a bond.
    pub bond: Option<String>,
}

/// A named key/value section of a unit file.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Section {
    pub name: String,
    pub attributes: Vec<(String, String)>,
}

/// A virtual network device, rendered as a .netdev unit.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Device {
    pub name: String,
    pub kind: String,
    pub mac_address: MacAddr,
    pub priority: Option<u32>,
    pub sections: Vec<Section>
}
impl Interface {
    /// Returns the network unit file name for this interface, e.g.
    /// "10-eth0.network": priority (default 10), then the name, falling
    /// back to the MAC address when no name is set.
    ///
    /// # Panics
    ///
    /// Panics if the interface has neither a name nor a MAC address.
    pub fn unit_name(&self) -> String {
        format!("{:02}-{}.network",
                self.priority.unwrap_or(10),
                self.name.clone().unwrap_or_else(
                    // needs to be a lambda or we panic immediately
                    // yay, manual thunking!
                    ||self.mac_address.unwrap_or_else(
                        ||panic!("interface needs either name or mac address (or both)")
                    ).to_string()
                ))
    }
    /// Renders the contents of the .network unit file for this interface.
    pub fn config(&self) -> String {
        let mut config = String::new();
        // [Match] section
        config.push_str("[Match]\n");
        // `if let` instead of `Option::map` for side effects; also avoids
        // cloning `name`/`bond` just to check for presence.
        if let Some(ref name) = self.name {
            config.push_str(&format!("Name={}\n", name));
        }
        if let Some(mac) = self.mac_address {
            config.push_str(&format!("MACAddress={}\n", mac));
        }
        // [Network] section
        config.push_str("\n[Network]\n");
        for ns in &self.nameservers {
            config.push_str(&format!("DNS={}\n", ns))
        }
        if let Some(ref bond) = self.bond {
            config.push_str(&format!("Bond={}\n", bond));
        }
        // one [Address] section per address
        for addr in &self.ip_addresses {
            config.push_str(&format!("\n[Address]\nAddress={}\n", addr));
        }
        // one [Route] section per route
        for route in &self.routes {
            config.push_str(&format!("\n[Route]\nDestination={}\nGateway={}\n", route.destination, route.gateway));
        }
        config
    }
}
impl Device {
pub fn unit_name(&self) -> String {
format!("{:02}-{}.netdev", self.priority.unwrap_or(10), self.name)
}
pub fn config(&self) -> String {
let mut config = String::new();
// [NetDev] section
config.push_str("[NetDev]\n");
config.push_str(&format!("Name={}\n", self.name));
config.push_str(&format!("Kind={}\n", self.kind));
config.push_str(&format!("MACAddress={}\n", self.mac_address));
// custom sections
for section in &self.sections {
config.push_str(&format!("\n[{}]\n", section.name));
for attr in §ion.attributes {
config.push_str(&format!("{}={}\n", attr.0, attr.1));
}
}
config
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::net::{Ipv4Addr, Ipv6Addr};
    use ipnetwork::{Ipv4Network,Ipv6Network};

    // Sanity-check that MacAddr renders in the colon-separated form we
    // embed in unit names and MACAddress= lines.
    #[test]
    fn mac_addr_display() {
        let m = MacAddr(0xf4,0x00,0x34,0x09,0x73,0xee);
        assert_eq!(m.to_string(), "f4:00:34:09:73:ee");
    }

    #[test]
    fn interface_unit_name() {
        let is = vec![
            // explicit priority and name
            (Interface {
                name: Some(String::from("lo")),
                mac_address: Some(MacAddr(0,0,0,0,0,0)),
                priority: Some(20),
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "20-lo.network"),
            // priority defaults to 10
            (Interface {
                name: Some(String::from("lo")),
                mac_address: Some(MacAddr(0,0,0,0,0,0)),
                priority: None,
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "10-lo.network"),
            // no name: fall back to the MAC address
            (Interface {
                name: None,
                mac_address: Some(MacAddr(0,0,0,0,0,0)),
                priority: Some(20),
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "20-00:00:00:00:00:00.network"),
            // name wins over the MAC address
            (Interface {
                name: Some(String::from("lo")),
                mac_address: None,
                priority: Some(20),
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "20-lo.network"),
        ];
        for (i, s) in is {
            assert_eq!(i.unit_name(), s);
        }
    }

    #[test]
    #[should_panic]
    fn interface_unit_name_no_name_no_mac() {
        let i = Interface {
            name: None,
            mac_address: None,
            priority: Some(20),
            nameservers: vec![],
            ip_addresses: vec![],
            routes: vec![],
            bond: None,
        };
        let _name = i.unit_name();
    }

    #[test]
    fn device_unit_name() {
        let ds = vec![
            (Device {
                name: String::from("vlan0"),
                kind: String::from("vlan"),
                mac_address: MacAddr(0,0,0,0,0,0),
                priority: Some(20),
                sections: vec![],
            }, "20-vlan0.netdev"),
            (Device {
                name: String::from("vlan0"),
                kind: String::from("vlan"),
                mac_address: MacAddr(0,0,0,0,0,0),
                priority: None,
                sections: vec![],
            }, "10-vlan0.netdev"),
        ];
        for (d, s) in ds {
            assert_eq!(d.unit_name(), s);
        }
    }

    // NOTE(review): the expected strings below contain the blank lines
    // that config() emits before each section ("\n[Network]\n", etc.);
    // they had been lost in a whitespace-mangled copy of this file.
    #[test]
    fn interface_config() {
        let is = vec![
            (Interface {
                name: Some(String::from("lo")),
                mac_address: Some(MacAddr(0,0,0,0,0,0)),
                priority: Some(20),
                nameservers: vec![
                    IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
                    IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1)),
                ],
                ip_addresses: vec![
                    IpNetwork::V4(Ipv4Network::new(
                        Ipv4Addr::new(127, 0, 0, 1),
                        8
                    ).unwrap()),
                    IpNetwork::V6(Ipv6Network::new(
                        Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1),
                        128
                    ).unwrap()),
                ],
                routes: vec![
                    NetworkRoute {
                        destination: IpNetwork::V4(Ipv4Network::new(
                            Ipv4Addr::new(127, 0, 0, 1),
                            8
                        ).unwrap()),
                        gateway: IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
                    }
                ],
                bond: Some(String::from("james")),
            }, "[Match]
Name=lo
MACAddress=00:00:00:00:00:00

[Network]
DNS=127.0.0.1
DNS=::1
Bond=james

[Address]
Address=127.0.0.1/8

[Address]
Address=::1/128

[Route]
Destination=127.0.0.1/8
Gateway=127.0.0.1
"),
            // this isn't really a valid interface object, but it's testing
            // the minimum possible configuration for all pieces at the same
            // time, so I'll allow it. (sdemos)
            (Interface {
                name: None,
                mac_address: None,
                priority: None,
                nameservers: vec![],
                ip_addresses: vec![],
                routes: vec![],
                bond: None,
            }, "[Match]

[Network]
")
        ];
        for (i, s) in is {
            assert_eq!(i.config(), s);
        }
    }

    #[test]
    fn device_config() {
        let ds = vec![
            (Device {
                name: String::from("vlan0"),
                kind: String::from("vlan"),
                mac_address: MacAddr(0,0,0,0,0,0),
                priority: Some(20),
                sections: vec![
                    Section {
                        name: String::from("Test"),
                        attributes: vec![
                            (String::from("foo"), String::from("bar")),
                            (String::from("oingo"), String::from("boingo")),
                        ]
                    },
                    Section {
                        name: String::from("Empty"),
                        attributes: vec![],
                    }
                ],
            }, "[NetDev]
Name=vlan0
Kind=vlan
MACAddress=00:00:00:00:00:00

[Test]
foo=bar
oingo=boingo

[Empty]
"),
            (Device {
                name: String::from("vlan0"),
                kind: String::from("vlan"),
                mac_address: MacAddr(0,0,0,0,0,0),
                priority: Some(20),
                sections: vec![],
            }, "[NetDev]
Name=vlan0
Kind=vlan
MACAddress=00:00:00:00:00:00
")
        ];
        for (d, s) in ds {
            assert_eq!(d.config(), s);
        }
    }
}
|
//! Create and manage user-defined networks that containers can be attached to.
//!
//! API Reference: <https://docs.docker.com/engine/api/v1.41/#tag/Network>
use std::{
collections::{BTreeMap, HashMap},
hash::Hash,
};
use hyper::Body;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use url::form_urlencoded;
use crate::{
docker::Docker,
errors::{Error, Result},
};
/// Interface for docker network
pub struct Networks<'docker> {
    docker: &'docker Docker,
}

impl<'docker> Networks<'docker> {
    /// Exports an interface for interacting with docker Networks.
    pub fn new(docker: &'docker Docker) -> Self {
        Networks { docker }
    }

    /// List the docker networks on the current docker host.
    pub async fn list(
        &self,
        opts: &NetworkListOptions,
    ) -> Result<Vec<NetworkInfo>> {
        // Append the serialized options as a query string, when present.
        let path = match opts.serialize() {
            Some(query) => format!("/networks?{}", query),
            None => "/networks".to_owned(),
        };
        self.docker.get_json(&path).await
    }

    /// Returns a reference to a set of operations available to a specific
    /// network instance.
    pub fn get<S>(
        &self,
        id: S,
    ) -> Network<'docker>
    where
        S: Into<String>,
    {
        Network::new(self.docker, id)
    }

    /// Create a new network from the given options.
    pub async fn create(
        &self,
        opts: &NetworkCreateOptions,
    ) -> Result<NetworkCreateInfo> {
        let body: Body = opts.serialize()?.into();
        self.docker
            .post_json("/networks/create", Some((body, mime::APPLICATION_JSON)))
            .await
    }
}
/// Interface for accessing and manipulating a docker network
pub struct Network<'docker> {
    docker: &'docker Docker,
    id: String,
}

impl<'docker> Network<'docker> {
    /// Exports an interface exposing operations against a network instance.
    pub fn new<S>(
        docker: &'docker Docker,
        id: S,
    ) -> Self
    where
        S: Into<String>,
    {
        let id = id.into();
        Network { docker, id }
    }

    /// A getter for the network id.
    pub fn id(&self) -> &str {
        self.id.as_str()
    }

    /// Inspects the current docker network instance's details.
    pub async fn inspect(&self) -> Result<NetworkInfo> {
        let path = format!("/networks/{}", self.id);
        self.docker.get_json(path.as_str()).await
    }

    /// Delete the network instance.
    pub async fn delete(&self) -> Result<()> {
        let path = format!("/networks/{}", self.id);
        self.docker.delete(path.as_str()).await?;
        Ok(())
    }

    /// Connect a container to this network.
    pub async fn connect(
        &self,
        opts: &ContainerConnectionOptions,
    ) -> Result<()> {
        self.do_connection("connect", opts).await
    }

    /// Disconnect a container from this network.
    pub async fn disconnect(
        &self,
        opts: &ContainerConnectionOptions,
    ) -> Result<()> {
        self.do_connection("disconnect", opts).await
    }

    // POSTs the serialized options to /networks/{id}/{segment}.
    async fn do_connection(
        &self,
        segment: &str,
        opts: &ContainerConnectionOptions,
    ) -> Result<()> {
        let body: Body = opts.serialize()?.into();
        let path = format!("/networks/{}/{}", self.id, segment);
        self.docker
            .post(path.as_str(), Some((body, mime::APPLICATION_JSON)))
            .await?;
        Ok(())
    }
}
/// Options for filtering networks list results
#[derive(Default, Debug)]
pub struct NetworkListOptions {
    // Query parameters accumulated for the list endpoint.
    params: HashMap<&'static str, String>,
}

impl NetworkListOptions {
    /// Serialize options as a url-encoded query string; returns `None`
    /// when no options are defined.
    pub fn serialize(&self) -> Option<String> {
        if self.params.is_empty() {
            return None;
        }
        let query = form_urlencoded::Serializer::new(String::new())
            .extend_pairs(&self.params)
            .finish();
        Some(query)
    }
}
/// Interface for creating new docker network
#[derive(Serialize, Debug)]
pub struct NetworkCreateOptions {
    // Request-body fields accumulated by the builder.
    params: HashMap<&'static str, Value>,
}

impl NetworkCreateOptions {
    /// Return a new instance of a builder for options.
    pub fn builder(name: &str) -> NetworkCreateOptionsBuilder {
        NetworkCreateOptionsBuilder::new(name)
    }

    /// Serialize the collected options as a JSON string.
    pub fn serialize(&self) -> Result<String> {
        let json = serde_json::to_string(&self.params)?;
        Ok(json)
    }

    /// Copy `params` into `body`, stringifying keys and JSON-encoding values.
    pub fn parse_from<'a, K, V>(
        &self,
        params: &'a HashMap<K, V>,
        body: &mut serde_json::Map<String, Value>,
    ) where
        &'a HashMap<K, V>: IntoIterator,
        K: ToString + Eq + Hash,
        V: Serialize,
    {
        for (name, value) in params.iter() {
            // NOTE(review): assumes every value serializes to JSON without
            // error — confirm for the key/value types in use.
            body.insert(name.to_string(), serde_json::to_value(value).unwrap());
        }
    }
}
#[derive(Default)]
pub struct NetworkCreateOptionsBuilder {
    params: HashMap<&'static str, Value>,
}

impl NetworkCreateOptionsBuilder {
    // Starts a builder pre-populated with the mandatory network name.
    pub(crate) fn new(name: &str) -> Self {
        let mut builder = NetworkCreateOptionsBuilder::default();
        builder.params.insert("Name", json!(name));
        builder
    }

    /// Sets the network driver; an empty name is ignored.
    pub fn driver(
        &mut self,
        name: &str,
    ) -> &mut Self {
        if name.is_empty() {
            return self;
        }
        self.params.insert("Driver", json!(name));
        self
    }

    /// Attaches user-defined labels to the network.
    pub fn label(
        &mut self,
        labels: HashMap<String, String>,
    ) -> &mut Self {
        self.params.insert("Labels", json!(labels));
        self
    }

    /// Finalizes the accumulated parameters into options.
    pub fn build(&self) -> NetworkCreateOptions {
        let params = self.params.clone();
        NetworkCreateOptions { params }
    }
}
/// Interface for connect container to network
#[derive(Serialize, Debug)]
pub struct ContainerConnectionOptions {
    // Request-body fields accumulated by the builder.
    params: HashMap<&'static str, Value>,
}

impl ContainerConnectionOptions {
    /// Serialize the collected options as a JSON string.
    pub fn serialize(&self) -> Result<String> {
        let json = serde_json::to_string(&self.params)?;
        Ok(json)
    }

    /// Copy `params` into `body`, stringifying keys and JSON-encoding values.
    pub fn parse_from<'a, K, V>(
        &self,
        params: &'a HashMap<K, V>,
        body: &mut BTreeMap<String, Value>,
    ) where
        &'a HashMap<K, V>: IntoIterator,
        K: ToString + Eq + Hash,
        V: Serialize,
    {
        for (name, value) in params.iter() {
            // NOTE(review): assumes every value serializes to JSON without
            // error — confirm for the key/value types in use.
            body.insert(name.to_string(), serde_json::to_value(value).unwrap());
        }
    }

    /// Return a new instance of a builder for options.
    pub fn builder(container_id: &str) -> ContainerConnectionOptionsBuilder {
        ContainerConnectionOptionsBuilder::new(container_id)
    }
}
#[derive(Default)]
pub struct ContainerConnectionOptionsBuilder {
    params: HashMap<&'static str, Value>,
}

impl ContainerConnectionOptionsBuilder {
    // Starts a builder pre-populated with the target container id.
    pub(crate) fn new(container_id: &str) -> Self {
        let mut builder = ContainerConnectionOptionsBuilder::default();
        builder.params.insert("Container", json!(container_id));
        builder
    }

    /// Sets network-scoped aliases for the container's endpoint.
    pub fn aliases(
        &mut self,
        aliases: Vec<&str>,
    ) -> &mut Self {
        let endpoint = json!({ "Aliases": json!(aliases) });
        self.params.insert("EndpointConfig", endpoint);
        self
    }

    /// Sets the `Force` flag in the request body.
    pub fn force(&mut self) -> &mut Self {
        self.params.insert("Force", json!(true));
        self
    }

    /// Finalizes the accumulated parameters into options.
    pub fn build(&self) -> ContainerConnectionOptions {
        let params = self.params.clone();
        ContainerConnectionOptions { params }
    }
}
// Port map: port spec (presumably "port/proto") to host-side binding
// entries, if any — TODO confirm against the inspect payload.
type PortDescription = HashMap<String, Option<Vec<HashMap<String, String>>>>;

/// Network-related settings, including per-network endpoint entries.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkSettings {
    pub bridge: String,
    pub gateway: String,
    #[serde(rename = "IPAddress")]
    pub ip_address: String,
    #[serde(rename = "IPPrefixLen")]
    pub ip_prefix_len: u64,
    pub mac_address: String,
    pub ports: Option<PortDescription>,
    // Per-network endpoint details, keyed by network name.
    pub networks: HashMap<String, NetworkEntry>,
}
/// One network's endpoint details inside `NetworkSettings.networks`.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkEntry {
    #[serde(rename = "NetworkID")]
    pub network_id: String,
    #[serde(rename = "EndpointID")]
    pub endpoint_id: String,
    pub gateway: String,
    #[serde(rename = "IPAddress")]
    pub ip_address: String,
    #[serde(rename = "IPPrefixLen")]
    pub ip_prefix_len: u64,
    #[serde(rename = "IPv6Gateway")]
    pub ipv6_gateway: String,
    #[serde(rename = "GlobalIPv6Address")]
    pub global_ipv6_address: String,
    #[serde(rename = "GlobalIPv6PrefixLen")]
    pub global_ipv6_prefix_len: u64,
    pub mac_address: String,
    // Previously-missing EndpointSettings fields. They may be absent or
    // null in the daemon's JSON, so `Option` keeps deserialization
    // backward-compatible.
    pub links: Option<Vec<String>>,
    pub aliases: Option<Vec<String>>,
    #[serde(rename = "IPAMConfig")]
    pub ipam_config: Option<EndpointIPAMConfig>,
}

/// Static IPAM configuration of an endpoint (EndpointIPAMConfig in the API).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EndpointIPAMConfig {
    #[serde(rename = "IPv4Address")]
    pub ipv4_address: String,
    #[serde(rename = "IPv6Address")]
    pub ipv6_address: String,
    #[serde(rename = "LinkLocalIPs")]
    pub link_local_ips: Vec<String>,
}
// NOTE(review): despite the name, these fields are traffic counters, yet
// `Networks::list` / `Network::inspect` deserialize into this type —
// confirm the intended payload shape against the API endpoints used.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NetworkInfo {
    pub rx_dropped: u64,
    pub rx_bytes: u64,
    pub rx_errors: u64,
    pub tx_packets: u64,
    pub tx_dropped: u64,
    pub rx_packets: u64,
    pub tx_errors: u64,
    pub tx_bytes: u64,
}

/// IP address management configuration of a network.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct IPAM {
    pub driver: String,
    pub config: Vec<HashMap<String, String>>,
    pub options: Option<HashMap<String, String>>,
}

/// Detailed description of a docker network.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkDetails {
    pub name: String,
    pub id: String,
    pub scope: String,
    pub driver: String,
    #[serde(rename = "EnableIPv6")]
    pub enable_ipv6: bool,
    #[serde(rename = "IPAM")]
    pub ipam: IPAM,
    pub internal: bool,
    pub attachable: bool,
    // Attached containers; presumably keyed by container id — verify.
    pub containers: HashMap<String, NetworkContainerDetails>,
    pub options: Option<HashMap<String, String>>,
    pub labels: Option<HashMap<String, String>>,
}

/// Addressing details of one container attached to a network.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkContainerDetails {
    #[serde(rename = "EndpointID")]
    pub endpoint_id: String,
    pub mac_address: String,
    #[serde(rename = "IPv4Address")]
    pub ipv4_address: String,
    #[serde(rename = "IPv6Address")]
    pub ipv6_address: String,
}

/// Response of the network-create endpoint: the new network's id plus any
/// warning emitted by the daemon.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkCreateInfo {
    pub id: String,
    pub warning: String,
}
Add missing fields to NetworkEntry
//! Create and manage user-defined networks that containers can be attached to.
//!
//! API Reference: <https://docs.docker.com/engine/api/v1.41/#tag/Network>
use std::{
collections::{BTreeMap, HashMap},
hash::Hash,
};
use hyper::Body;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use url::form_urlencoded;
use crate::{
docker::Docker,
errors::{Error, Result},
};
/// Interface for docker network
pub struct Networks<'docker> {
    docker: &'docker Docker,
}

impl<'docker> Networks<'docker> {
    /// Exports an interface for interacting with docker Networks.
    pub fn new(docker: &'docker Docker) -> Self {
        Networks { docker }
    }

    /// List the docker networks on the current docker host.
    pub async fn list(
        &self,
        opts: &NetworkListOptions,
    ) -> Result<Vec<NetworkInfo>> {
        // Append the serialized options as a query string, when present.
        let path = match opts.serialize() {
            Some(query) => format!("/networks?{}", query),
            None => "/networks".to_owned(),
        };
        self.docker.get_json(&path).await
    }

    /// Returns a reference to a set of operations available to a specific
    /// network instance.
    pub fn get<S>(
        &self,
        id: S,
    ) -> Network<'docker>
    where
        S: Into<String>,
    {
        Network::new(self.docker, id)
    }

    /// Create a new network from the given options.
    pub async fn create(
        &self,
        opts: &NetworkCreateOptions,
    ) -> Result<NetworkCreateInfo> {
        let body: Body = opts.serialize()?.into();
        self.docker
            .post_json("/networks/create", Some((body, mime::APPLICATION_JSON)))
            .await
    }
}
/// Interface for accessing and manipulating a docker network
pub struct Network<'docker> {
    docker: &'docker Docker,
    id: String,
}

impl<'docker> Network<'docker> {
    /// Exports an interface exposing operations against a network instance.
    pub fn new<S>(
        docker: &'docker Docker,
        id: S,
    ) -> Self
    where
        S: Into<String>,
    {
        let id = id.into();
        Network { docker, id }
    }

    /// A getter for the network id.
    pub fn id(&self) -> &str {
        self.id.as_str()
    }

    /// Inspects the current docker network instance's details.
    pub async fn inspect(&self) -> Result<NetworkInfo> {
        let path = format!("/networks/{}", self.id);
        self.docker.get_json(path.as_str()).await
    }

    /// Delete the network instance.
    pub async fn delete(&self) -> Result<()> {
        let path = format!("/networks/{}", self.id);
        self.docker.delete(path.as_str()).await?;
        Ok(())
    }

    /// Connect a container to this network.
    pub async fn connect(
        &self,
        opts: &ContainerConnectionOptions,
    ) -> Result<()> {
        self.do_connection("connect", opts).await
    }

    /// Disconnect a container from this network.
    pub async fn disconnect(
        &self,
        opts: &ContainerConnectionOptions,
    ) -> Result<()> {
        self.do_connection("disconnect", opts).await
    }

    // POSTs the serialized options to /networks/{id}/{segment}.
    async fn do_connection(
        &self,
        segment: &str,
        opts: &ContainerConnectionOptions,
    ) -> Result<()> {
        let body: Body = opts.serialize()?.into();
        let path = format!("/networks/{}/{}", self.id, segment);
        self.docker
            .post(path.as_str(), Some((body, mime::APPLICATION_JSON)))
            .await?;
        Ok(())
    }
}
/// Options for filtering networks list results
#[derive(Default, Debug)]
pub struct NetworkListOptions {
    // Query parameters accumulated for the list endpoint.
    params: HashMap<&'static str, String>,
}

impl NetworkListOptions {
    /// Serialize options as a url-encoded query string; returns `None`
    /// when no options are defined.
    pub fn serialize(&self) -> Option<String> {
        if self.params.is_empty() {
            return None;
        }
        let query = form_urlencoded::Serializer::new(String::new())
            .extend_pairs(&self.params)
            .finish();
        Some(query)
    }
}
/// Interface for creating new docker network
#[derive(Serialize, Debug)]
pub struct NetworkCreateOptions {
    // Request-body fields accumulated by the builder.
    params: HashMap<&'static str, Value>,
}

impl NetworkCreateOptions {
    /// Return a new instance of a builder for options.
    pub fn builder(name: &str) -> NetworkCreateOptionsBuilder {
        NetworkCreateOptionsBuilder::new(name)
    }

    /// Serialize the collected options as a JSON string.
    pub fn serialize(&self) -> Result<String> {
        let json = serde_json::to_string(&self.params)?;
        Ok(json)
    }

    /// Copy `params` into `body`, stringifying keys and JSON-encoding values.
    pub fn parse_from<'a, K, V>(
        &self,
        params: &'a HashMap<K, V>,
        body: &mut serde_json::Map<String, Value>,
    ) where
        &'a HashMap<K, V>: IntoIterator,
        K: ToString + Eq + Hash,
        V: Serialize,
    {
        for (name, value) in params.iter() {
            // NOTE(review): assumes every value serializes to JSON without
            // error — confirm for the key/value types in use.
            body.insert(name.to_string(), serde_json::to_value(value).unwrap());
        }
    }
}
#[derive(Default)]
pub struct NetworkCreateOptionsBuilder {
    params: HashMap<&'static str, Value>,
}

impl NetworkCreateOptionsBuilder {
    // Starts a builder pre-populated with the mandatory network name.
    pub(crate) fn new(name: &str) -> Self {
        let mut builder = NetworkCreateOptionsBuilder::default();
        builder.params.insert("Name", json!(name));
        builder
    }

    /// Sets the network driver; an empty name is ignored.
    pub fn driver(
        &mut self,
        name: &str,
    ) -> &mut Self {
        if name.is_empty() {
            return self;
        }
        self.params.insert("Driver", json!(name));
        self
    }

    /// Attaches user-defined labels to the network.
    pub fn label(
        &mut self,
        labels: HashMap<String, String>,
    ) -> &mut Self {
        self.params.insert("Labels", json!(labels));
        self
    }

    /// Finalizes the accumulated parameters into options.
    pub fn build(&self) -> NetworkCreateOptions {
        let params = self.params.clone();
        NetworkCreateOptions { params }
    }
}
/// Interface for connect container to network
#[derive(Serialize, Debug)]
pub struct ContainerConnectionOptions {
    // Request-body fields accumulated by the builder.
    params: HashMap<&'static str, Value>,
}

impl ContainerConnectionOptions {
    /// Serialize the collected options as a JSON string.
    pub fn serialize(&self) -> Result<String> {
        let json = serde_json::to_string(&self.params)?;
        Ok(json)
    }

    /// Copy `params` into `body`, stringifying keys and JSON-encoding values.
    pub fn parse_from<'a, K, V>(
        &self,
        params: &'a HashMap<K, V>,
        body: &mut BTreeMap<String, Value>,
    ) where
        &'a HashMap<K, V>: IntoIterator,
        K: ToString + Eq + Hash,
        V: Serialize,
    {
        for (name, value) in params.iter() {
            // NOTE(review): assumes every value serializes to JSON without
            // error — confirm for the key/value types in use.
            body.insert(name.to_string(), serde_json::to_value(value).unwrap());
        }
    }

    /// Return a new instance of a builder for options.
    pub fn builder(container_id: &str) -> ContainerConnectionOptionsBuilder {
        ContainerConnectionOptionsBuilder::new(container_id)
    }
}
#[derive(Default)]
pub struct ContainerConnectionOptionsBuilder {
    params: HashMap<&'static str, Value>,
}

impl ContainerConnectionOptionsBuilder {
    // Starts a builder pre-populated with the target container id.
    pub(crate) fn new(container_id: &str) -> Self {
        let mut builder = ContainerConnectionOptionsBuilder::default();
        builder.params.insert("Container", json!(container_id));
        builder
    }

    /// Sets network-scoped aliases for the container's endpoint.
    pub fn aliases(
        &mut self,
        aliases: Vec<&str>,
    ) -> &mut Self {
        let endpoint = json!({ "Aliases": json!(aliases) });
        self.params.insert("EndpointConfig", endpoint);
        self
    }

    /// Sets the `Force` flag in the request body.
    pub fn force(&mut self) -> &mut Self {
        self.params.insert("Force", json!(true));
        self
    }

    /// Finalizes the accumulated parameters into options.
    pub fn build(&self) -> ContainerConnectionOptions {
        let params = self.params.clone();
        ContainerConnectionOptions { params }
    }
}
// Map from "port/proto" (e.g. "80/tcp") to its host bindings;
// `None` when a port is exposed but not published.
type PortDescription = HashMap<String, Option<Vec<HashMap<String, String>>>>;
// Network settings of a container as reported by the API
// (structure matches the Docker Engine inspect payload).
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkSettings {
pub bridge: String,
pub gateway: String,
#[serde(rename = "IPAddress")]
pub ip_address: String,
#[serde(rename = "IPPrefixLen")]
pub ip_prefix_len: u64,
pub mac_address: String,
// `None` when the container publishes no ports.
pub ports: Option<PortDescription>,
// Per-network endpoint details, keyed by network name.
pub networks: HashMap<String, NetworkEntry>,
}
// One container endpoint on one network.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkEntry {
#[serde(rename = "NetworkID")]
pub network_id: String,
#[serde(rename = "EndpointID")]
pub endpoint_id: String,
pub gateway: String,
#[serde(rename = "IPAddress")]
pub ip_address: String,
#[serde(rename = "IPPrefixLen")]
pub ip_prefix_len: u64,
#[serde(rename = "IPv6Gateway")]
pub ipv6_gateway: String,
#[serde(rename = "GlobalIPv6Address")]
pub global_ipv6_address: String,
#[serde(rename = "GlobalIPv6PrefixLen")]
pub global_ipv6_prefix_len: u64,
pub mac_address: String,
pub links: Option<Vec<String>>,
pub aliases: Option<Vec<String>>,
#[serde(rename = "IPAMConfig")]
pub ipam_config: Option<EndpointIPAMConfig>,
}
// Static addressing configuration for an endpoint.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct EndpointIPAMConfig {
#[serde(rename = "IPv4Address")]
pub ipv4_address: String,
#[serde(rename = "IPv6Address")]
pub ipv6_address: String,
#[serde(rename = "LinkLocalIPs")]
pub link_local_ips: Vec<String>,
}
// Per-interface traffic counters (note: no rename attribute, so these
// fields (de)serialize with their snake_case names as written).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NetworkInfo {
pub rx_dropped: u64,
pub rx_bytes: u64,
pub rx_errors: u64,
pub tx_packets: u64,
pub tx_dropped: u64,
pub rx_packets: u64,
pub tx_errors: u64,
pub tx_bytes: u64,
}
// IP address management settings of a network.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct IPAM {
pub driver: String,
pub config: Vec<HashMap<String, String>>,
pub options: Option<HashMap<String, String>>,
}
// Full description of a network (network inspect payload).
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkDetails {
pub name: String,
pub id: String,
pub scope: String,
pub driver: String,
#[serde(rename = "EnableIPv6")]
pub enable_ipv6: bool,
#[serde(rename = "IPAM")]
pub ipam: IPAM,
pub internal: bool,
pub attachable: bool,
// Connected containers keyed by container id.
pub containers: HashMap<String, NetworkContainerDetails>,
pub options: Option<HashMap<String, String>>,
pub labels: Option<HashMap<String, String>>,
}
// Summary of one container attached to a network.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkContainerDetails {
#[serde(rename = "EndpointID")]
pub endpoint_id: String,
pub mac_address: String,
#[serde(rename = "IPv4Address")]
pub ipv4_address: String,
#[serde(rename = "IPv6Address")]
pub ipv6_address: String,
}
// Response of a network-create call: the new id plus any server warning.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct NetworkCreateInfo {
pub id: String,
pub warning: String,
}
|
// Top-level repository/build configuration, deserialized from YAML
// (see `Config::from_reader`).
#[derive(Debug, serde_derive::Deserialize)]
pub struct Config {
// Repository name; also the file stem of the .db/.files archives.
pub name: String,
// Signing key (presumably GPG) — when set, a `.sig` is uploaded next
// to each package. TODO confirm key format with the signing code.
pub package_key: Option<String>,
// When set, a `.sig` is uploaded next to the repo database.
pub repo_key: Option<String>,
pub srcdest: String,
pub logdest: String,
// Directory containing per-package PKGBUILD subdirectories.
pub pkgbuild: String,
// Per-architecture build settings.
pub builds: std::collections::HashMap<super::builder::Arch, BuildConfig>,
// Optional S3 mirror settings.
pub s3: Option<S3Config>,
}
// Build settings for one architecture.
#[derive(Debug, serde_derive::Deserialize)]
pub struct BuildConfig {
pub chroot: String,
}
// S3 bucket/region the repository is mirrored to.
#[derive(Debug, serde_derive::Deserialize)]
pub struct S3Config {
pub bucket: String,
pub region: Region,
}
// Newtype over the foreign `rusoto_core::Region` so we can implement
// `Deserialize` for it from its string name.
#[derive(Debug)]
pub struct Region(rusoto_core::Region);
impl<'de> serde::Deserialize<'de> for Region {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Region;
fn expecting(
&self,
formatter: &mut std::fmt::Formatter,
) -> Result<(), std::fmt::Error> {
write!(formatter, "a valid AWS region name")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
use std::str::FromStr;
match rusoto_core::Region::from_str(v) {
Ok(r) => Ok(Region(r)),
Err(e) => Err(serde::de::Error::invalid_value(
serde::de::Unexpected::Str(v),
&format!("{}", e).as_str(),
)),
}
}
}
deserializer.deserialize_str(Visitor {})
}
}
impl Config {
    /// Parses a `Config` from YAML read from `reader`.
    pub fn from_reader<R>(reader: R) -> serde_yaml::Result<Self>
    where
        R: std::io::Read,
    {
        serde_yaml::from_reader(reader)
    }
    /// Repository directory layout: `<name>/os/<arch>`.
    pub fn repo_dir(&self, arch: &super::builder::Arch) -> std::path::PathBuf {
        std::path::PathBuf::from(&self.name)
            .join("os")
            .join(format!("{}", arch))
    }
    /// Path of the repo database archive: `<repo_dir>/<name>.db`.
    pub fn db_path(&self, arch: &super::builder::Arch) -> std::path::PathBuf {
        self.repo_file(arch, ".db")
    }
    /// Path of the files archive: `<repo_dir>/<name>.files`.
    pub fn files_path(&self, arch: &super::builder::Arch) -> std::path::PathBuf {
        self.repo_file(arch, ".files")
    }
    /// Directory containing the PKGBUILD for `package_name`.
    pub fn package_dir(&self, package_name: &str) -> std::path::PathBuf {
        std::path::PathBuf::from(&self.pkgbuild).join(package_name)
    }
    // Shared helper: `<repo_dir>/<name><suffix>`, appending the suffix via
    // OsString so no lossy string conversion is involved.
    fn repo_file(&self, arch: &super::builder::Arch, suffix: &str) -> std::path::PathBuf {
        let mut path = self.repo_dir(arch).join(&self.name).into_os_string();
        path.push(suffix);
        std::path::PathBuf::from(path)
    }
}
// Thin wrapper around `rusoto_s3::S3Client` bound to a single bucket.
pub struct S3 {
client: rusoto_s3::S3Client,
bucket: String,
}
impl S3 {
/// Builds a client for the configured region and remembers the bucket.
pub fn new(config: &S3Config) -> Self {
let Region(ref region) = config.region;
let client = rusoto_s3::S3Client::new(region.clone());
S3 {
client,
bucket: config.bucket.to_owned(),
}
}
/// Fetches the repo `.db` and `.files` archives; missing objects are
/// treated as success (see `get`).
pub async fn download_repository(
&self,
config: &Config,
arch: &super::builder::Arch,
) -> Result<(), anyhow::Error> {
self.get(config.db_path(arch)).await?;
self.get(config.files_path(arch)).await
}
/// Uploads all packages (plus `.sig`s when `package_key` is set), then
/// the `.files` and `.db` archives (plus the db `.sig` when `repo_key`
/// is set). Stops at the first failed upload.
pub async fn upload_repository<P>(
&self,
config: &Config,
arch: &super::builder::Arch,
package_paths: &[P],
) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
{
const XZ_MIME_TYPE: &str = "application/x-xz";
const SIG_MIME_TYPE: &str = "application/pgp-signature";
const GZIP_MIME_TYPE: &str = "application/gzip";
for package_path in package_paths {
self.put(package_path, XZ_MIME_TYPE).await?;
if config.package_key.is_some() {
let mut sig_path = package_path.as_ref().as_os_str().to_os_string();
sig_path.push(".sig");
self.put(&sig_path, SIG_MIME_TYPE).await?;
}
}
self.put(config.files_path(arch), GZIP_MIME_TYPE).await?;
let db_path = config.db_path(arch);
self.put(&db_path, GZIP_MIME_TYPE).await?;
if config.repo_key.is_some() {
let mut sig_path = db_path.clone().into_os_string();
sig_path.push(".sig");
self.put(sig_path, SIG_MIME_TYPE).await?;
}
Ok(())
}
/// Downloads the object named by `path` into a local file at the same
/// relative path; a missing object (`NoSuchKey`) is treated as success.
async fn get<P>(&self, path: P) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
{
use rusoto_s3::S3;
use std::io::Write;
let path = path.as_ref();
// NOTE(review): blocking std::fs I/O inside an async fn can stall the
// executor, and the file is created (truncating any existing one)
// before we know the object exists.
let mut file = std::fs::File::create(path)?;
let request = rusoto_s3::GetObjectRequest {
bucket: self.bucket.to_owned(),
key: path.to_string_lossy().into_owned(),
..rusoto_s3::GetObjectRequest::default()
};
println!("Download {}", path.display());
match self.client.get_object(request).await {
Ok(output) => {
if let Some(body) = output.body {
use futures::StreamExt;
// NOTE(review): the unwraps below panic on stream/write
// errors instead of surfacing them as Err.
body.for_each(|buf| {
let bytes = buf.unwrap();
file.write_all(&bytes[..]).unwrap();
futures::future::ready(())
})
.await;
}
Ok(())
}
Err(rusoto_core::RusotoError::Service(rusoto_s3::GetObjectError::NoSuchKey(_))) => {
Ok(())
}
Err(e) => Err(anyhow::Error::from(e)),
}
}
/// Uploads the local file at `path` with the given MIME type, streaming
/// its contents and declaring the length from file metadata.
async fn put<P>(&self, path: P, content_type: &str) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
{
// FutureExt/TryStreamExt supply `into_stream`/`map_ok` below.
use futures::FutureExt as _;
use futures::TryStreamExt as _;
use rusoto_s3::S3;
let path = path.as_ref();
let metadata = tokio::fs::metadata(path).await?;
let stream = rusoto_s3::StreamingBody::new(
tokio::fs::read(path.to_owned())
.into_stream()
.map_ok(|b| bytes::Bytes::from(b)),
);
let request = rusoto_s3::PutObjectRequest {
bucket: self.bucket.to_owned(),
key: path.to_string_lossy().into_owned(),
content_type: Some(content_type.to_owned()),
content_length: Some(metadata.len() as i64),
body: Some(stream),
..Default::default()
};
println!("Upload {}", path.display());
self.client.put_object(request).await?;
Ok(())
}
}
Use tokio helpers
// Top-level repository/build configuration, deserialized from YAML
// (see `Config::from_reader`).
#[derive(Debug, serde_derive::Deserialize)]
pub struct Config {
// Repository name; also the file stem of the .db/.files archives.
pub name: String,
// When set, a `.sig` is uploaded next to each package.
pub package_key: Option<String>,
// When set, a `.sig` is uploaded next to the repo database.
pub repo_key: Option<String>,
pub srcdest: String,
pub logdest: String,
// Directory containing per-package PKGBUILD subdirectories.
pub pkgbuild: String,
// Per-architecture build settings.
pub builds: std::collections::HashMap<super::builder::Arch, BuildConfig>,
// Optional S3 mirror settings.
pub s3: Option<S3Config>,
}
// Build settings for one architecture.
#[derive(Debug, serde_derive::Deserialize)]
pub struct BuildConfig {
pub chroot: String,
}
// S3 bucket/region the repository is mirrored to.
#[derive(Debug, serde_derive::Deserialize)]
pub struct S3Config {
pub bucket: String,
pub region: Region,
}
// Newtype over the foreign `rusoto_core::Region` so we can implement
// `Deserialize` for it from its string name.
#[derive(Debug)]
pub struct Region(rusoto_core::Region);
impl<'de> serde::Deserialize<'de> for Region {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Region;
fn expecting(
&self,
formatter: &mut std::fmt::Formatter,
) -> Result<(), std::fmt::Error> {
write!(formatter, "a valid AWS region name")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
use std::str::FromStr;
match rusoto_core::Region::from_str(v) {
Ok(r) => Ok(Region(r)),
Err(e) => Err(serde::de::Error::invalid_value(
serde::de::Unexpected::Str(v),
&format!("{}", e).as_str(),
)),
}
}
}
deserializer.deserialize_str(Visitor {})
}
}
impl Config {
    /// Parses a `Config` from YAML read from `reader`.
    pub fn from_reader<R>(reader: R) -> serde_yaml::Result<Self>
    where
        R: std::io::Read,
    {
        serde_yaml::from_reader(reader)
    }
    /// Repository directory layout: `<name>/os/<arch>`.
    pub fn repo_dir(&self, arch: &super::builder::Arch) -> std::path::PathBuf {
        std::path::PathBuf::from(&self.name)
            .join("os")
            .join(format!("{}", arch))
    }
    /// Path of the repo database archive: `<repo_dir>/<name>.db`.
    pub fn db_path(&self, arch: &super::builder::Arch) -> std::path::PathBuf {
        self.repo_file(arch, ".db")
    }
    /// Path of the files archive: `<repo_dir>/<name>.files`.
    pub fn files_path(&self, arch: &super::builder::Arch) -> std::path::PathBuf {
        self.repo_file(arch, ".files")
    }
    /// Directory containing the PKGBUILD for `package_name`.
    pub fn package_dir(&self, package_name: &str) -> std::path::PathBuf {
        std::path::PathBuf::from(&self.pkgbuild).join(package_name)
    }
    // Shared helper: `<repo_dir>/<name><suffix>`, appending the suffix via
    // OsString so no lossy string conversion is involved.
    fn repo_file(&self, arch: &super::builder::Arch, suffix: &str) -> std::path::PathBuf {
        let mut path = self.repo_dir(arch).join(&self.name).into_os_string();
        path.push(suffix);
        std::path::PathBuf::from(path)
    }
}
// Thin wrapper around `rusoto_s3::S3Client` bound to a single bucket.
pub struct S3 {
client: rusoto_s3::S3Client,
bucket: String,
}
impl S3 {
/// Builds a client for the configured region and remembers the bucket.
pub fn new(config: &S3Config) -> Self {
let Region(ref region) = config.region;
let client = rusoto_s3::S3Client::new(region.clone());
S3 {
client,
bucket: config.bucket.to_owned(),
}
}
/// Fetches the repo `.db` and `.files` archives; missing objects are
/// treated as success (see `get`).
pub async fn download_repository(
&self,
config: &Config,
arch: &super::builder::Arch,
) -> Result<(), anyhow::Error> {
self.get(config.db_path(arch)).await?;
self.get(config.files_path(arch)).await
}
/// Uploads all packages (plus `.sig`s when `package_key` is set), then
/// the `.files` and `.db` archives (plus the db `.sig` when `repo_key`
/// is set). Stops at the first failed upload.
pub async fn upload_repository<P>(
&self,
config: &Config,
arch: &super::builder::Arch,
package_paths: &[P],
) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
{
const XZ_MIME_TYPE: &str = "application/x-xz";
const SIG_MIME_TYPE: &str = "application/pgp-signature";
const GZIP_MIME_TYPE: &str = "application/gzip";
for package_path in package_paths {
self.put(package_path, XZ_MIME_TYPE).await?;
if config.package_key.is_some() {
let mut sig_path = package_path.as_ref().as_os_str().to_os_string();
sig_path.push(".sig");
self.put(&sig_path, SIG_MIME_TYPE).await?;
}
}
self.put(config.files_path(arch), GZIP_MIME_TYPE).await?;
let db_path = config.db_path(arch);
self.put(&db_path, GZIP_MIME_TYPE).await?;
if config.repo_key.is_some() {
let mut sig_path = db_path.clone().into_os_string();
sig_path.push(".sig");
self.put(sig_path, SIG_MIME_TYPE).await?;
}
Ok(())
}
/// Downloads the object named by `path` into a local file at the same
/// relative path, using non-blocking tokio I/O; a missing object
/// (`NoSuchKey`) is treated as success.
async fn get<P>(&self, path: P) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
{
use rusoto_s3::S3;
let path = path.as_ref();
let request = rusoto_s3::GetObjectRequest {
bucket: self.bucket.to_owned(),
key: path.to_string_lossy().into_owned(),
..rusoto_s3::GetObjectRequest::default()
};
println!("Download {}", path.display());
match self.client.get_object(request).await {
Ok(output) => {
if let Some(mut body) = output.body {
use futures::StreamExt as _;
use tokio::io::AsyncWriteExt as _;
// The file is only created once the object is known to
// exist; stream/write errors propagate via `?`.
let file = tokio::fs::File::create(path).await?;
let mut writer = tokio::io::BufWriter::new(file);
while let Some(item) = body.next().await {
writer.write_all(&item?).await?;
}
// Flush buffered data before returning.
writer.shutdown().await?;
}
Ok(())
}
Err(rusoto_core::RusotoError::Service(rusoto_s3::GetObjectError::NoSuchKey(_))) => {
Ok(())
}
Err(e) => Err(anyhow::Error::from(e)),
}
}
/// Uploads the local file at `path` with the given MIME type, streaming
/// its contents and declaring the length from file metadata.
async fn put<P>(&self, path: P, content_type: &str) -> Result<(), anyhow::Error>
where
P: AsRef<std::path::Path>,
{
// FutureExt/TryStreamExt supply `into_stream`/`map_ok` below.
use futures::FutureExt as _;
use futures::TryStreamExt as _;
use rusoto_s3::S3;
let path = path.as_ref();
let metadata = tokio::fs::metadata(path).await?;
let stream = rusoto_s3::StreamingBody::new(
tokio::fs::read(path.to_owned())
.into_stream()
.map_ok(|b| bytes::Bytes::from(b)),
);
let request = rusoto_s3::PutObjectRequest {
bucket: self.bucket.to_owned(),
key: path.to_string_lossy().into_owned(),
content_type: Some(content_type.to_owned()),
content_length: Some(metadata.len() as i64),
body: Some(stream),
..Default::default()
};
println!("Upload {}", path.display());
self.client.put_object(request).await?;
Ok(())
}
}
|
//! This module is strictly internal.
//!
//! These functions are used by `date()`, `time()` and `datetime()`.
//! They are currently not private, because they need to be accessible,
//! but are not useful by themselves.
//!
//! Please refer to the top-level functions instead, as they offer a better abstraction.
//!
//! **These functions may be made private later.**
use helper::*;
use nom::{self, is_digit};
use super::{Time, DateTime, Date};
// Wraps a sub-parser so that empty input or a failed match yields
// `Done(.., None)` instead of an error; `Incomplete` is propagated.
macro_rules! empty_or(
($i:expr, $submac:ident!( $($args:tt)* )) => (
if $i.len() == 0 {
nom::IResult::Done($i, None)
} else {
match $submac!($i, $($args)*) {
nom::IResult::Done(i,o) => nom::IResult::Done(i, Some(o)),
nom::IResult::Error(_) => nom::IResult::Done($i, None),
nom::IResult::Incomplete(i) => nom::IResult::Incomplete(i)
}
}
);
);
// Succeeds with the whole input slice only when every byte satisfies the
// predicate; otherwise fails with custom error code 20.
macro_rules! check(
($input:expr, $submac:ident!( $($args:tt)* )) => (
{
let mut failed = false;
for idx in 0..$input.len() {
if !$submac!($input[idx], $($args)*) {
failed = true;
break;
}
}
if failed {
nom::IResult::Error(nom::Err::Position(nom::ErrorKind::Custom(20),$input))
} else {
nom::IResult::Done(&b""[..], $input)
}
}
);
($input:expr, $f:expr) => (
check!($input, call!($f));
);
);
// Takes exactly one byte and accepts it only if it lies in the inclusive
// ASCII range [$min, $max].
macro_rules! char_between(
($input:expr, $min:expr, $max:expr) => (
{
fn f(c: u8) -> bool { c >= ($min as u8) && c <= ($max as u8)}
flat_map!($input, take!(1), check!(f))
}
);
);
// Exactly four ASCII digits.
named!(take_4_digits, flat_map!(take!(4), check!(is_digit)));
// year: optional sign followed by four digits; "-" negates the value
named!(year_prefix, alt!(tag!("+") | tag!("-")));
named!(year <i32>, chain!(
pref: opt!(year_prefix) ~
year: call!(take_4_digits)
,
|| {
match pref {
Some(b"-") => -buf_to_i32(year),
_ => buf_to_i32(year)
}
}));
// MM (01-12), split by decade so out-of-range months fail to parse
named!(lower_month <u32>, chain!(tag!("0") ~ s:char_between!('1', '9') , || buf_to_u32(s)));
named!(upper_month <u32>, chain!(tag!("1") ~ s:char_between!('0', '2') , || 10+buf_to_u32(s)));
named!(month <u32>, alt!(lower_month | upper_month));
// DD (01-31)
named!(day_zero <u32>, chain!(tag!("0") ~ s:char_between!('1', '9') , || buf_to_u32(s)));
named!(day_one <u32>, chain!(tag!("1") ~ s:char_between!('0', '9') , || 10+buf_to_u32(s)));
named!(day_two <u32>, chain!(tag!("2") ~ s:char_between!('0', '9') , || 20+buf_to_u32(s)));
named!(day_three <u32>, chain!(tag!("3") ~ s:char_between!('0', '1') , || 30+buf_to_u32(s)));
named!(day <u32>, alt!(day_zero | day_one | day_two | day_three));
// WW (ISO week number, 01-53)
// reusing day_N parsers, sorry
named!(week_three <u32>, chain!(tag!("3") ~ s:char_between!('0', '9') , || 30+buf_to_u32(s)));
named!(week_four <u32>, chain!(tag!("4") ~ s:char_between!('0', '9') , || 40+buf_to_u32(s)));
named!(week_five <u32>, chain!(tag!("5") ~ s:char_between!('0', '3') , || 50+buf_to_u32(s)));
named!(week <u32>, alt!(day_zero | day_one | day_two | week_three| week_four | week_five ));
named!(week_day <u32>, chain!(s:char_between!('1', '7') , || buf_to_u32(s)));
// ordinal DDD (day of year; 000-399 accepted syntactically)
named!(ord_day <u32>, chain!(
a:char_between!('0','3') ~
b:char_between!('0','9') ~
c:char_between!('0','9')
,
|| { buf_to_u32(a)*100 + buf_to_u32(b)*10 + buf_to_u32(c) }
));
// YYYY-MM-DD (separators optional)
named!(pub ymd_date <Date>, chain!(
y: year ~
opt!(tag!("-")) ~
m: month ~
opt!(tag!("-")) ~
d: day
,
|| { Date::YMD{ year: y, month: m, day: d } }
));
// YYYY-DDD (ordinal date; separator optional)
named!(pub ordinal_date <Date>, chain!(
y: year ~
opt!(tag!("-")) ~
d: ord_day
,
|| { Date::Ordinal{ year: y, ddd: d } }
));
// YYYY-"W"WW-D
named!(pub iso_week_date <Date>, chain!(
y: year ~
opt!(tag!("-")) ~
tag!("W") ~
w: week ~
opt!(tag!("-")) ~
d: week_day
,
|| { Date::Week{ year: y, ww: w, d: d } }
));
named!(pub parse_date <Date>, alt!( ymd_date | iso_week_date | ordinal_date ) );
// TIME
// HH (00-24; 24 is accepted, presumably for end-of-day midnight)
named!(lower_hour <u32>, chain!(f:char_between!('0','1') ~ s:char_between!('0','9') ,
|| { buf_to_u32(f)*10 + buf_to_u32(s) } ));
named!(upper_hour <u32>, chain!(tag!("2") ~ s:char_between!('0','4') , || 20+buf_to_u32(s)));
named!(hour <u32>, alt!(lower_hour | upper_hour));
// MM (00-59); seconds additionally allow 60 (leap second)
named!(below_sixty <u32>, chain!(f:char_between!('0','5') ~ s:char_between!('0','9'), || { buf_to_u32(f)*10 + buf_to_u32(s) } ));
named!(upto_sixty <u32>, alt!(below_sixty | map!(tag!("60"), |_| 60)));
named!(minute <u32>, call!(below_sixty));
named!(second <u32>, call!(upto_sixty));
// NOTE(review): any run of digits is read as a plain integer, so ".5"
// parses as 5 ms rather than 500 ms — confirm this is intended.
named!(millisecond <u32>, map!( is_a!("0123456789"), |ms| buf_to_u32(ms) ) );
// HH:MM:[SS][.(m*)][(Z|+...|-...)]
named!(pub parse_time <Time>, chain!(
h: hour ~
opt!(tag!(":")) ~
m: minute ~
s: opt!( chain!( opt!(tag!(":")) ~ s:second, || s)) ~
ms: opt!( chain!( tag!(".") ~ ms:millisecond, || ms)) ~
z: opt!( alt!( timezone_hour | timezone_utc) ) ,
|| {
Time {
hour: h,
minute: m,
second: s.unwrap_or(0),
millisecond: ms.unwrap_or(0),
tz_offset_hours: z.unwrap_or((0,0)).0,
tz_offset_minutes: z.unwrap_or((0,0)).1
}
}
));
// "+" => +1, "-" => -1 multiplier for timezone offsets
named!(sign <i32>, alt!(
tag!("-") => { |_| -1 } |
tag!("+") => { |_| 1 }
)
);
// +/-HH[:MM]; the sign is applied to both components
named!(timezone_hour <(i32,i32)>, chain!(
s: sign ~
h: hour ~
m: empty_or!(
chain!(
tag!(":")? ~ m: minute , || { m }
))
,
|| { (s * (h as i32) , s * (m.unwrap_or(0) as i32)) }
));
named!(timezone_utc <(i32,i32)>, map!(tag!("Z"), |_| (0,0)));
// Full ISO8601
named!(pub parse_datetime <DateTime>, chain!(
d: parse_date ~
tag!("T") ~
t: parse_time
,
|| {
DateTime{
date: d,
time: t,
}
}
));
#[cfg(test)]
mod tests{
// Unit tests for the field parsers. `Done(rest, value)` asserts both the
// parsed value and the input left unconsumed.
use super::{year, month, day};
use super::{hour, minute, second};
use nom::IResult::*;
#[test]
fn test_year() {
assert_eq!(Done(&[][..], 2015), year(b"2015"));
assert_eq!(Done(&[][..], -0333), year(b"-0333"));
assert_eq!(Done(&b"-"[..], 2015), year(b"2015-"));
assert!(year(b"abcd").is_err());
assert!(year(b"2a03").is_err());
}
#[test]
fn test_month() {
assert_eq!(Done(&[][..], 1), month(b"01"));
assert_eq!(Done(&[][..], 6), month(b"06"));
assert_eq!(Done(&[][..], 12), month(b"12"));
assert_eq!(Done(&b"-"[..], 12), month(b"12-"));
assert!(month(b"13").is_err());
assert!(month(b"00").is_err());
}
#[test]
fn test_day() {
assert_eq!(Done(&[][..], 1), day(b"01"));
assert_eq!(Done(&[][..], 12), day(b"12"));
assert_eq!(Done(&[][..], 20), day(b"20"));
assert_eq!(Done(&[][..], 28), day(b"28"));
assert_eq!(Done(&[][..], 30), day(b"30"));
assert_eq!(Done(&[][..], 31), day(b"31"));
assert_eq!(Done(&b"-"[..], 31), day(b"31-"));
assert!(day(b"00").is_err());
assert!(day(b"32").is_err());
}
#[test]
fn test_hour() {
assert_eq!(Done(&[][..], 0), hour(b"00"));
assert_eq!(Done(&[][..], 1), hour(b"01"));
assert_eq!(Done(&[][..], 6), hour(b"06"));
assert_eq!(Done(&[][..], 12), hour(b"12"));
assert_eq!(Done(&[][..], 13), hour(b"13"));
assert_eq!(Done(&[][..], 20), hour(b"20"));
assert_eq!(Done(&[][..], 24), hour(b"24"));
assert!(hour(b"25").is_err());
assert!(hour(b"30").is_err());
assert!(hour(b"ab").is_err());
}
#[test]
fn test_minute() {
assert_eq!(Done(&[][..], 0), minute(b"00"));
assert_eq!(Done(&[][..], 1), minute(b"01"));
assert_eq!(Done(&[][..], 30), minute(b"30"));
assert_eq!(Done(&[][..], 59), minute(b"59"));
assert!(minute(b"60").is_err());
assert!(minute(b"61").is_err());
assert!(minute(b"ab").is_err());
}
#[test]
fn test_second() {
// 60 is valid only for seconds (leap second).
assert_eq!(Done(&[][..], 0), second(b"00"));
assert_eq!(Done(&[][..], 1), second(b"01"));
assert_eq!(Done(&[][..], 30), second(b"30"));
assert_eq!(Done(&[][..], 59), second(b"59"));
assert_eq!(Done(&[][..], 60), second(b"60"));
assert!(second(b"61").is_err());
assert!(second(b"ab").is_err());
}
}
fixed: opt! behaviour change in nom 1.0.0 final
//! This module is strictly internal.
//!
//! These functions are used by `date()`, `time()` and `datetime()`.
//! They are currently not private, because they need to be accessible,
//! but are not useful by themselves.
//!
//! Please refer to the top-level functions instead, as they offer a better abstraction.
//!
//! **These functions may be made private later.**
use helper::*;
use nom::{self, is_digit};
use super::{Time, DateTime, Date};
// Wraps a sub-parser so that empty input or a failed match yields
// `Done(.., None)` instead of an error; `Incomplete` is propagated.
macro_rules! empty_or(
($i:expr, $submac:ident!( $($args:tt)* )) => (
if $i.len() == 0 {
nom::IResult::Done($i, None)
} else {
match $submac!($i, $($args)*) {
nom::IResult::Done(i,o) => nom::IResult::Done(i, Some(o)),
nom::IResult::Error(_) => nom::IResult::Done($i, None),
nom::IResult::Incomplete(i) => nom::IResult::Incomplete(i)
}
}
);
);
// Succeeds with the whole input slice only when every byte satisfies the
// predicate; otherwise fails with custom error code 20.
macro_rules! check(
($input:expr, $submac:ident!( $($args:tt)* )) => (
{
let mut failed = false;
for idx in 0..$input.len() {
if !$submac!($input[idx], $($args)*) {
failed = true;
break;
}
}
if failed {
nom::IResult::Error(nom::Err::Position(nom::ErrorKind::Custom(20),$input))
} else {
nom::IResult::Done(&b""[..], $input)
}
}
);
($input:expr, $f:expr) => (
check!($input, call!($f));
);
);
// Takes exactly one byte and accepts it only if it lies in the inclusive
// ASCII range [$min, $max].
macro_rules! char_between(
($input:expr, $min:expr, $max:expr) => (
{
fn f(c: u8) -> bool { c >= ($min as u8) && c <= ($max as u8)}
flat_map!($input, take!(1), check!(f))
}
);
);
// Exactly four ASCII digits.
named!(take_4_digits, flat_map!(take!(4), check!(is_digit)));
// year: optional sign followed by four digits; "-" negates the value.
// `complete!` keeps optional pieces at end-of-input from producing
// `Incomplete` (opt! behaviour changed in nom 1.0.0 final).
named!(year_prefix, alt!(tag!("+") | tag!("-")));
named!(year <i32>, chain!(
pref: opt!(complete!(year_prefix)) ~
year: call!(take_4_digits)
,
|| {
match pref {
Some(b"-") => -buf_to_i32(year),
_ => buf_to_i32(year)
}
}));
// MM (01-12), split by decade so out-of-range months fail to parse
named!(lower_month <u32>, chain!(tag!("0") ~ s:char_between!('1', '9') , || buf_to_u32(s)));
named!(upper_month <u32>, chain!(tag!("1") ~ s:char_between!('0', '2') , || 10+buf_to_u32(s)));
named!(month <u32>, alt!(lower_month | upper_month));
// DD (01-31)
named!(day_zero <u32>, chain!(tag!("0") ~ s:char_between!('1', '9') , || buf_to_u32(s)));
named!(day_one <u32>, chain!(tag!("1") ~ s:char_between!('0', '9') , || 10+buf_to_u32(s)));
named!(day_two <u32>, chain!(tag!("2") ~ s:char_between!('0', '9') , || 20+buf_to_u32(s)));
named!(day_three <u32>, chain!(tag!("3") ~ s:char_between!('0', '1') , || 30+buf_to_u32(s)));
named!(day <u32>, alt!(day_zero | day_one | day_two | day_three));
// WW (ISO week number, 01-53)
// reusing day_N parsers, sorry
named!(week_three <u32>, chain!(tag!("3") ~ s:char_between!('0', '9') , || 30+buf_to_u32(s)));
named!(week_four <u32>, chain!(tag!("4") ~ s:char_between!('0', '9') , || 40+buf_to_u32(s)));
named!(week_five <u32>, chain!(tag!("5") ~ s:char_between!('0', '3') , || 50+buf_to_u32(s)));
named!(week <u32>, alt!(day_zero | day_one | day_two | week_three| week_four | week_five ));
named!(week_day <u32>, chain!(s:char_between!('1', '7') , || buf_to_u32(s)));
// ordinal DDD (day of year; 000-399 accepted syntactically)
named!(ord_day <u32>, chain!(
a:char_between!('0','3') ~
b:char_between!('0','9') ~
c:char_between!('0','9')
,
|| { buf_to_u32(a)*100 + buf_to_u32(b)*10 + buf_to_u32(c) }
));
// YYYY-MM-DD (separators optional)
named!(pub ymd_date <Date>, chain!(
y: year ~
opt!(complete!(tag!("-"))) ~
m: month ~
opt!(complete!(tag!("-"))) ~
d: day
,
|| { Date::YMD{ year: y, month: m, day: d } }
));
// YYYY-DDD (ordinal date; separator optional)
named!(pub ordinal_date <Date>, chain!(
y: year ~
opt!(complete!(tag!("-"))) ~
d: ord_day
,
|| { Date::Ordinal{ year: y, ddd: d } }
));
// YYYY-"W"WW-D
named!(pub iso_week_date <Date>, chain!(
y: year ~
opt!(complete!(tag!("-"))) ~
tag!("W") ~
w: week ~
opt!(complete!(tag!("-"))) ~
d: week_day
,
|| { Date::Week{ year: y, ww: w, d: d } }
));
named!(pub parse_date <Date>, alt!( ymd_date | iso_week_date | ordinal_date ) );
// TIME
// HH (00-24; 24 is accepted, presumably for end-of-day midnight)
named!(lower_hour <u32>, chain!(f:char_between!('0','1') ~ s:char_between!('0','9') ,
|| { buf_to_u32(f)*10 + buf_to_u32(s) } ));
named!(upper_hour <u32>, chain!(tag!("2") ~ s:char_between!('0','4') , || 20+buf_to_u32(s)));
named!(hour <u32>, alt!(lower_hour | upper_hour));
// MM (00-59); seconds additionally allow 60 (leap second)
named!(below_sixty <u32>, chain!(f:char_between!('0','5') ~ s:char_between!('0','9'), || { buf_to_u32(f)*10 + buf_to_u32(s) } ));
named!(upto_sixty <u32>, alt!(below_sixty | map!(tag!("60"), |_| 60)));
named!(minute <u32>, call!(below_sixty));
named!(second <u32>, call!(upto_sixty));
// NOTE(review): any run of digits is read as a plain integer, so ".5"
// parses as 5 ms rather than 500 ms — confirm this is intended.
named!(millisecond <u32>, map!( is_a!("0123456789"), |ms| buf_to_u32(ms) ) );
// HH:MM:[SS][.(m*)][(Z|+...|-...)]
// `complete!` keeps the optional trailing parts from yielding
// `Incomplete` at end of input (nom 1.0.0 opt! behaviour change).
named!(pub parse_time <Time>, chain!(
h: hour ~
opt!(complete!(tag!(":"))) ~
m: minute ~
s: opt!(complete!( chain!( opt!(tag!(":")) ~ s:second, || s))) ~
ms: opt!(complete!( chain!( tag!(".") ~ ms:millisecond, || ms))) ~
z: opt!(complete!( alt!( timezone_hour | timezone_utc) )) ,
|| {
Time {
hour: h,
minute: m,
second: s.unwrap_or(0),
millisecond: ms.unwrap_or(0),
tz_offset_hours: z.unwrap_or((0,0)).0,
tz_offset_minutes: z.unwrap_or((0,0)).1
}
}
));
// "+" => +1, "-" => -1 multiplier for timezone offsets
named!(sign <i32>, alt!(
tag!("-") => { |_| -1 } |
tag!("+") => { |_| 1 }
)
);
// +/-HH[:MM]; the sign is applied to both components
named!(timezone_hour <(i32,i32)>, chain!(
s: sign ~
h: hour ~
m: empty_or!(
chain!(
tag!(":")? ~ m: minute , || { m }
))
,
|| { (s * (h as i32) , s * (m.unwrap_or(0) as i32)) }
));
named!(timezone_utc <(i32,i32)>, map!(tag!("Z"), |_| (0,0)));
// Full ISO8601
named!(pub parse_datetime <DateTime>, chain!(
d: parse_date ~
tag!("T") ~
t: parse_time
,
|| {
DateTime{
date: d,
time: t,
}
}
));
#[cfg(test)]
mod tests{
// Unit tests for the field parsers. `Done(rest, value)` asserts both the
// parsed value and the input left unconsumed.
use super::{year, month, day};
use super::{hour, minute, second};
use nom::IResult::*;
#[test]
fn test_year() {
assert_eq!(Done(&[][..], 2015), year(b"2015"));
assert_eq!(Done(&[][..], -0333), year(b"-0333"));
assert_eq!(Done(&b"-"[..], 2015), year(b"2015-"));
assert!(year(b"abcd").is_err());
assert!(year(b"2a03").is_err());
}
#[test]
fn test_month() {
assert_eq!(Done(&[][..], 1), month(b"01"));
assert_eq!(Done(&[][..], 6), month(b"06"));
assert_eq!(Done(&[][..], 12), month(b"12"));
assert_eq!(Done(&b"-"[..], 12), month(b"12-"));
assert!(month(b"13").is_err());
assert!(month(b"00").is_err());
}
#[test]
fn test_day() {
assert_eq!(Done(&[][..], 1), day(b"01"));
assert_eq!(Done(&[][..], 12), day(b"12"));
assert_eq!(Done(&[][..], 20), day(b"20"));
assert_eq!(Done(&[][..], 28), day(b"28"));
assert_eq!(Done(&[][..], 30), day(b"30"));
assert_eq!(Done(&[][..], 31), day(b"31"));
assert_eq!(Done(&b"-"[..], 31), day(b"31-"));
assert!(day(b"00").is_err());
assert!(day(b"32").is_err());
}
#[test]
fn test_hour() {
assert_eq!(Done(&[][..], 0), hour(b"00"));
assert_eq!(Done(&[][..], 1), hour(b"01"));
assert_eq!(Done(&[][..], 6), hour(b"06"));
assert_eq!(Done(&[][..], 12), hour(b"12"));
assert_eq!(Done(&[][..], 13), hour(b"13"));
assert_eq!(Done(&[][..], 20), hour(b"20"));
assert_eq!(Done(&[][..], 24), hour(b"24"));
assert!(hour(b"25").is_err());
assert!(hour(b"30").is_err());
assert!(hour(b"ab").is_err());
}
#[test]
fn test_minute() {
assert_eq!(Done(&[][..], 0), minute(b"00"));
assert_eq!(Done(&[][..], 1), minute(b"01"));
assert_eq!(Done(&[][..], 30), minute(b"30"));
assert_eq!(Done(&[][..], 59), minute(b"59"));
assert!(minute(b"60").is_err());
assert!(minute(b"61").is_err());
assert!(minute(b"ab").is_err());
}
#[test]
fn test_second() {
// 60 is valid only for seconds (leap second).
assert_eq!(Done(&[][..], 0), second(b"00"));
assert_eq!(Done(&[][..], 1), second(b"01"));
assert_eq!(Done(&[][..], 30), second(b"30"));
assert_eq!(Done(&[][..], 59), second(b"59"));
assert_eq!(Done(&[][..], 60), second(b"60"));
assert!(second(b"61").is_err());
assert!(second(b"ab").is_err());
}
}
|
/**
This module contains the bulk of the header-processing code, and the core structures.
Note that it specifically *does not* contain conditional expression handling, or feature set abstractions.
*/
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fs;
use std::path;
use std::rc::Rc;
use itertools::Itertools;
use {ExpConfig, GenConfig, NativeCallConv, OutConfig, WinVersion};
use clang::{
self,
Index, RcIndexExt,
TranslationUnit, TranslationUnitFlags,
Cursor, CursorKind,
};
use features::{Features, scan_features};
use util;
// Toggle presumably controlling stub emission — unused in this chunk;
// confirm call sites before removing.
const EMIT_STUBS: bool = false;
/**
This is effectively the "entry point" for processing. Given a header and a configuration, it attempts to generate a Rust binding.

Parses the header once per expansion config, accumulating output items and a
shared cache, then writes the generated items and sanity-checks features.
Panics if a translation unit fails to parse.
*/
pub fn process_header(path: &str, gen_config: &GenConfig, out_config: &OutConfig) {
    info!("using clang version {}", clang::version());
    let index = Index::create(
        /*exclude_declarations_from_pch*/ false,
        /*display_diagnostics*/ false,
    );
    let mut out_items = OutputItems::new();
    let mut cache = Cache::new(index, gen_config);
    // Expand once for each expansion config.
    for exp_config in &gen_config.exp_configs {
        info!("expanding with config {:?}", exp_config);
        info!(".. switches: {:?}", exp_config.switches());
        // `.expect` directly on the Result keeps the parse error in the panic
        // message (the previous `.ok().expect(..)` discarded it).
        let tu = cache.tu.parse_translation_unit(path, exp_config).expect("parse TU");
        let renames = scan_for_renames(tu.clone(), gen_config);
        process_decls(tu, gen_config, exp_config, &mut out_items, &mut cache, &renames);
    }
    info!("generating output...");
    let out_files = &mut OutputFiles::new(out_config);
    output_header_items(&out_items, out_files);
    output_func_items(&out_items, out_files, out_config);
    info!("sanity-checking features...");
    sanity_check_features(&mut cache);
}
/**
A helper method that yields the "next" cursor to process from both a primary sequence and a list of deferred items.

Priority: (1) the currently draining deferred iterator, (2) newly deferred
cursors in `d` (moved into `di` and drained), (3) the primary iterator `dc`.
The original tail-recursive version is rewritten as a loop; `d` is emptied by
`replace`, so at most one refill happens per call.
*/
fn next_from(
    dc: &mut ::std::vec::IntoIter<Cursor>,
    d: &mut Vec<Cursor>,
    di: &mut Option<::std::vec::IntoIter<Cursor>>
) -> Option<Cursor> {
    use std::mem::replace;
    loop {
        if let Some(cur) = di.as_mut().and_then(|di| di.next()) {
            return Some(cur);
        }
        *di = None;
        if d.is_empty() {
            return dc.next();
        }
        *di = Some(replace(d, vec![]).into_iter());
    }
}
// Rename table built by `scan_for_renames`.
struct Renames {
// Maps a type's definition cursor to the typedef cursor supplying its
// canonical name.
renames: HashMap<Cursor, Cursor>,
// Typedef cursors consumed as rename targets; they must not also be
// processed as stand-alone declarations.
invalidations: HashSet<Cursor>,
}
impl Renames {
fn add_rename(&mut self, from: Cursor, to: Cursor) {
assert!(!self.is_renamed(&from), "definition is already renamed");
assert!(!self.invalidations.contains(&to), "already have invalidation");
self.renames.insert(from, to.clone());
self.invalidations.insert(to);
}
fn is_invalidated(&self, decl_cur: &Cursor) -> bool {
self.invalidations.contains(decl_cur)
}
fn is_renamed(&self, defn_cur: &Cursor) -> bool {
self.renames.contains_key(defn_cur)
}
fn rename_decl<'a, 'b>(&'a self, decl_cur: &'b Cursor) -> Result<&'a Cursor, &'b Cursor> {
if let Some(&ref cur) = self.renames.get(decl_cur) {
Ok(cur)
} else {
Err(decl_cur)
}
}
fn rename_ty(&self, ty: clang::Type) -> Result<clang::Cursor, clang::Type> {
use clang::TypeKind as TK;
// We're only concerned about structs, enums and unions.
match ty.kind() {
TK::Record | TK::Enum => {
let ty_decl = match ty.declaration().definition() {
Some(cur) => cur,
None => return Err(ty)
};
if let Some(&ref cur) = self.renames.get(&ty_decl) {
return Ok(cur.clone());
}
Err(ty)
},
_ => Err(ty)
}
}
}
impl Default for Renames {
fn default() -> Self {
Renames {
renames: HashMap::new(),
invalidations: HashSet::new(),
}
}
}
// Walks the TU's top-level cursors (plus cursors deferred from inside
// renamed definitions) and collects all typedef-driven renames.
fn scan_for_renames(tu: Rc<TranslationUnit>, gen_config: &GenConfig) -> Renames {
/*
The goal here is to find two kinds of things:
1. Types which have *no* name in tag space, but are given one via typedef.
2. Types which *have* a name in tag space, but the *canonical* name is given via typedef.
We handle both of these by scanning through all the typedefs. If we find one whose subject is one of the above types, we record the subject's cursor and the *new* name, as well as an "invalidation" of the typedef's cursor.
*/
info!("scanning for renames...");
let mut renames = Renames::default();
let mut decl_curs = tu.cursor().children().into_iter();
let mut deferred: Vec<Cursor> = vec![];
let mut deferred_iter = None;
while let Some(decl_cur) = next_from(&mut decl_curs, &mut deferred, &mut deferred_iter) {
if !gen_config.should_ignore(&decl_cur) {
// Child cursors of renamed definitions are pushed into `deferred`
// and revisited on later iterations.
let rename = scan_decl_for_rename(decl_cur, gen_config, &renames, &mut |cur| deferred.push(cur));
if let Some((from, to)) = rename {
renames.add_rename(from, to);
}
}
}
renames
}
/**
Examines one declaration and decides whether it should trigger a tag-type
rename.

Returns `Some((definition_cursor, typedef_cursor))` when `decl_cur` is a
typedef that directly aliases a struct/union/enum definition whose tag name
is non-canonical.  Note that the aliased definition's child cursors are
passed to `defer` *before* the canonical-name check, so children are
deferred even when no rename results.
*/
fn scan_decl_for_rename<Defer>(
    decl_cur: Cursor,
    gen_config: &GenConfig,
    renames: &Renames,
    mut defer: Defer,
) -> Option<(Cursor, Cursor)>
where Defer: FnMut(Cursor) {
    use clang::CursorKind as CK;
    use clang::TypeKind as TK;
    // Only typedefs can introduce renames.
    match decl_cur.kind() {
        CK::TypedefDecl => (),
        _ => return None
    }
    debug!("scan_decl_for_rename({}, ..)", decl_cur);
    let ty = decl_cur.typedef_decl_underlying_type();
    // Resolve unexposed types if possible.
    let ty = match ty.kind() {
        TK::Unexposed => ty.canonical(),
        _ => ty
    };
    // We don't want to even look at things that aren't just direct ADT typedefs.
    match ty.kind() {
        TK::Record | TK::Enum => (),
        other => {
            debug!(".. ignoring indirect typedef: {:?}", other);
            return None;
        }
    }
    // Get the original type definition.
    let ty_defn = ty.declaration().definition();
    let ty_defn = match ty_defn {
        Some(c) => c,
        None => {
            debug!(".. ignoring; has no definition");
            return None;
        }
    };
    // Double-check that this is really what we think it is.
    match ty_defn.kind() {
        CK::StructDecl | CK::EnumDecl | CK::UnionDecl => (),
        other => {
            debug!(".. ignoring non-adt target: {:?}", other);
            return None;
        }
    };
    // Defer all the child cursors.
    for child in ty_defn.children() { defer(child); }
    // Check if it's already canonical.
    let ty_name = ty_defn.spelling();
    if !gen_config.is_tag_name_non_canonical(&ty_name) {
        debug!(".. ignoring canonical name {:?}", ty_name);
        return None;
    }
    // Finally, make sure we aren't renaming it more than once.
    if renames.is_renamed(&ty_defn) {
        debug!(".. ignoring; already renamed");
        return None;
    }
    // Do the rename.
    debug!(".. renaming {} to {}", ty_defn, decl_cur);
    Some((ty_defn, decl_cur))
}
/**
Walks every top-level declaration of the translation unit (plus deferred
nested declarations) and feeds each one to `process_decl`, skipping
ignored and invalidated cursors.
*/
fn process_decls(
    tu: Rc<TranslationUnit>,
    gen_config: &GenConfig,
    exp_config: &ExpConfig,
    output: &mut OutputItems,
    cache: &mut Cache,
    renames: &Renames,
) {
    let feat_mask = exp_config.arch.to_features();
    let mut decl_curs = tu.cursor().children().into_iter();
    let mut deferred: Vec<Cursor> = vec![];
    let mut deferred_iter = None;
    while let Some(decl_cur) = next_from(&mut decl_curs, &mut deferred, &mut deferred_iter) {
        /*
        Something to be aware of: a symbol might match the ignore patterns,
        but have been renamed to something that *doesn't*.  We need to check
        for this.
        */
        if gen_config.should_ignore(&decl_cur) && !renames.is_renamed(&decl_cur) {
            debug!("ignoring: {}", decl_cur);
            continue;
        }
        if renames.is_invalidated(&decl_cur) {
            debug!("invalidated: {}", decl_cur);
            continue;
        }
        process_decl(
            decl_cur,
            feat_mask.clone(),
            exp_config.native_cc,
            output,
            cache,
            renames,
            &mut |cur| deferred.push(cur),
        );
    }
}
/**
Sorts all recorded header items and writes them out, grouped by
`(header, feature set)` so consecutive items can share one feature prefix.
*/
fn output_header_items(items: &OutputItems, output: &mut OutputFiles) {
    let mut entries = vec![];
    for decls in items.header_items.values() {
        for &(idx, ref header, ref feat, ref decl, ref annot) in decls {
            entries.push((header, idx, feat, decl, annot));
        }
    }
    // Tuple order: header first, then the insertion sequence id, then feature.
    entries.sort();
    let grouped = entries.into_iter()
        .group_by_lazy(|&(header, _, feat, _, _)| (header, feat));
    for ((header, feat), group) in &grouped {
        for (_, _, _, decl, annot) in group {
            output.emit_to_header(header, feat, decl, annot);
        }
    }
}
/**
Sorts all recorded function items and writes them out, grouped by
`(library, feature set, calling convention)` so each group can share one
`extern` block.
*/
fn output_func_items(items: &OutputItems, output: &mut OutputFiles, out_config: &OutConfig) {
    let mut entries = vec![];
    for (name, decls) in &items.fn_items {
        for &(_, ref feat, ref cconv, ref decl, ref annot) in decls {
            // A function may be emitted into more than one import library.
            for lib in out_config.get_fn_libs(name) {
                entries.push((lib, feat, name, cconv, decl, annot));
            }
        }
    }
    entries.sort();
    let grouped = entries.into_iter()
        .group_by_lazy(|&(lib, feat, _, cconv, _, _)| (lib, feat, cconv));
    for ((lib, feat, cconv), group) in &grouped {
        for (_, _, _, _, decl, annot) in group {
            output.emit_to_library(lib, feat, cconv, decl, annot);
        }
    }
}
/**
Scans every cached feature set, warning about any that are likely to cause
problems during expansion.

"Suspect" currently means either of:

- Features that *do not* target the desktop.
- Features with upper limits on versions (non-simple version ranges).

Any unusual version boundaries encountered are collected and reported at
the end so they can be cross-checked against known Windows versions.
*/
fn sanity_check_features(cache: &mut Cache) {
    use std::collections::BTreeSet;
    let mut weird_vers = BTreeSet::new();
    cache.iter_features(|path, line, &ref feat| {
        use features::Partitions;
        let mut suspect = vec![];
        if let Some(ref parts) = feat.parts {
            // An empty intersection with DesktopApp means desktop is excluded.
            if (parts.clone() & Partitions::DesktopApp).is_empty() {
                suspect.push("non-desktop-app");
            }
        }
        if let Some(ref winver) = feat.winver {
            if !winver.is_simple() {
                // Record each range's upper bound for the summary below.
                for &ref range in winver.ranges() {
                    weird_vers.insert(range.end);
                }
                suspect.push("complex-winver");
            }
        }
        if !suspect.is_empty() {
            warn!("suspect feature set: {}:{}: {} {:?}",
                path, line, suspect.connect(", "), feat);
        }
    });
    if !weird_vers.is_empty() {
        warn!("suspect versions:");
        for ver in weird_vers {
            warn!(".. 0x{:08x} - {:?}",
                ver, WinVersion::from_u32_round_up(ver));
        }
    }
}
/**
Processes a single declaration.

Computes the declaration's feature set (intersected with the architecture's
feature mask), then dispatches on the cursor kind to the appropriate
`process_*` function.  Unsupported kinds and translation failures are
logged as warnings rather than aborting; an invalid feature set, however,
is a hard error and panics.
*/
fn process_decl<Defer>(
    decl_cur: Cursor,
    feat_mask: Features,
    native_cc: NativeCallConv,
    output: &mut OutputItems,
    cache: &mut Cache,
    renames: &Renames,
    defer: &mut Defer,
)
where Defer: FnMut(Cursor)
{
    use clang::CursorKind as CK;
    // These kinds carry nothing translatable; bail before doing any work.
    let decl_kind = match decl_cur.kind() {
        CK::InclusionDirective
        | CK::MacroInstantiation
        => return,
        kind => kind
    };
    let decl_loc = decl_cur.location();
    debug!("process_decl: {}: {:?} {}",
        decl_loc.display_short(),
        decl_kind,
        decl_cur.spelling());
    // Declarations with no source file fall back to the default feature set.
    let feat = decl_loc.file()
        .map(|file| get_features_at(file, decl_loc.line(), cache))
        .unwrap_or_else(|| Features::default());
    debug!(".. process_decl feat: {:?}", feat);
    /*
    This is kind of a pain, but as it turns out, different architectures can
    cause some things to behave in weird ways.  For example, `DWORD` might be
    a typedef on one arch, but a macro on another, which leads to a different
    expansion.  Hooray!
    */
    let feat = match feat.and(feat_mask).check_valid() {
        Ok(feat) => feat,
        Err(err) => {
            /*
            This is *very definitely* a problem.  This means that the
            pre-processor has emitted code that our feature set says we
            shouldn't ever reach!

            This generally means one of two things: either the feature set
            calculation is wrong *or* the set of pre-defined symbols is
            incomplete/incorrect.
            */
            panic!(".. invalid feature set for {}: {}", decl_cur, err)
        },
    };
    // Keep a copy for the error message at the end; `decl_cur` is moved into
    // the `process_*` call below.
    let decl_cur_copy = decl_cur.clone();
    let result = match decl_kind {
        // Already filtered out above.
        CK::InclusionDirective
        | CK::MacroInstantiation
        => unreachable!(),
        CK::StructDecl => process_struct_decl(decl_cur, output, feat, renames, defer),
        CK::UnionDecl => process_union_decl(decl_cur, output, feat, renames, defer),
        CK::EnumDecl => process_enum_decl(decl_cur, output, feat, renames, defer),
        CK::FunctionDecl => process_function_decl(decl_cur, output, feat, renames, native_cc),
        CK::TypedefDecl => process_typedef_decl(decl_cur, output, feat, renames),
        CK::MacroDefinition => process_macro_defn(decl_cur, output, feat),
        kind => {
            warn!("could-not-translate unsupported {:?} {} at {}",
                kind, decl_cur.spelling(), decl_loc.display_short());
            Ok(())
        }
    };
    if let Err(err) = result {
        warn!("could-not-translate misc {}: {}", decl_cur_copy, err);
    }
}
/// Returns the name of the file containing `cur`; used by callers as the
/// destination header for emitted items.
///
/// Panics if the cursor has no associated file.
fn file_stem(cur: &Cursor) -> String {
    let file = cur.location().file().expect("valid file for file_stem");
    file.name()
}
/**
Process a single structure declaration.

Emits a `#[repr(C)]` struct.  Renamed structs are emitted under their
typedef-provided name and into that typedef's header.  Forward declarations
are skipped when a real definition exists elsewhere; declared-but-never-
defined structs become field-less unit structs.  Nested struct/union
declarations are handed to `defer` for later processing.
*/
fn process_struct_decl<Defer>(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    renames: &Renames,
    defer: &mut Defer,
) -> Result<(), String>
where Defer: FnMut(Cursor)
{
    use clang::CursorKind as CK;
    debug!("process_struct_decl({}, ..)", decl_cur);
    // Renamed structs take their name *and* destination header from the typedef.
    let (name, header) = match renames.rename_decl(&decl_cur) {
        Ok(cur) => {
            debug!(".. was renamed to {}", cur);
            (cur.spelling(), file_stem(&cur))
        },
        Err(cur) => (try!(name_for_maybe_anon(&cur)), file_stem(&cur))
    };
    let annot = decl_cur.location().display_short().to_string();
    // (is this cursor the definition?, is there *no* definition anywhere?)
    match (decl_cur.is_definition(), decl_cur.definition().is_none()) {
        (false, false) => {
            debug!(".. skipping forward declaration");
            return Ok(());
        },
        (false, true) => {
            // There *is no* definition!
            debug!(".. no definition found");
            let decl = format!("#[repr(C)] pub struct {};", name);
            output.add_header_item(name, header, feat, decl, annot);
            return Ok(())
        },
        (true, _) => ()
    }
    let mut fields = vec![];
    for child_cur in decl_cur.children() {
        match child_cur.kind() {
            CK::StructDecl
            | CK::UnionDecl
            => {
                // Defer.
                defer(child_cur);
            },
            CK::FieldDecl => {
                let name = child_cur.spelling();
                let ty = match trans_type(child_cur.type_(), renames) {
                    Ok(ty) => ty,
                    Err(err) => {
                        if EMIT_STUBS {
                            // TODO: just stub for now.
                            // NOTE(review): `name` here is the *field* name
                            // (it shadows the struct name above), so the stub
                            // is emitted under the field's name — confirm
                            // whether the struct name was intended instead.
                            let decl = format!("#[repr(C)] pub struct {}; /* ERR STUB! */", name);
                            output.add_header_item(name, header, feat, decl, annot);
                        }
                        return Err(err);
                    }
                };
                fields.push(format!("{}: {}", name, ty));
            },
            CK::UnexposedAttr => {
                // Skip.
            },
            other => panic!("nyi {:?}", other)
        }
    }
    let decl = match fields.len() {
        // Why did this have to be special-cased? :(
        0 => format!(
            "#[repr(C)] pub struct {name};",
            name = name,
        ),
        _ => format!(
            "#[repr(C)] pub struct {name} {{ {fields} }}",
            name = name,
            fields = fields.connect(", "),
        )
    };
    output.add_header_item(name, header, feat, decl, annot);
    Ok(())
}
/**
Process a single union declaration.

Unions cannot yet be translated faithfully, so (when `EMIT_STUBS` is on) a
placeholder struct is emitted in their place.
*/
fn process_union_decl<Defer>(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    _renames: &Renames,
    _defer: &mut Defer,
) -> Result<(), String>
where Defer: FnMut(Cursor)
{
    debug!("process_union_decl({}, ..)", decl_cur);
    let name = try!(name_for_maybe_anon(&decl_cur));
    let header = file_stem(&decl_cur);
    let annot = decl_cur.location().display_short().to_string();
    if EMIT_STUBS {
        let decl = format!("#[repr(C)] pub /*union*/ struct {}; /* STUB! */", name);
        output.add_header_item(name, header, feat, decl, annot);
    }
    Ok(())
}
/**
Process a single enum declaration.

Enums are not translated yet; when `EMIT_STUBS` is on, a placeholder is
emitted instead.
*/
fn process_enum_decl<Defer>(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    _renames: &Renames,
    _defer: &mut Defer,
) -> Result<(), String>
where Defer: FnMut(Cursor)
{
    debug!("process_enum_decl({}, ..)", decl_cur);
    let name = try!(name_for_maybe_anon(&decl_cur));
    let header = file_stem(&decl_cur);
    let annot = decl_cur.location().display_short().to_string();
    if EMIT_STUBS {
        // NOTE(review): `pub enum Foo;` is not valid Rust — confirm whether
        // these stubs are ever expected to compile.
        let decl = format!("#[repr(C)] pub enum {}; /* STUB! */", name);
        output.add_header_item(name, header, feat, decl, annot);
    }
    Ok(())
}
/**
Works out a name for the given structure, even if it doesn't otherwise *have* one.

At present, anonymous types are simply rejected with an error rather than
having a name synthesised for them.
*/
fn name_for_maybe_anon(decl_cur: &Cursor) -> Result<String, String> {
    // TODO: Use clang_Cursor_isAnonymous once its released.
    let name = decl_cur.spelling();
    if name.is_empty() {
        /*
        This is *probably* an anonymous type.  We need to give it a name that
        will be both reasonable *and* stable across invocations.
        */
        Err(format!("anonymous-struct {}", decl_cur))
    } else {
        Ok(name)
    }
}
/**
Process a single function declaration.

Emits an `extern` function item, unless the function appears to be defined
inline (in which case it cannot be linked against and is rejected), or uses
a calling convention we cannot express.
*/
fn process_function_decl(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    renames: &Renames,
    native_cc: NativeCallConv
) -> Result<(), String> {
    use clang::CallingConv as CC;
    use ::NativeCallConv as NCC;
    debug!("process_function_decl({}, _)", decl_cur);
    // Is this an inline function?  A trailing CompoundStmt child is a body.
    if decl_cur.children().last().map_or(false, |c| c.kind() == CursorKind::CompoundStmt) {
        // Err... *might be*
        return Err("inline-fn".into());
    }
    let ty = decl_cur.type_();
    // Map onto an "abstract" convention: when the C convention matches the
    // platform's native one, `extern "system"` can be shared across arches.
    let cconv = match (ty.calling_conv(), native_cc) {
        (CC::C, NCC::C) => AbsCallConv::System,
        (CC::C, _) => AbsCallConv::ExplicitlyC,
        (CC::X86StdCall, NCC::Stdcall) => AbsCallConv::System,
        (cconv, _) => {
            return Err(format!("bad-cconv {:?}", cconv));
        }
    };
    let name = decl_cur.spelling();
    // A void return is expressed by omitting the `->` clause entirely.
    let res_ty = if ty.result().kind() == clang::TypeKind::Void {
        String::new()
    } else {
        format!(" -> {}", try!(trans_type(ty.result(), renames)))
    };
    let arg_tys: Vec<String> = try!(ty.args().into_iter().map(|ty| trans_type(ty, renames)).collect());
    let arg_tys = arg_tys.connect(", ");
    let decl = format!(
        r#"pub fn {name}({arg_tys}){res_ty};"#,
        name = name,
        arg_tys = arg_tys,
        res_ty = res_ty,
    );
    let annot = decl_cur.location().display_short().to_string();
    output.add_func_item(name, feat, cconv, decl, annot);
    Ok(())
}
/**
Process a single typedef declaration.

Emits a `pub type` alias for the underlying type.  Typedef cursors that
were consumed by a tag-type rename never reach this point: they are
filtered out in `process_decls` via `Renames::is_invalidated`.
*/
fn process_typedef_decl(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    renames: &Renames,
) -> Result<(), String> {
    debug!("process_typedef_decl({}, ..)", decl_cur);
    let name = decl_cur.spelling();
    let header = file_stem(&decl_cur);
    let ty = decl_cur.typedef_decl_underlying_type();
    // Translation failure propagates; no stub is emitted for typedefs.
    let ty = try!(trans_type(ty, renames));
    let decl = format!("pub type {} = {};", name, ty);
    let annot = decl_cur.location().display_short().to_string();
    output.add_header_item(name, header, feat, decl, annot);
    Ok(())
}
/**
Process a single macro definition.

No actual translation is attempted: when `EMIT_STUBS` is on, the macro is
reproduced as a commented-out `#define` line for reference.
*/
fn process_macro_defn(defn_cur: Cursor, output: &mut OutputItems, feat: Features) -> Result<(), String> {
    debug!("process_macro_defn({}, ..)", defn_cur);
    let name = defn_cur.spelling();
    let header = file_stem(&defn_cur);
    let annot = defn_cur.location().display_short().to_string();
    // Note: we skip the last token because it's just a newline.
    let toks = defn_cur.tokenize();
    let first_tok = toks.at(0);
    let next_tok = toks.get(1);
    // Drop the macro's own name (first token) and the trailing newline token,
    // keeping only the replacement text.
    let toks: Vec<_> = toks.into_iter().dropping(1).dropping_back(1).map(|t| t.spelling()).collect();
    // Work out whether this is a functionish macro or not.
    // Heuristic: if the second token ends exactly one column after the first
    // ends, it is a 1-character token glued to the name — presumably the `(`
    // of a function-like macro.  TODO confirm this holds for all inputs.
    let is_fn_macro = {
        let first_col = first_tok.extent().expect("extent for macro first tok").end().column();
        let next_col = next_tok.map(|t| t.extent().expect("extent for macro next tok").end().column()).unwrap_or(!0);
        first_col + 1 == next_col
    };
    if EMIT_STUBS {
        // Function-like macros get no space between name and `(`, as in C.
        let spacer = if is_fn_macro { "" } else { " " };
        let defn = toks.connect(" ");
        let decl = format!("// #define {}{}{}", name, spacer, defn);
        output.add_header_item(name, header, feat, decl, annot);
    }
    Ok(())
}
/**
Translate a type into an equivalent Rust type reference.

Note that this **is not** for translating type declarations; you cannot just pass a structure definition.

Returns the Rust source text for the type reference, or an `Err` string
describing why the type could not be translated.
*/
fn trans_type(ty: clang::Type, renames: &Renames) -> Result<String, String> {
    use clang::TypeKind as TK;
    debug!("trans_type({:?} {:?}, _)", ty.kind(), ty.spelling());
    /**
    This works out the module qualifier for a given type.  This is intended to let you put types into files based on their source header.
    */
    fn mod_qual(cur: &Cursor) -> String {
        let file = cur.location().file();
        match file.map(|f| f.name()) {
            Some(name) => format!("::{}::", name),
            None => String::new()
        }
    }
    // Renamed tag types short-circuit to the typedef-provided name.
    let ty = match renames.rename_ty(ty) {
        Ok(cur) => {
            // Use whatever we've been given and don't look too closely...
            let qual = mod_qual(&cur);
            return Ok(format!("{}{}", qual, cur.spelling()));
        },
        Err(ty) => ty
    };
    match ty.kind() {
        TK::Invalid => Err("invalid type".into()),
        TK::Unexposed => {
            // Retry with the canonical type; give up if that is *also* unexposed.
            let canon_ty = ty.canonical();
            match canon_ty.kind() {
                TK::Unexposed => Err(format!("recursively unexposed type {}", canon_ty.spelling())),
                _ => trans_type(canon_ty, renames)
            }
        },
        // Basic types.
        TK::Void => Ok("libc::c_void".into()),
        TK::Char_U | TK::UChar => Ok("libc::c_uchar".into()),
        TK::Char16 => Ok("u16".into()),
        // **Note**: *not* `char` because C++ doesn't appear to guarantee that a value of type char32_t is a valid UTF-32 code unit.
        TK::Char32 => Ok("u32".into()),
        TK::UShort => Ok("libc::c_ushort".into()),
        TK::UInt => Ok("libc::c_uint".into()),
        TK::ULong => Ok("libc::c_ulong".into()),
        TK::ULongLong => Ok("libc::c_ulonglong".into()),
        TK::Char_S => Ok("libc::c_schar".into()),
        TK::SChar => Ok("libc::c_schar".into()),
        TK::WChar => Ok("libc::wchar_t".into()),
        TK::Short => Ok("libc::c_short".into()),
        TK::Int => Ok("libc::c_int".into()),
        TK::Long => Ok("libc::c_long".into()),
        TK::LongLong => Ok("libc::c_longlong".into()),
        TK::Float => Ok("libc::c_float".into()),
        TK::Double => Ok("libc::c_double".into()),
        TK::NullPtr => Ok("*mut libc::c_void".into()),
        // Constructed types.
        TK::Pointer => {
            // We want to know whether the thing we're pointing to is const or not.
            let pointee_ty = ty.pointee();
            let mut_ = if pointee_ty.is_const_qualified() { "const" } else { "mut" };
            Ok(format!("*{} {}", mut_, try!(trans_type(pointee_ty, renames))))
        },
        TK::Record
        | TK::Enum
        | TK::Typedef
        => {
            // **Note**: use the decl to avoid const-qualification.  This might not be correct.
            let decl_cur = ty.declaration();
            Ok(format!("{}{}", mod_qual(&decl_cur), decl_cur.spelling()))
        },
        TK::ConstantArray => {
            // NOTE(review): fixed-size arrays are emitted as raw *pointers* to
            // the array — confirm this is intended for inline C array members.
            let elem_ty = ty.array_element_type();
            let mut_ = if elem_ty.is_const_qualified() { "const" } else { "mut" };
            let len = ty.array_size();
            Ok(format!("*{} [{}; {}]", mut_, try!(trans_type(elem_ty, renames)), len))
        },
        TK::IncompleteArray => {
            // `T x[]` has no length; emit a bare pointer to the element type.
            let elem_ty = ty.array_element_type();
            let mut_ = if elem_ty.is_const_qualified() { "const" } else { "mut" };
            Ok(format!("*{} {}", mut_, try!(trans_type(elem_ty, renames))))
        },
        // **Note**: This isn't currently in `libc`, and does *not* have a platform-independent definition.
        TK::Bool
        | TK::UInt128
        | TK::Int128
        | TK::LongDouble
        | TK::Overload
        | TK::Dependent
        | TK::ObjCId
        | TK::ObjCClass
        | TK::ObjCSel
        | TK::Complex
        | TK::BlockPointer
        | TK::LValueReference
        | TK::RValueReference
        | TK::ObjCInterface
        | TK::ObjCObjectPointer
        | TK::FunctionNoProto
        | TK::FunctionProto
        | TK::Vector
        | TK::VariableArray
        | TK::DependentSizedArray
        | TK::MemberPointer
        => Err(format!("unsupported type {:?}", ty.kind()))
    }
}
/**
Calculate the feature set map for a given file.

The result is memoised in `cache.features`, keyed by file path: the token
scan only happens the first time a file is requested.
*/
fn get_all_features<'a>(file: clang::File, cache: &'a mut Cache) -> &'a BTreeMap<u32, Features> {
    let path = file.file_name();
    // Borrow the TU cache separately so the closure below can use it while
    // `cache.features` is mutably borrowed by `entry`.
    let tu_cache = &mut cache.tu;
    cache.features
        .entry(path)
        .or_insert_with(|| scan_features(get_token_lines(file, tu_cache)))
}
/**
Calculate the feature set at a given line.

Looks up the feature-set entry in force at `line` (i.e. the entry with the
greatest starting line that is `<= line`); if the file has no entry that
early, the default feature set is returned.
*/
fn get_features_at(file: clang::File, line: u32, cache: &mut Cache) -> Features {
    use std::collections::Bound;
    debug!("get_features_at({:?}, {}, _)", file.file_name(), line);
    let fmap = get_all_features(file, cache);
    // Last entry with key <= line.
    fmap.range(Bound::Unbounded, Bound::Included(&line)).next_back()
        .map(|(i, v)| {
            debug!(".. found: {}: {:?}", i, v);
            v.clone()
        })
        .unwrap_or_else(|| Features::default())
}
/**
Returns a given file as a sequence of `(line_number, tokens)` pairs.

Lines joined by a backslash continuation are merged: all of their tokens
are reported under the number of the *first* physical line of the group.
*/
fn get_token_lines(file: clang::File, tu_cache: &mut TuCache) -> Vec<(u32, Vec<clang::Token>)> {
    debug!("get_token_lines({:?}, _)", file.file_name());
    let path = file.file_name();
    // Architecture shouldn't matter since we just want the tokens.
    let tu = tu_cache.parse_translation_unit(&path, &ExpConfig::DUMMY_CFG).unwrap();
    // Get the set of line numbers which *contain* a line continuation.
    let cont_lines: Vec<_> = util::read_lines(&path)
        .map(|rs| rs.unwrap())
        .enumerate()
        .filter(|&(_, ref s)| s.trim_right().ends_with("\\"))
        // +1 because enumerate is 0-based, line numbers are 1-based
        .map(|(i, _)| (i + 1) as u32) // TODO: checked
        .collect();
    // Work out the starting line for continued lines.
    // Chains of continuations collapse transitively because `cont_lines`
    // ascends: line i+1 inherits whatever start was recorded for line i.
    let mut line_starts = HashMap::new();
    for i in cont_lines {
        let start = *line_starts.get(&i).unwrap_or(&i);
        line_starts.insert(i+1, start);
    }
    // Change each line's line number to be the *first* line the (possibly continued) line starts on.
    let remap_line_number = |tok: &clang::Token| {
        let l = tok.location().line();
        *line_starts.get(&l).unwrap_or(&l)
    };
    // Grab all the tokens, then re-group them by logical line.
    tu.tokenize().into_iter().group_by(remap_line_number).collect()
}
/**
Bundles together any caches we need for efficiency.
*/
struct Cache<'a> {
    /// Parsed Clang `TranslationUnit`s.
    tu: TuCache<'a>,
    /// Evaluated per-line feature sets, keyed by file path.  Each inner map
    /// is keyed by the line at which a feature state takes effect.
    features: HashMap<String, BTreeMap<u32, Features>>,
}
impl<'a> Cache<'a> {
fn new(index: Rc<Index>, gen_config: &'a GenConfig) -> Self {
Cache {
tu: TuCache::new(index, gen_config),
features: HashMap::new(),
}
}
/**
Iterates over all feature sets.
Note that this works by iterating over the underlying feature set maps. What this gives you *in effect* is the evaluated feature set of *every* conditional compilation branch.
*/
fn iter_features<F>(&mut self, mut f: F)
where F: FnMut(&str, u32, &Features) {
for (&ref name, &ref map) in self.features.iter() {
for (&line, &ref feat) in map.iter() {
f(name, line, feat);
}
}
}
}
/**
Used to centralise how output of translated items is done.

One of the major reasons for this is to consolidate disparate bindings.  That is, if the output for both x86 and x86-64 are the same, then they should use a *single* declaration with an appropriate `#[cfg]` attribute.

Note that `annot` is used for "annotations", which are free-form strings that may be emitted as comments in the output.  These are handy for identifying, for example, *where* a declaration originally came from, for debugging purposes.
*/
struct OutputItems {
    // Monotonically increasing id, recorded per item so insertion order can
    // be recovered when the items are later sorted for output.
    next_seq_id: u64,
    /// `[name => [(seq_id, feat, cconv, decl, annot)]]`
    fn_items: HashMap<String, Vec<(u64, Features, AbsCallConv, String, String)>>,
    /// `[name => [(seq_id, header, feat, decl, annot)]]`
    header_items: HashMap<String, Vec<(u64, String, Features, String, String)>>,
}
impl OutputItems {
    /// Creates an empty set of output items.
    fn new() -> Self {
        OutputItems {
            next_seq_id: 0,
            fn_items: HashMap::new(),
            header_items: HashMap::new(),
        }
    }
    /**
    Adds a function declaration.

    If the given `decl` matches an already existing `decl` with the same `name` and calling convention, the existing entry will have its feature set unioned with `feat`, and `annot` appended to its annotation (unless already present).
    */
    fn add_func_item(&mut self, name: String, feat: Features, cconv: AbsCallConv, decl: String, annot: String) {
        use std::mem::replace;
        debug!("add_func_item({:?}, {:?}, {:?}, {:?}, {:?})", name, feat, cconv, decl, annot);
        let decls = self.fn_items.entry(name).or_insert(vec![]);
        // Is there already a decl which is compatible with this one?
        for &mut (_, ref mut df, ref dcc, ref dd, ref mut da) in decls.iter_mut() {
            if *dd == decl && *dcc == cconv {
                debug!(".. merging");
                // The decls are the same.  Just combine the feature sets together.
                let new_df = replace(df, Features::default()).or(feat);
                *df = new_df;
                if *da != annot {
                    da.push_str(", ");
                    da.push_str(&annot);
                }
                return;
            }
        }
        // Add it to the set of decls.
        debug!(".. adding");
        decls.push((self.next_seq_id, feat, cconv, decl, annot));
        self.next_seq_id += 1;
    }
    /**
    Adds a header declaration.

    If the given `decl` matches an already existing `decl` with the same `name` and header, the existing entry will have its feature set unioned with `feat`, and `annot` appended to its annotation (unless already present).
    */
    fn add_header_item(&mut self, name: String, header: String, feat: Features, decl: String, annot: String) {
        use std::mem::replace;
        debug!("add_header_item({:?}, {:?}, {:?}, {:?}, {:?})", header, name, feat, decl, annot);
        let decls = self.header_items.entry(name).or_insert(vec![]);
        // Is there already a decl which is compatible with this one?
        for &mut (_, ref dh, ref mut df, ref dd, ref mut da) in decls.iter_mut() {
            if *dh == header && *dd == decl {
                debug!(".. merging");
                // The decls are the same.  Just combine the feature sets together.
                let new_df = replace(df, Features::default()).or(feat);
                *df = new_df;
                // Match `add_func_item`: don't append an annotation that is
                // already recorded (previously this duplicated it on every merge).
                if *da != annot {
                    da.push_str(", ");
                    da.push_str(&annot);
                }
                return;
            }
        }
        // Add it to the set of decls.
        debug!(".. adding");
        decls.push((self.next_seq_id, header, feat, decl, annot));
        self.next_seq_id += 1;
    }
}
/**
This cache owns the output files and saves us from constantly opening and closing them.
*/
struct OutputFiles<'a> {
    /// Output layout configuration (directory and file-name patterns).
    out_config: &'a OutConfig,
    /// Open files keyed by path, each paired with the (feature set, calling
    /// convention) of that file's currently open `extern` group, if any.
    files: HashMap<path::PathBuf, (fs::File, Option<(Features, AbsCallConv)>)>,
}
impl<'a> OutputFiles<'a> {
    /// Creates an empty cache; files are created lazily on first emit.
    fn new(out_config: &'a OutConfig) -> Self {
        OutputFiles {
            out_config: out_config,
            files: HashMap::new(),
        }
    }
    /// Writes one header declaration line: feature prefix, declaration, then
    /// the annotation as a trailing comment.
    fn emit_to_header(&mut self, name: &str, feat: &Features, decl: &str, annot: &str) {
        use std::io::prelude::*;
        let (file, _) = self.get_file(name, &self.out_config.header_path);
        writeln!(file, "{}{} /* {} */", feat, decl, annot).unwrap();
    }
    /// Writes one function declaration into the appropriate `extern` block,
    /// opening a new block whenever the (feature set, calling convention)
    /// pair differs from the previous call for the same file.
    ///
    /// NOTE(review): the closing `}` of the *final* extern block is not
    /// written here — presumably appended elsewhere after all items are
    /// emitted; confirm.
    fn emit_to_library(&mut self, name: &str, feat: &Features, cconv: &AbsCallConv, decl: &str, annot: &str) {
        use std::io::prelude::*;
        let (file, group) = self.get_file(name, &self.out_config.library_path);
        match *group {
            // Same group as the previous item: keep the current block open.
            Some((ref gf, ref gcc)) if gf == feat && gcc == cconv => (),
            // Group changed: close the old block and open a new one.
            Some(_) => {
                writeln!(file, "}}\n{}\nextern {:?} {{", feat, cconv.as_str()).unwrap();
            },
            // First item for this file: open the initial block.
            None => {
                writeln!(file, "{}\nextern {:?} {{", feat, cconv.as_str()).unwrap();
            }
        }
        writeln!(file, " {} /* {} */", decl, annot).unwrap();
        *group = Some((feat.clone(), cconv.clone()));
    }
    /// Returns the open file handle and current `extern`-group state for
    /// `name`, creating the file on first use.  `pattern` has `{}` replaced
    /// with `name` to form a path under `output_dir`.
    fn get_file<'b>(
        &'b mut self,
        name: &str,
        pattern: &str
    ) -> (&'b mut fs::File, &'b mut Option<(Features, AbsCallConv)>) {
        use std::path::PathBuf;
        let mut path = PathBuf::from(&self.out_config.output_dir);
        path.push(pattern.replace("{}", name));
        let fg = self.files.entry(path.clone())
            .or_insert_with(|| (fs::File::create(path).unwrap(), None));
        (&mut fg.0, &mut fg.1)
    }
}
/**
An "abstract" calling convention.

This is to answer the question: "if a function uses the C calling convention, is that the same thing as `"system"`, or do I have to *actually* say `"C"`?"

Without this, almost every Windows API call would need two decls: one with `extern "C"`, and one with `extern "stdcall"`.  Yuck.
*/
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
enum AbsCallConv {
    /// The convention differs from the platform's native one, so it must be
    /// spelled out as `extern "C"`.
    ExplicitlyC,
    /// The convention matches the platform's native one, so `extern "system"`
    /// suffices on every architecture.
    System,
}
impl AbsCallConv {
/// Gets the calling convention as a string, suitable for use with Rust's `extern`.
fn as_str(self) -> &'static str {
use self::AbsCallConv::*;
match self {
ExplicitlyC => "C",
System => "system",
}
}
}
/**
A `TranslationUnit` cache.
*/
pub struct TuCache<'a> {
    /// The Clang index used to parse translation units.
    index: Rc<Index>,
    /// Parsed translation units, keyed by (path, expansion config).
    cache: HashMap<TuCacheKey, Rc<TranslationUnit>>,
    /// Generator config supplying the base compiler switches.
    gen_config: &'a GenConfig,
}
impl<'a> TuCache<'a> {
    /// Creates an empty cache over the given Clang index and generator config.
    pub fn new(index: Rc<Index>, gen_config: &'a GenConfig) -> TuCache<'a> {
        TuCache {
            index: index,
            cache: HashMap::new(),
            gen_config: gen_config,
        }
    }
    /**
    Parse a translation unit with the given expansion config.

    Unsurprisingly, this will return a cached TU if one has already been parsed.
    */
    pub fn parse_translation_unit(
        &mut self,
        path: &str,
        exp_config: &ExpConfig,
    ) -> Result<Rc<TranslationUnit>, clang::ErrorCode> {
        let index_opts = TranslationUnitFlags::None
            | TranslationUnitFlags::DetailedPreprocessingRecord
            | TranslationUnitFlags::Incomplete
            ;
        let key = TuCacheKey::new(path, exp_config);
        info!("parsing tu {:?} with {:?} ({:?})", path, exp_config, key);
        if let Some(rc_tu) = self.cache.get(&key) {
            info!(".. already in cache");
            return Ok(rc_tu.clone())
        }
        // Generator-wide switches come first, then the per-expansion ones.
        let switches: Vec<String> = self.gen_config.switches().iter().cloned()
            .chain(exp_config.switches().into_iter())
            .collect();
        let tu = try!(self.index.parse_translation_unit(
            path,
            &switches,
            &[],
            index_opts,
        ));
        self.cache.insert(key, tu.clone());
        Ok(tu)
    }
}
/**
This is the unique key for each entry in the `TuCache`.

A translation unit is identified by the header path *and* the expansion
configuration it was parsed under.
*/
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct TuCacheKey(String, ExpConfig);
impl TuCacheKey {
    /// Builds a key from a header path and an expansion configuration.
    pub fn new(path: &str, exp_config: &ExpConfig) -> TuCacheKey {
        TuCacheKey(String::from(path), exp_config.clone())
    }
}
Re-enabled stub output (controlled by the `EMIT_STUBS` constant below).
/**
This module contains the bulk of the header-processing code, and the core structures.
Note that it specifically *does not* contain conditional expression handling, or feature set abstractions.
*/
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fs;
use std::path;
use std::rc::Rc;
use itertools::Itertools;
use {ExpConfig, GenConfig, NativeCallConv, OutConfig, WinVersion};
use clang::{
self,
Index, RcIndexExt,
TranslationUnit, TranslationUnitFlags,
Cursor, CursorKind,
};
use features::{Features, scan_features};
use util;
/// When `true`, placeholder "stub" declarations are emitted for constructs
/// that cannot yet be translated: unions, enums, macro definitions, and
/// structs whose field types fail to translate.
const EMIT_STUBS: bool = true;
/**
This is effectively the "entry point" for processing.  Given a header and a configuration, it attempts to generate a Rust binding.

The header is parsed once per expansion configuration; the resulting items
are merged in `OutputItems` before anything is written to disk.
*/
pub fn process_header(path: &str, gen_config: &GenConfig, out_config: &OutConfig) {
    info!("using clang version {}", clang::version());
    let index = Index::create(
        /*exclude_declarations_from_pch*/ false,
        /*display_diagnostics*/ false,
    );
    let mut items = OutputItems::new();
    let mut cache = Cache::new(index, gen_config);
    // Expand once for each expansion config.
    for exp_config in &gen_config.exp_configs {
        info!("expanding with config {:?}", exp_config);
        info!(".. switches: {:?}", exp_config.switches());
        let tu = cache.tu.parse_translation_unit(path, exp_config).ok().expect("parse TU");
        let renames = scan_for_renames(tu.clone(), gen_config);
        process_decls(tu, gen_config, exp_config, &mut items, &mut cache, &renames);
    }
    info!("generating output...");
    let mut files = OutputFiles::new(out_config);
    output_header_items(&items, &mut files);
    output_func_items(&items, &mut files, out_config);
    info!("sanity-checking features...");
    sanity_check_features(&mut cache);
}
/**
A helper method that yields the "next" cursor to process from both a primary sequence and a list of deferred items.

Deferred items take priority: whenever the current deferred iterator is
exhausted and `d` holds new deferrals, they are drained (in order) before
the primary sequence is consulted again.
*/
fn next_from(
    dc: &mut ::std::vec::IntoIter<Cursor>,
    d: &mut Vec<Cursor>,
    di: &mut Option<::std::vec::IntoIter<Cursor>>
) -> Option<Cursor> {
    use std::mem::replace;
    loop {
        // Drain the in-flight deferred iterator first, if there is one.
        if let Some(cur) = di.as_mut().and_then(|it| it.next()) {
            return Some(cur);
        }
        *di = None;
        if d.is_empty() {
            // Nothing deferred: fall back to the primary sequence.
            return dc.next();
        }
        // Newly deferred items: promote them to the active iterator and retry.
        *di = Some(replace(d, vec![]).into_iter());
    }
}
/// Tracks tag-type renames discovered during the typedef scan.
struct Renames {
    /// Maps a tag type's *definition* cursor to the typedef cursor that
    /// supplies its canonical name.
    renames: HashMap<Cursor, Cursor>,
    /// Typedef cursors consumed by a rename; these must not be emitted as
    /// their own declarations.
    invalidations: HashSet<Cursor>,
}
impl Renames {
fn add_rename(&mut self, from: Cursor, to: Cursor) {
assert!(!self.is_renamed(&from), "definition is already renamed");
assert!(!self.invalidations.contains(&to), "already have invalidation");
self.renames.insert(from, to.clone());
self.invalidations.insert(to);
}
fn is_invalidated(&self, decl_cur: &Cursor) -> bool {
self.invalidations.contains(decl_cur)
}
fn is_renamed(&self, defn_cur: &Cursor) -> bool {
self.renames.contains_key(defn_cur)
}
fn rename_decl<'a, 'b>(&'a self, decl_cur: &'b Cursor) -> Result<&'a Cursor, &'b Cursor> {
if let Some(&ref cur) = self.renames.get(decl_cur) {
Ok(cur)
} else {
Err(decl_cur)
}
}
fn rename_ty(&self, ty: clang::Type) -> Result<clang::Cursor, clang::Type> {
use clang::TypeKind as TK;
// We're only concerned about structs, enums and unions.
match ty.kind() {
TK::Record | TK::Enum => {
let ty_decl = match ty.declaration().definition() {
Some(cur) => cur,
None => return Err(ty)
};
if let Some(&ref cur) = self.renames.get(&ty_decl) {
return Ok(cur.clone());
}
Err(ty)
},
_ => Err(ty)
}
}
}
impl Default for Renames {
fn default() -> Self {
Renames {
renames: HashMap::new(),
invalidations: HashSet::new(),
}
}
}
/**
Scans every top-level declaration (plus any nested declarations deferred
along the way) for typedefs that should rename a tag type.

The goal here is to find two kinds of things:

1. Types which have *no* name in tag space, but are given one via typedef.
2. Types which *have* a name in tag space, but the *canonical* name is given via typedef.

We handle both of these by scanning through all the typedefs.  If we find
one whose subject is one of the above types, we record the subject's cursor
and the *new* name, as well as an "invalidation" of the typedef's cursor.
*/
fn scan_for_renames(tu: Rc<TranslationUnit>, gen_config: &GenConfig) -> Renames {
    info!("scanning for renames...");
    let mut renames = Renames::default();
    let mut decl_curs = tu.cursor().children().into_iter();
    let mut deferred: Vec<Cursor> = vec![];
    let mut deferred_iter = None;
    while let Some(decl_cur) = next_from(&mut decl_curs, &mut deferred, &mut deferred_iter) {
        // Ignored declarations cannot trigger renames.
        if gen_config.should_ignore(&decl_cur) {
            continue;
        }
        let rename = scan_decl_for_rename(decl_cur, gen_config, &renames, &mut |cur| deferred.push(cur));
        if let Some((from, to)) = rename {
            renames.add_rename(from, to);
        }
    }
    renames
}
/**
Examines one declaration and decides whether it should trigger a tag-type
rename.

Returns `Some((definition_cursor, typedef_cursor))` when `decl_cur` is a
typedef that directly aliases a struct/union/enum definition whose tag name
is non-canonical.  Note that the aliased definition's child cursors are
passed to `defer` *before* the canonical-name check, so children are
deferred even when no rename results.
*/
fn scan_decl_for_rename<Defer>(
    decl_cur: Cursor,
    gen_config: &GenConfig,
    renames: &Renames,
    mut defer: Defer,
) -> Option<(Cursor, Cursor)>
where Defer: FnMut(Cursor) {
    use clang::CursorKind as CK;
    use clang::TypeKind as TK;
    // Only typedefs can introduce renames.
    match decl_cur.kind() {
        CK::TypedefDecl => (),
        _ => return None
    }
    debug!("scan_decl_for_rename({}, ..)", decl_cur);
    let ty = decl_cur.typedef_decl_underlying_type();
    // Resolve unexposed types if possible.
    let ty = match ty.kind() {
        TK::Unexposed => ty.canonical(),
        _ => ty
    };
    // We don't want to even look at things that aren't just direct ADT typedefs.
    match ty.kind() {
        TK::Record | TK::Enum => (),
        other => {
            debug!(".. ignoring indirect typedef: {:?}", other);
            return None;
        }
    }
    // Get the original type definition.
    let ty_defn = ty.declaration().definition();
    let ty_defn = match ty_defn {
        Some(c) => c,
        None => {
            debug!(".. ignoring; has no definition");
            return None;
        }
    };
    // Double-check that this is really what we think it is.
    match ty_defn.kind() {
        CK::StructDecl | CK::EnumDecl | CK::UnionDecl => (),
        other => {
            debug!(".. ignoring non-adt target: {:?}", other);
            return None;
        }
    };
    // Defer all the child cursors.
    for child in ty_defn.children() { defer(child); }
    // Check if it's already canonical.
    let ty_name = ty_defn.spelling();
    if !gen_config.is_tag_name_non_canonical(&ty_name) {
        debug!(".. ignoring canonical name {:?}", ty_name);
        return None;
    }
    // Finally, make sure we aren't renaming it more than once.
    if renames.is_renamed(&ty_defn) {
        debug!(".. ignoring; already renamed");
        return None;
    }
    // Do the rename.
    debug!(".. renaming {} to {}", ty_defn, decl_cur);
    Some((ty_defn, decl_cur))
}
/**
Walks every top-level declaration in `tu` and translates each into output items.

Cursors are pulled through `next_from`, so cursors deferred during processing
(e.g. nested types pushed into `deferred` by `process_decl`) are presumably
revisited after the direct children — confirm against `next_from`.
*/
fn process_decls(
    tu: Rc<TranslationUnit>,
    gen_config: &GenConfig,
    exp_config: &ExpConfig,
    output: &mut OutputItems,
    cache: &mut Cache,
    renames: &Renames,
) {
    // Feature mask implied by the target architecture.
    let feat_mask = exp_config.arch.to_features();
    let mut decl_curs = tu.cursor().children().into_iter();
    let mut deferred: Vec<Cursor> = vec![];
    let mut deferred_iter = None;
    while let Some(decl_cur) = next_from(&mut decl_curs, &mut deferred, &mut deferred_iter) {
        /*
        Something to be aware of: a symbol might match the ignore patterns, but have been renamed to something that *doesn't*. We need to check for this.
        */
        if gen_config.should_ignore(&decl_cur) && !renames.is_renamed(&decl_cur) {
            debug!("ignoring: {}", decl_cur);
        } else if renames.is_invalidated(&decl_cur) {
            // This cursor was consumed by a rename; it must not be emitted again.
            debug!("invalidated: {}", decl_cur);
        } else {
            process_decl(
                decl_cur,
                feat_mask.clone(),
                exp_config.native_cc,
                output,
                cache,
                renames,
                &mut |cur| deferred.push(cur),
            );
        }
    }
}
/**
Writes all header (type/macro) items to their output files.

Items are flattened into `(header, seq_id, feat, decl, annot)` tuples, sorted
(so items from the same header appear together in original sequence order),
then grouped by `(header, feat)` and emitted group-by-group.
*/
fn output_header_items(items: &OutputItems, output: &mut OutputFiles) {
    let mut lines = vec![];
    for (_, decls) in &items.header_items {
        for &(idx, ref header, ref feat, ref decl, ref annot) in decls {
            // Re-key with `header` first so the sort clusters by header file.
            lines.push((header, idx, feat, decl, annot));
        }
    }
    lines.sort();
    let lines = lines.into_iter()
        .group_by_lazy(|&(header, _, feat, _, _)| (header, feat));
    for ((header, feat), group) in &lines {
        for (_, _, _, decl, annot) in group {
            output.emit_to_header(header, feat, decl, annot);
        }
    }
}
/**
Writes all function items to their output library files.

A function may belong to more than one import library (per `out_config`), in
which case it is emitted once per library. Items are sorted and grouped by
`(lib, feat, cconv)` so that `emit_to_library` can batch consecutive items
into a single `extern` block.
*/
fn output_func_items(items: &OutputItems, output: &mut OutputFiles, out_config: &OutConfig) {
    let mut lines = vec![];
    for (name, decls) in &items.fn_items {
        for &(_, ref feat, ref cconv, ref decl, ref annot) in decls {
            // One output line per library the function is exported from.
            for &ref lib in out_config.get_fn_libs(name) {
                lines.push((lib, feat, name, cconv, decl, annot));
            }
        }
    }
    lines.sort();
    let lines = lines.into_iter()
        .group_by_lazy(|&(lib, feat, _, cconv, _, _)| (lib, feat, cconv));
    for ((lib, feat, cconv), group) in &lines {
        for (_, _, _, _, decl, annot) in group {
            output.emit_to_library(lib, feat, cconv, decl, annot);
        }
    }
}
/**
Scans every cached feature set and warns about ones likely to upset expansion.

This is purely diagnostic: it emits `warn!` logs but changes nothing.
*/
fn sanity_check_features(cache: &mut Cache) {
    use std::collections::BTreeSet;
    // Upper bounds of any non-simple version ranges, reported at the end.
    let mut weird_vers = BTreeSet::new();
    cache.iter_features(|path, line, &ref feat| {
        use features::Partitions;
        /*
        What we're looking for are any features that might mess up the expansion. This currently means:
        - Features with upper limits on versions.
        - Features that *do not* target the desktop.
        */
        let mut suspect = vec![];
        if let Some(ref parts) = feat.parts {
            if (parts.clone() & Partitions::DesktopApp).is_empty() {
                suspect.push("non-desktop-app");
            }
        }
        if let Some(ref winver) = feat.winver {
            if !winver.is_simple() {
                // Record each range's end so it can be decoded below.
                for &ref range in winver.ranges() {
                    weird_vers.insert(range.end);
                }
                suspect.push("complex-winver");
            }
        }
        if suspect.len() != 0 {
            warn!("suspect feature set: {}:{}: {} {:?}",
                path, line, suspect.connect(", "), feat);
        }
    });
    if weird_vers.len() > 0 {
        warn!("suspect versions:");
        for ver in weird_vers {
            warn!(".. 0x{:08x} - {:?}",
                ver, WinVersion::from_u32_round_up(ver));
        }
    }
}
/**
Processes a single declaration.

Computes the declaration's feature set from its source location, masks it with
the architecture's feature set (panicking if the combination is contradictory),
then dispatches on the cursor kind to the appropriate `process_*` translator.
Unsupported kinds and translation failures are logged as
"could-not-translate" warnings rather than aborting.
*/
fn process_decl<Defer>(
    decl_cur: Cursor,
    feat_mask: Features,
    native_cc: NativeCallConv,
    output: &mut OutputItems,
    cache: &mut Cache,
    renames: &Renames,
    defer: &mut Defer,
)
where Defer: FnMut(Cursor)
{
    use clang::CursorKind as CK;
    // Includes and macro *uses* carry no declaration to translate.
    let decl_kind = match decl_cur.kind() {
        CK::InclusionDirective
        | CK::MacroInstantiation
        => return,
        kind => kind
    };
    let decl_loc = decl_cur.location();
    debug!("process_decl: {}: {:?} {}",
        decl_loc.display_short(),
        decl_kind,
        decl_cur.spelling());
    // Feature set active at this declaration's line, per conditional compilation.
    let feat = decl_loc.file()
        .map(|file| get_features_at(file, decl_loc.line(), cache))
        .unwrap_or_else(|| Features::default());
    debug!(".. process_decl feat: {:?}", feat);
    /*
    This is kind of a pain, but as it turns out, different architectures can cause some things to behave in weird ways. For example, `DWORD` might be a typedef on one arch, but a macro on another, which leads to a different expansion. Hooray!
    */
    let feat = match feat.and(feat_mask).check_valid() {
        Ok(feat) => feat,
        Err(err) => {
            /*
            This is *very definitely* a problem. This means that the pre-processor has emitted code that our feature set says we shouldn't ever reach!
            This generally means one of two things: either the feature set calculation is wrong *or* the set of pre-defined symbols is incomplete/incorrect.
            */
            panic!(".. invalid feature set for {}: {}", decl_cur, err)
        },
    };
    // Kept for the error message below; `decl_cur` is moved into the handlers.
    let decl_cur_copy = decl_cur.clone();
    let result = match decl_kind {
        CK::InclusionDirective
        | CK::MacroInstantiation
        => unreachable!(),
        CK::StructDecl => process_struct_decl(decl_cur, output, feat, renames, defer),
        CK::UnionDecl => process_union_decl(decl_cur, output, feat, renames, defer),
        CK::EnumDecl => process_enum_decl(decl_cur, output, feat, renames, defer),
        CK::FunctionDecl => process_function_decl(decl_cur, output, feat, renames, native_cc),
        CK::TypedefDecl => process_typedef_decl(decl_cur, output, feat, renames),
        CK::MacroDefinition => process_macro_defn(decl_cur, output, feat),
        kind => {
            warn!("could-not-translate unsupported {:?} {} at {}",
                kind, decl_cur.spelling(), decl_loc.display_short());
            Ok(())
        }
    };
    if let Err(err) = result {
        warn!("could-not-translate misc {}: {}", decl_cur_copy, err);
    }
}
/// Returns the name of the source file containing `cur`, used as the header
/// stem when routing output items. Panics if the cursor has no file.
fn file_stem(cur: &Cursor) -> String {
    let file = cur.location().file();
    file.expect("valid file for file_stem").name()
}
/**
Process a single structure declaration.

Forward declarations with a definition elsewhere are skipped; declarations
with *no* definition anywhere become opaque struct stubs. Otherwise each
field's type is translated and a `#[repr(C)]` struct declaration is emitted.
Nested struct/union children are deferred for later processing.
*/
fn process_struct_decl<Defer>(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    renames: &Renames,
    defer: &mut Defer,
) -> Result<(), String>
where Defer: FnMut(Cursor)
{
    use clang::CursorKind as CK;
    debug!("process_struct_decl({}, ..)", decl_cur);
    // A rename (from a canonicalising typedef) overrides the tag name AND
    // decides which header the item is routed to.
    let (name, header) = match renames.rename_decl(&decl_cur) {
        Ok(cur) => {
            debug!(".. was renamed to {}", cur);
            (cur.spelling(), file_stem(&cur))
        },
        Err(cur) => (try!(name_for_maybe_anon(&cur)), file_stem(&cur))
    };
    let annot = decl_cur.location().display_short().to_string();
    match (decl_cur.is_definition(), decl_cur.definition().is_none()) {
        (false, false) => {
            // Not a definition, but one exists elsewhere: it will be handled there.
            debug!(".. skipping forward declaration");
            return Ok(());
        },
        (false, true) => {
            // There *is no* definition!
            debug!(".. no definition found");
            let decl = format!("#[repr(C)] pub struct {};", name);
            output.add_header_item(name, header, feat, decl, annot);
            return Ok(())
        },
        (true, _) => ()
    }
    let mut fields = vec![];
    for child_cur in decl_cur.children() {
        match child_cur.kind() {
            CK::StructDecl
            | CK::UnionDecl
            => {
                // Defer.
                defer(child_cur);
            },
            CK::FieldDecl => {
                let name = child_cur.spelling();
                let ty = match trans_type(child_cur.type_(), renames) {
                    Ok(ty) => ty,
                    Err(err) => {
                        if EMIT_STUBS {
                            // TODO: just stub for now.
                            let decl = format!("#[repr(C)] pub struct {}; /* ERR STUB! */", name);
                            output.add_header_item(name, header, feat, decl, annot);
                        }
                        return Err(err);
                    }
                };
                fields.push(format!("{}: {}", name, ty));
            },
            CK::UnexposedAttr => {
                // Skip.
            },
            other => panic!("nyi {:?}", other)
        }
    }
    let decl = match fields.len() {
        // Why did this have to be special-cased? :(
        0 => format!(
            "#[repr(C)] pub struct {name};",
            name = name,
        ),
        _ => format!(
            "#[repr(C)] pub struct {name} {{ {fields} }}",
            name = name,
            fields = fields.connect(", "),
        )
    };
    output.add_header_item(name, header, feat, decl, annot);
    Ok(())
}
/**
Process a single union declaration.

Unions are not translated yet; when stub emission is enabled, an opaque
struct stub is written in their place.
*/
fn process_union_decl<Defer>(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    _renames: &Renames,
    _defer: &mut Defer,
) -> Result<(), String>
where Defer: FnMut(Cursor)
{
    debug!("process_union_decl({}, ..)", decl_cur);
    let union_name = try!(name_for_maybe_anon(&decl_cur));
    let header_stem = file_stem(&decl_cur);
    let src_note = decl_cur.location().display_short().to_string();
    if EMIT_STUBS {
        let stub_decl = format!(
            "#[repr(C)] pub /*union*/ struct {name}; /* STUB! */",
            name = union_name,
        );
        output.add_header_item(union_name, header_stem, feat, stub_decl, src_note);
    }
    Ok(())
}
/**
Process a single enum declaration.

Enums are not translated yet; when stub emission is enabled, a stub
declaration is written in their place.
*/
fn process_enum_decl<Defer>(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    _renames: &Renames,
    _defer: &mut Defer,
) -> Result<(), String>
where Defer: FnMut(Cursor)
{
    debug!("process_enum_decl({}, ..)", decl_cur);
    let enum_name = try!(name_for_maybe_anon(&decl_cur));
    let header_stem = file_stem(&decl_cur);
    let src_note = decl_cur.location().display_short().to_string();
    if EMIT_STUBS {
        let stub_decl = format!(
            "#[repr(C)] pub enum {name}; /* STUB! */",
            name = enum_name,
        );
        output.add_header_item(enum_name, header_stem, feat, stub_decl, src_note);
    }
    Ok(())
}
/**
Works out a name for the given structure, even if it doesn't otherwise *have* one.

Currently an empty spelling (i.e. an anonymous type) is reported as an error
rather than synthesised.
*/
fn name_for_maybe_anon(decl_cur: &Cursor) -> Result<String, String> {
    // TODO: Use clang_Cursor_isAnonymous once it's released.
    let spelling = decl_cur.spelling();
    match spelling.is_empty() {
        /*
        This is *probably* an anonymous type. We need to give it a name that will be both reasonable *and* stable across invocations.
        */
        true => Err(format!("anonymous-struct {}", decl_cur)),
        false => Ok(spelling),
    }
}
/**
Process a single function declaration.

Likely-inline functions are rejected, the C calling convention is mapped onto
an abstract one (relative to the platform's native convention), and the
signature is translated into a Rust `pub fn` declaration for an `extern` block.
*/
fn process_function_decl(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    renames: &Renames,
    native_cc: NativeCallConv
) -> Result<(), String> {
    use clang::CallingConv as CC;
    use ::NativeCallConv as NCC;
    debug!("process_function_decl({}, _)", decl_cur);
    // Is this an inline function?
    // Heuristic: a trailing compound statement means the body is present.
    let children = decl_cur.children();
    if children.len() > 0 && children.last().unwrap().kind() == CursorKind::CompoundStmt {
        // Err... *might be*
        return Err("inline-fn".into());
    }
    let ty = decl_cur.type_();
    // When the function's convention *is* the platform's native one, emit
    // `extern "system"` so a single decl serves multiple targets.
    let cconv = match (ty.calling_conv(), native_cc) {
        (CC::C, NCC::C) => AbsCallConv::System,
        (CC::C, _) => AbsCallConv::ExplicitlyC,
        (CC::X86StdCall, NCC::Stdcall) => AbsCallConv::System,
        (cconv, _) => {
            return Err(format!("bad-cconv {:?}", cconv));
        }
    };
    let name = decl_cur.spelling();
    // `void` return maps to no return type at all.
    let res_ty = if ty.result().kind() == clang::TypeKind::Void {
        String::new()
    } else {
        format!(" -> {}", try!(trans_type(ty.result(), renames)))
    };
    let arg_tys: Vec<String> = try!(ty.args().into_iter().map(|ty| trans_type(ty, renames)).collect());
    let arg_tys = arg_tys.connect(", ");
    let decl = format!(
        r#"pub fn {name}({arg_tys}){res_ty};"#,
        name = name,
        arg_tys = arg_tys,
        res_ty = res_ty,
    );
    let annot = decl_cur.location().display_short().to_string();
    output.add_func_item(name, feat, cconv, decl, annot);
    Ok(())
}
/**
Process a single typedef declaration, emitting a `pub type` alias.

(The doc comment previously said "structure declaration" — a copy-paste
error from `process_struct_decl`.)
*/
fn process_typedef_decl(
    decl_cur: Cursor,
    output: &mut OutputItems,
    feat: Features,
    renames: &Renames,
) -> Result<(), String> {
    debug!("process_typedef_decl({}, ..)", decl_cur);
    let name = decl_cur.spelling();
    let header = file_stem(&decl_cur);
    // Translate the aliased type; failure propagates as "could-not-translate".
    let ty = decl_cur.typedef_decl_underlying_type();
    let ty = try!(trans_type(ty, renames));
    let decl = format!("pub type {} = {};", name, ty);
    let annot = decl_cur.location().display_short().to_string();
    output.add_header_item(name, header, feat, decl, annot);
    Ok(())
}
/**
Process a single macro definition.

The definition is re-assembled from its tokens and (when stub emission is
enabled) written out as a `// #define ...` comment. Whether the macro is
function-like is guessed from token adjacency: if the token after the name
starts in the very next column, it is presumably a `(` belonging to a
parameter list — TODO confirm this heuristic against clang's token extents.
*/
fn process_macro_defn(defn_cur: Cursor, output: &mut OutputItems, feat: Features) -> Result<(), String> {
    debug!("process_macro_defn({}, ..)", defn_cur);
    let name = defn_cur.spelling();
    let header = file_stem(&defn_cur);
    let annot = defn_cur.location().display_short().to_string();
    // Note: we skip the last token because it's just a newline.
    let toks = defn_cur.tokenize();
    // NOTE(review): `toks.at(0)` looks like it would panic on an empty token
    // stream — presumably a definition always has at least its name token.
    let first_tok = toks.at(0);
    let next_tok = toks.get(1);
    // Drop the macro name (first) and trailing newline (last).
    let toks: Vec<_> = toks.into_iter().dropping(1).dropping_back(1).map(|t| t.spelling()).collect();
    // Work out whether this is a functionish macro or not.
    let is_fn_macro = {
        let first_col = first_tok.extent().expect("extent for macro first tok").end().column();
        let next_col = next_tok.map(|t| t.extent().expect("extent for macro next tok").end().column()).unwrap_or(!0);
        first_col + 1 == next_col
    };
    if EMIT_STUBS {
        // Function-like macros keep `NAME(` glued together; object-like ones
        // get a space between name and body.
        let spacer = if is_fn_macro { "" } else { " " };
        let defn = toks.connect(" ");
        let decl = format!("// #define {}{}{}", name, spacer, defn);
        output.add_header_item(name, header, feat, decl, annot);
    }
    Ok(())
}
/**
Translate a type into an equivalent Rust type reference.
Note that this **is not** for translating type declarations; you cannot just pass a structure definition.

Renamed types short-circuit to the rename target's (qualified) name. Primitive
types map onto `libc` aliases; pointers and arrays recurse on their element
type. Anything else is reported as unsupported.
*/
fn trans_type(ty: clang::Type, renames: &Renames) -> Result<String, String> {
    use clang::TypeKind as TK;
    debug!("trans_type({:?} {:?}, _)", ty.kind(), ty.spelling());
    /**
    This works out the module qualifier for a given type. This is intended to let you put types into files based on their source header.
    */
    fn mod_qual(cur: &Cursor) -> String {
        let file = cur.location().file();
        match file.map(|f| f.name()) {
            Some(name) => format!("::{}::", name),
            None => String::new()
        }
    }
    let ty = match renames.rename_ty(ty) {
        Ok(cur) => {
            // Use whatever we've been given and don't look too closely...
            let qual = mod_qual(&cur);
            return Ok(format!("{}{}", qual, cur.spelling()));
        },
        Err(ty) => ty
    };
    match ty.kind() {
        TK::Invalid => Err("invalid type".into()),
        TK::Unexposed => {
            // Retry once with the canonical type; give up if still unexposed.
            let canon_ty = ty.canonical();
            match canon_ty.kind() {
                TK::Unexposed => Err(format!("recursively unexposed type {}", canon_ty.spelling())),
                _ => trans_type(canon_ty, renames)
            }
        },
        // Basic types.
        TK::Void => Ok("libc::c_void".into()),
        TK::Char_U | TK::UChar => Ok("libc::c_uchar".into()),
        TK::Char16 => Ok("u16".into()),
        // **Note**: *not* `char` because C++ doesn't appear to guarantee that a value of type char32_t is a valid UTF-32 code unit.
        TK::Char32 => Ok("u32".into()),
        TK::UShort => Ok("libc::c_ushort".into()),
        TK::UInt => Ok("libc::c_uint".into()),
        TK::ULong => Ok("libc::c_ulong".into()),
        TK::ULongLong => Ok("libc::c_ulonglong".into()),
        TK::Char_S => Ok("libc::c_schar".into()),
        TK::SChar => Ok("libc::c_schar".into()),
        TK::WChar => Ok("libc::wchar_t".into()),
        TK::Short => Ok("libc::c_short".into()),
        TK::Int => Ok("libc::c_int".into()),
        TK::Long => Ok("libc::c_long".into()),
        TK::LongLong => Ok("libc::c_longlong".into()),
        TK::Float => Ok("libc::c_float".into()),
        TK::Double => Ok("libc::c_double".into()),
        TK::NullPtr => Ok("*mut libc::c_void".into()),
        // Constructed types.
        TK::Pointer => {
            // We want to know whether the thing we're pointing to is const or not.
            let pointee_ty = ty.pointee();
            let mut_ = if pointee_ty.is_const_qualified() { "const" } else { "mut" };
            Ok(format!("*{} {}", mut_, try!(trans_type(pointee_ty, renames))))
        },
        TK::Record
        | TK::Enum
        | TK::Typedef
        => {
            // **Note**: use the decl to avoid const-qualification. This might not be correct.
            let decl_cur = ty.declaration();
            Ok(format!("{}{}", mod_qual(&decl_cur), decl_cur.spelling()))
        },
        TK::ConstantArray => {
            // Fixed-size arrays are emitted as pointers to fixed-size arrays.
            let elem_ty = ty.array_element_type();
            let mut_ = if elem_ty.is_const_qualified() { "const" } else { "mut" };
            let len = ty.array_size();
            Ok(format!("*{} [{}; {}]", mut_, try!(trans_type(elem_ty, renames)), len))
        },
        TK::IncompleteArray => {
            // Unknown length: degrade to a plain pointer to the element type.
            let elem_ty = ty.array_element_type();
            let mut_ = if elem_ty.is_const_qualified() { "const" } else { "mut" };
            Ok(format!("*{} {}", mut_, try!(trans_type(elem_ty, renames))))
        },
        // **Note**: This isn't currently in `libc`, and does *not* have a platform-independent definition.
        TK::Bool
        | TK::UInt128
        | TK::Int128
        | TK::LongDouble
        | TK::Overload
        | TK::Dependent
        | TK::ObjCId
        | TK::ObjCClass
        | TK::ObjCSel
        | TK::Complex
        | TK::BlockPointer
        | TK::LValueReference
        | TK::RValueReference
        | TK::ObjCInterface
        | TK::ObjCObjectPointer
        | TK::FunctionNoProto
        | TK::FunctionProto
        | TK::Vector
        | TK::VariableArray
        | TK::DependentSizedArray
        | TK::MemberPointer
        => Err(format!("unsupported type {:?}", ty.kind()))
    }
}
/**
Calculate the feature set map for a given file.

The map is computed once per file (by tokenising the file and scanning its
preprocessor conditionals) and memoised in `cache.features`.
*/
fn get_all_features<'a>(file: clang::File, cache: &'a mut Cache) -> &'a BTreeMap<u32, Features> {
    let path = file.file_name();
    // Split-borrow: the closure below needs the TU cache while we hold a
    // mutable borrow of the feature cache.
    let tu_cache = &mut cache.tu;
    let fmap = cache.features.entry(path.clone()).or_insert_with(||
        scan_features(get_token_lines(file, tu_cache)));
    fmap
}
/**
Calculate the feature set at a given line.

Looks up the closest feature-map entry at or before `line`; lines before the
first entry get the default (empty) feature set.
*/
fn get_features_at(file: clang::File, line: u32, cache: &mut Cache) -> Features {
    use std::collections::Bound;
    debug!("get_features_at({:?}, {}, _)", file.file_name(), line);
    let fmap = get_all_features(file, cache);
    let nearest = fmap.range(Bound::Unbounded, Bound::Included(&line)).next_back();
    match nearest {
        Some((i, v)) => {
            debug!(".. found: {}: {:?}", i, v);
            v.clone()
        },
        None => Features::default(),
    }
}
/**
Returns a given file as a sequence of `(line_number, tokens)` pairs.

Lines joined by backslash continuations are merged: every token on a continued
line is attributed to the *first* physical line of the logical line.
*/
fn get_token_lines(file: clang::File, tu_cache: &mut TuCache) -> Vec<(u32, Vec<clang::Token>)> {
    debug!("get_token_lines({:?}, _)", file.file_name());
    let path = file.file_name();
    // Architecture shouldn't matter since we just want the tokens.
    let tu = tu_cache.parse_translation_unit(&path, &ExpConfig::DUMMY_CFG).unwrap();
    // Get the set of line numbers which *contain* a line continuation.
    let cont_lines: Vec<_> = util::read_lines(&path)
        .map(|rs| rs.unwrap())
        .enumerate()
        .filter(|&(_, ref s)| s.trim_right().ends_with("\\"))
        // +1 because enumerate is 0-based, line numbers are 1-based
        .map(|(i, _)| (i + 1) as u32) // TODO: checked
        .collect();
    // Work out the starting line for continued lines.
    // `cont_lines` is ascending, so a run of continuations chains back to the
    // run's first line.
    let mut line_starts = HashMap::new();
    for i in cont_lines {
        let start = *line_starts.get(&i).unwrap_or(&i);
        line_starts.insert(i+1, start);
    }
    // Change each line's line number to be the *first* line the (possibly continued) line starts on.
    let remap_line_number = |tok: &clang::Token| {
        let l = tok.location().line();
        *line_starts.get(&l).unwrap_or(&l)
    };
    // Grab all the tokens, then re-group them by logical line.
    tu.tokenize().into_iter().group_by(remap_line_number).collect()
}
/**
Bundles together any caches we need for efficiency.

Owned mutably by the main processing loop; both members are keyed so repeated
work on the same file is done once.
*/
struct Cache<'a> {
    /// Parsed Clang `TranslationUnit`s.
    tu: TuCache<'a>,
    /// Evaluated per-line feature sets, keyed by file path.
    features: HashMap<String, BTreeMap<u32, Features>>,
}
impl<'a> Cache<'a> {
fn new(index: Rc<Index>, gen_config: &'a GenConfig) -> Self {
Cache {
tu: TuCache::new(index, gen_config),
features: HashMap::new(),
}
}
/**
Iterates over all feature sets.
Note that this works by iterating over the underlying feature set maps. What this gives you *in effect* is the evaluated feature set of *every* conditional compilation branch.
*/
fn iter_features<F>(&mut self, mut f: F)
where F: FnMut(&str, u32, &Features) {
for (&ref name, &ref map) in self.features.iter() {
for (&line, &ref feat) in map.iter() {
f(name, line, feat);
}
}
}
}
/**
Used to centralise how output of translated items is done.
One of the major reasons for this is to consolidate disparate bindings. That is, if the output for both x86 and x86-64 are the same, then they should use a *single* declaration with an appropriate `#[cfg]` attribute.
Note that `annot` is used for "annotations", which are free-form strings that may be emitted as comments in the output. These are handy for identifying, for example, *where* a declaration originally came from, for debugging purposes.
*/
struct OutputItems {
    // Monotonic id used to preserve insertion order when items are later sorted.
    next_seq_id: u64,
    /// `[name => [(seq_id, feat, cconv, decl, annot)]]`
    fn_items: HashMap<String, Vec<(u64, Features, AbsCallConv, String, String)>>,
    /// `[name => [(seq_id, header, feat, decl, annot)]]`
    header_items: HashMap<String, Vec<(u64, String, Features, String, String)>>,
}
impl OutputItems {
    /// Creates an empty item store.
    fn new() -> Self {
        OutputItems {
            next_seq_id: 0,
            fn_items: HashMap::new(),
            header_items: HashMap::new(),
        }
    }
    /**
    Adds a function declaration.
    If the given `decl` matches an already existing `decl` with the same `name`, the existing entry will have its feature set unioned with `feat`, and `annot` appended to its annotation.
    */
    fn add_func_item(&mut self, name: String, feat: Features, cconv: AbsCallConv, decl: String, annot: String) {
        use std::mem::replace;
        debug!("add_func_item({:?}, {:?}, {:?}, {:?}, {:?})", name, feat, cconv, decl, annot);
        let decls = self.fn_items.entry(name).or_insert(vec![]);
        // Is there already a decl which is compatible with this one?
        for &mut (_, ref mut df, ref dcc, ref dd, ref mut da) in decls.iter_mut() {
            if *dd == decl && *dcc == cconv {
                debug!(".. merging");
                // The decls are the same. Just combine the feature sets together.
                let new_df = replace(df, Features::default()).or(feat);
                *df = new_df;
                // Don't repeat identical annotations.
                if *da != annot {
                    da.push_str(", ");
                    da.push_str(&annot);
                }
                return;
            }
        }
        // Add it to the set of decls.
        debug!(".. adding");
        decls.push((self.next_seq_id, feat, cconv, decl, annot));
        self.next_seq_id += 1;
    }
    /**
    Adds a header declaration.
    If the given `decl` matches an already existing `decl` with the same `name`, the existing entry will have its feature set unioned with `feat`, and `annot` appended to its annotation.
    */
    fn add_header_item(&mut self, name: String, header: String, feat: Features, decl: String, annot: String) {
        use std::mem::replace;
        // BUGFIX: log arguments in signature order (`name` before `header`),
        // matching `add_func_item`; previously `header` was logged first.
        debug!("add_header_item({:?}, {:?}, {:?}, {:?}, {:?})", name, header, feat, decl, annot);
        let decls = self.header_items.entry(name).or_insert(vec![]);
        // Is there already a decl which is compatible with this one?
        for &mut (_, ref dh, ref mut df, ref dd, ref mut da) in decls.iter_mut() {
            if *dh == header && *dd == decl {
                debug!(".. merging");
                // The decls are the same. Just combine the feature sets together.
                let new_df = replace(df, Features::default()).or(feat);
                *df = new_df;
                // BUGFIX: guard against appending duplicate annotations, for
                // consistency with `add_func_item` (previously the annotation
                // was appended unconditionally on every merge).
                if *da != annot {
                    da.push_str(", ");
                    da.push_str(&annot);
                }
                return;
            }
        }
        // Add it to the set of decls.
        debug!(".. adding");
        decls.push((self.next_seq_id, header, feat, decl, annot));
        self.next_seq_id += 1;
    }
}
/**
This cache owns the output files and saves us from constantly opening and closing them.

The `Option<(Features, AbsCallConv)>` tracks, per file, which `extern` block
group (if any) is currently open in that file.
*/
struct OutputFiles<'a> {
    out_config: &'a OutConfig,
    // Keyed by resolved output path.
    files: HashMap<path::PathBuf, (fs::File, Option<(Features, AbsCallConv)>)>,
}
impl<'a> OutputFiles<'a> {
    /// Creates an empty file cache for the given output configuration.
    fn new(out_config: &'a OutConfig) -> Self {
        OutputFiles {
            out_config: out_config,
            files: HashMap::new(),
        }
    }
    /// Writes one header item, prefixed by its feature gate and suffixed with
    /// its annotation as a comment.
    fn emit_to_header(&mut self, name: &str, feat: &Features, decl: &str, annot: &str) {
        use std::io::prelude::*;
        let (file, _) = self.get_file(name, &self.out_config.header_path);
        writeln!(file, "{}{} /* {} */", feat, decl, annot).unwrap();
    }
    /// Writes one function item. Consecutive calls with the same
    /// `(feat, cconv)` share a single `extern "..." { }` block; the per-file
    /// `group` state says which block (if any) is currently open.
    fn emit_to_library(&mut self, name: &str, feat: &Features, cconv: &AbsCallConv, decl: &str, annot: &str) {
        use std::io::prelude::*;
        let (file, group) = self.get_file(name, &self.out_config.library_path);
        match *group {
            // Same group as the previous item: keep the current block open.
            Some((ref gf, ref gcc)) if gf == feat && gcc == cconv => (),
            // Different group: close the old block and open a new one.
            Some(_) => {
                writeln!(file, "}}\n{}\nextern {:?} {{", feat, cconv.as_str()).unwrap();
            },
            // First item in this file: open the first block.
            None => {
                writeln!(file, "{}\nextern {:?} {{", feat, cconv.as_str()).unwrap();
            }
        }
        writeln!(file, " {} /* {} */", decl, annot).unwrap();
        *group = Some((feat.clone(), cconv.clone()));
    }
    /// Opens (or fetches the already-open) output file for `name`, where
    /// `pattern` is a path template containing a `{}` placeholder.
    fn get_file<'b>(
        &'b mut self,
        name: &str,
        pattern: &str
    ) -> (&'b mut fs::File, &'b mut Option<(Features, AbsCallConv)>) {
        use std::path::PathBuf;
        let mut path = PathBuf::from(&self.out_config.output_dir);
        path.push(pattern.replace("{}", name));
        let fg = self.files.entry(path.clone())
            .or_insert_with(|| (fs::File::create(path).unwrap(), None));
        (&mut fg.0, &mut fg.1)
    }
}
/**
An "abstract" calling convention.
This is to answer the question: "if a function uses the C calling convention, is that the same thing as `"system"`, or do I have to *actually* say `"C"`?"
Without this, almost every Windows API call would need two decls: one with `extern "C"`, and one with `extern "stdcall"`. Yuck.
*/
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
enum AbsCallConv {
    /// The function is `extern "C"` even where that is *not* the native convention.
    ExplicitlyC,
    /// The function uses whatever the platform's native convention is.
    System,
}
impl AbsCallConv {
/// Gets the calling convention as a string, suitable for use with Rust's `extern`.
fn as_str(self) -> &'static str {
use self::AbsCallConv::*;
match self {
ExplicitlyC => "C",
System => "system",
}
}
}
/**
A `TranslationUnit` cache.

Parses are keyed by `(path, expansion config)` so each distinct combination is
parsed at most once; cached units are shared via `Rc`.
*/
pub struct TuCache<'a> {
    index: Rc<Index>,
    cache: HashMap<TuCacheKey, Rc<TranslationUnit>>,
    gen_config: &'a GenConfig,
}
impl<'a> TuCache<'a> {
    /// Creates an empty cache over the given Clang index.
    pub fn new(index: Rc<Index>, gen_config: &'a GenConfig) -> TuCache<'a> {
        TuCache {
            index: index,
            cache: HashMap::new(),
            gen_config: gen_config,
        }
    }
    /**
    Parse a translation unit with the given expansion config.
    Unsurprisingly, this will return a cached TU if one has already been parsed.
    */
    pub fn parse_translation_unit(
        &mut self,
        path: &str,
        exp_config: &ExpConfig,
    ) -> Result<Rc<TranslationUnit>, clang::ErrorCode> {
        // The preprocessing record is needed to see macro definitions;
        // `Incomplete` tolerates headers that aren't standalone TUs.
        let index_opts = TranslationUnitFlags::None
            | TranslationUnitFlags::DetailedPreprocessingRecord
            | TranslationUnitFlags::Incomplete
            ;
        let key = TuCacheKey::new(path, exp_config);
        info!("parsing tu {:?} with {:?} ({:?})", path, exp_config, key);
        if let Some(rc_tu) = self.cache.get(&key) {
            info!(".. already in cache");
            return Ok(rc_tu.clone())
        }
        // Global switches first, then per-expansion switches.
        let switches: Vec<String> = self.gen_config.switches().iter().map(|s| s.clone())
            .chain(exp_config.switches().into_iter())
            .collect();
        let tu = try!(self.index.parse_translation_unit(
            path,
            &switches,
            &[],
            index_opts,
        ));
        self.cache.insert(key, tu.clone());
        Ok(tu)
    }
}
/**
This is the unique key for each entry in the `TuCache`.

The components are the source path and the expansion configuration it was
parsed under.
*/
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct TuCacheKey(String, ExpConfig);
impl TuCacheKey {
    /// Builds a cache key from a source path and its expansion configuration.
    pub fn new(path: &str, exp_config: &ExpConfig) -> TuCacheKey {
        let owned_path = path.to_string();
        TuCacheKey(owned_path, exp_config.clone())
    }
}
|
use std::mem;
use std::ptr;
use std::marker::PhantomData;
use std::ffi::CString;
use super::gl_lib as gl;
use super::gl_lib::types::*;
use super::types::GLError;
use super::context::Context;
use super::shader::Shader;
/// An owned handle to an OpenGL program object; the GL object is deleted on drop.
pub struct Program {
    gl_id: GLuint  // Object name obtained from `glCreateProgram`.
}
impl Program {
    /// Returns the raw GL object name for this program.
    pub fn gl_id(&self) -> GLuint {
        self.gl_id
    }
}
impl Drop for Program {
    // Release the GL program object when the handle goes out of scope.
    fn drop(&mut self) {
        unsafe {
            gl::DeleteProgram(self.gl_id);
        }
    }
}
impl Context {
    /// Creates a new program object; `Err(())` if the GL returns the
    /// reserved failure id `0`.
    pub fn create_program(&self) -> Result<Program, ()> {
        unsafe {
            let id = gl::CreateProgram();
            if id > 0 {
                Ok(Program { gl_id: id })
            }
            else {
                Err(())
            }
        }
    }
    /// Attaches `shader` to `program` (wraps `glAttachShader`).
    pub fn attach_shader(&self, program: &mut Program, shader: &Shader) {
        unsafe {
            gl::AttachShader(program.gl_id(), shader.gl_id());
        }
    }
    /// Links `program`; on failure, returns the GL info log wrapped in a
    /// `GLError`.
    pub fn link_program(&self, program: &mut Program) -> Result<(), GLError> {
        let success = unsafe {
            gl::LinkProgram(program.gl_id());
            let mut link_status : GLint = 0;
            gl::GetProgramiv(program.gl_id(),
                             gl::LINK_STATUS,
                             &mut link_status as *mut GLint);
            link_status == gl::TRUE as GLint
        };
        if success {
            Ok(())
        }
        else {
            unsafe {
                // BUGFIX: was `mem::uninitialized()`, which is undefined
                // behaviour for an integer read if the driver leaves it
                // unwritten; zero-initialise instead.
                let mut info_length : GLint = 0;
                gl::GetProgramiv(program.gl_id(),
                                 gl::INFO_LOG_LENGTH,
                                 &mut info_length as *mut GLint);
                let mut bytes = Vec::<u8>::with_capacity(info_length as usize);
                gl::GetProgramInfoLog(program.gl_id(),
                                      info_length,
                                      ptr::null_mut(),
                                      bytes.as_mut_ptr() as *mut GLchar);
                // BUGFIX: guard the `- 1` (which strips the trailing NUL)
                // so a zero-length log can't underflow to a huge length.
                if info_length > 0 {
                    bytes.set_len((info_length - 1) as usize);
                }
                let msg = String::from_utf8(bytes)
                    .unwrap_or_else(|_| String::from("<Unknown error>"));
                Err(GLError { message: msg })
            }
        }
    }
    /// Looks up a vertex attribute by name. `Err(())` if the attribute is
    /// absent or `name` contains an interior NUL byte.
    pub fn get_attrib_location(&self, program: &Program, name: &str)
        -> Result<ProgramAttrib, ()>
    {
        let c_str = try!(CString::new(name).or(Err(())));
        let str_ptr = c_str.as_ptr() as *const GLchar;
        unsafe {
            let index = gl::GetAttribLocation(program.gl_id(), str_ptr);
            if index >= 0 {
                Ok(ProgramAttrib { gl_index: index as GLuint })
            }
            else {
                Err(())
            }
        }
    }
    /// Looks up a uniform by name. `Err(())` if the uniform is absent or
    /// `name` contains an interior NUL byte.
    pub fn get_uniform_location(&self, program: &Program, name: &str)
        -> Result<ProgramUniform, ()>
    {
        let c_str = try!(CString::new(name).or(Err(())));
        let str_ptr = c_str.as_ptr() as *const GLchar;
        unsafe {
            let index = gl::GetUniformLocation(program.gl_id(), str_ptr);
            if index >= 0 {
                Ok(ProgramUniform { gl_index: index as GLuint })
            }
            else {
                Err(())
            }
        }
    }
}
/// Witness that a program is currently bound; borrows the `Program` mutably
/// for its lifetime so it cannot be mutated or dropped while bound.
pub struct ProgramBinding<'a> {
    phantom: PhantomData<&'a mut Program>
}
// No operations are defined on a binding yet; the type currently serves only
// as a lifetime witness that a program is bound.
impl<'a> ProgramBinding<'a> {
}
/// Capability object through which programs are bound to the GL context.
pub struct ProgramBinder;
impl ProgramBinder {
    /// Makes `program` the active program and returns a binding token tied
    /// to this binder's borrow.
    pub fn bind<'a>(&'a mut self, program: &mut Program)
        -> ProgramBinding<'a>
    {
        unsafe {
            gl::UseProgram(program.gl_id());
        }
        ProgramBinding { phantom: PhantomData }
    }
}
/// Location of a vertex attribute within a linked program.
#[derive(Debug, Clone, Copy)]
pub struct ProgramAttrib {
    pub gl_index: GLuint  // Index returned by `glGetAttribLocation`.
}
/// Location of a uniform within a linked program.
#[derive(Debug, Clone, Copy)]
pub struct ProgramUniform {
    pub gl_index: GLuint  // Index returned by `glGetUniformLocation`.
}
Add `ProgramBinding.set_uniform()`
Wraps all the variations of `glUniform*` using `UniformData`
use std::mem;
use std::ptr;
use std::marker::PhantomData;
use std::ffi::CString;
use super::gl_lib as gl;
use super::gl_lib::types::*;
use super::types::GLError;
use super::context::Context;
use super::shader::Shader;
use super::uniform_data::{UniformData, UniformDatumType, UniformPrimitiveType};
/// An owned handle to an OpenGL program object; the GL object is deleted on drop.
pub struct Program {
    gl_id: GLuint  // Object name obtained from `glCreateProgram`.
}
impl Program {
    /// Returns the raw GL object name for this program.
    pub fn gl_id(&self) -> GLuint {
        self.gl_id
    }
}
impl Drop for Program {
    // Release the GL program object when the handle goes out of scope.
    fn drop(&mut self) {
        unsafe {
            gl::DeleteProgram(self.gl_id);
        }
    }
}
impl Context {
    /// Creates a new program object; `Err(())` if the GL returns the
    /// reserved failure id `0`.
    pub fn create_program(&self) -> Result<Program, ()> {
        unsafe {
            let id = gl::CreateProgram();
            if id > 0 {
                Ok(Program { gl_id: id })
            }
            else {
                Err(())
            }
        }
    }
    /// Attaches `shader` to `program` (wraps `glAttachShader`).
    pub fn attach_shader(&self, program: &mut Program, shader: &Shader) {
        unsafe {
            gl::AttachShader(program.gl_id(), shader.gl_id());
        }
    }
    /// Links `program`; on failure, returns the GL info log wrapped in a
    /// `GLError`.
    pub fn link_program(&self, program: &mut Program) -> Result<(), GLError> {
        let success = unsafe {
            gl::LinkProgram(program.gl_id());
            let mut link_status : GLint = 0;
            gl::GetProgramiv(program.gl_id(),
                             gl::LINK_STATUS,
                             &mut link_status as *mut GLint);
            link_status == gl::TRUE as GLint
        };
        if success {
            Ok(())
        }
        else {
            unsafe {
                // BUGFIX: was `mem::uninitialized()`, which is undefined
                // behaviour for an integer read if the driver leaves it
                // unwritten; zero-initialise instead.
                let mut info_length : GLint = 0;
                gl::GetProgramiv(program.gl_id(),
                                 gl::INFO_LOG_LENGTH,
                                 &mut info_length as *mut GLint);
                let mut bytes = Vec::<u8>::with_capacity(info_length as usize);
                gl::GetProgramInfoLog(program.gl_id(),
                                      info_length,
                                      ptr::null_mut(),
                                      bytes.as_mut_ptr() as *mut GLchar);
                // BUGFIX: guard the `- 1` (which strips the trailing NUL)
                // so a zero-length log can't underflow to a huge length.
                if info_length > 0 {
                    bytes.set_len((info_length - 1) as usize);
                }
                let msg = String::from_utf8(bytes)
                    .unwrap_or_else(|_| String::from("<Unknown error>"));
                Err(GLError { message: msg })
            }
        }
    }
    /// Looks up a vertex attribute by name. `Err(())` if the attribute is
    /// absent or `name` contains an interior NUL byte.
    pub fn get_attrib_location(&self, program: &Program, name: &str)
        -> Result<ProgramAttrib, ()>
    {
        let c_str = try!(CString::new(name).or(Err(())));
        let str_ptr = c_str.as_ptr() as *const GLchar;
        unsafe {
            let index = gl::GetAttribLocation(program.gl_id(), str_ptr);
            if index >= 0 {
                Ok(ProgramAttrib { gl_index: index as GLuint })
            }
            else {
                Err(())
            }
        }
    }
    /// Looks up a uniform by name. `Err(())` if the uniform is absent or
    /// `name` contains an interior NUL byte.
    pub fn get_uniform_location(&self, program: &Program, name: &str)
        -> Result<ProgramUniform, ()>
    {
        let c_str = try!(CString::new(name).or(Err(())));
        let str_ptr = c_str.as_ptr() as *const GLchar;
        unsafe {
            let index = gl::GetUniformLocation(program.gl_id(), str_ptr);
            if index >= 0 {
                Ok(ProgramUniform { gl_index: index as GLuint })
            }
            else {
                Err(())
            }
        }
    }
}
/// Witness that a program is currently bound; borrows the `Program` mutably
/// for its lifetime so it cannot be mutated or dropped while bound.
pub struct ProgramBinding<'a> {
    phantom: PhantomData<&'a mut Program>
}
impl<'a> ProgramBinding<'a> {
    /// Uploads `val` to the uniform at `uniform` in the currently-bound
    /// program, dispatching to the `glUniform*` entry point that matches the
    /// value's datum type and primitive type.
    pub fn set_uniform<T>(&self, uniform: ProgramUniform, val: T)
        where T: UniformData
    {
        let idx = uniform.gl_index as GLint;
        let count = val.uniform_elements() as GLsizei;
        let ptr = val.uniform_bytes().as_ptr();
        unsafe {
            match T::uniform_datum_type() {
                UniformDatumType::Vec1(p) => {
                    match p {
                        UniformPrimitiveType::Float => {
                            gl::Uniform1fv(idx, count, ptr as *const GLfloat)
                        },
                        UniformPrimitiveType::Int => {
                            gl::Uniform1iv(idx, count, ptr as *const GLint)
                        }
                    }
                },
                UniformDatumType::Vec2(p) => {
                    match p {
                        UniformPrimitiveType::Float => {
                            gl::Uniform2fv(idx, count, ptr as *const GLfloat)
                        },
                        UniformPrimitiveType::Int => {
                            gl::Uniform2iv(idx, count, ptr as *const GLint)
                        }
                    }
                },
                // BUGFIX: previously called `gl::Uniform2fv`/`gl::Uniform2iv`
                // (copy-paste from the Vec2 arm), uploading only 2 of the 3
                // components.
                UniformDatumType::Vec3(p) => {
                    match p {
                        UniformPrimitiveType::Float => {
                            gl::Uniform3fv(idx, count, ptr as *const GLfloat)
                        },
                        UniformPrimitiveType::Int => {
                            gl::Uniform3iv(idx, count, ptr as *const GLint)
                        }
                    }
                },
                // BUGFIX: previously called `gl::Uniform2fv`/`gl::Uniform2iv`
                // (copy-paste from the Vec2 arm), uploading only 2 of the 4
                // components.
                UniformDatumType::Vec4(p) => {
                    match p {
                        UniformPrimitiveType::Float => {
                            gl::Uniform4fv(idx, count, ptr as *const GLfloat)
                        },
                        UniformPrimitiveType::Int => {
                            gl::Uniform4iv(idx, count, ptr as *const GLint)
                        }
                    }
                },
                UniformDatumType::Matrix2x2 => {
                    gl::UniformMatrix2fv(idx,
                                         count,
                                         gl::FALSE,
                                         ptr as *const GLfloat)
                },
                UniformDatumType::Matrix3x3 => {
                    gl::UniformMatrix3fv(idx,
                                         count,
                                         gl::FALSE,
                                         ptr as *const GLfloat)
                },
                UniformDatumType::Matrix4x4 => {
                    gl::UniformMatrix4fv(idx,
                                         count,
                                         gl::FALSE,
                                         ptr as *const GLfloat)
                },
            }
        }
    }
}
/// Capability object through which programs are bound to the GL context.
pub struct ProgramBinder;
impl ProgramBinder {
    /// Makes `program` the active program and returns a binding token tied
    /// to this binder's borrow.
    pub fn bind<'a>(&'a mut self, program: &mut Program)
        -> ProgramBinding<'a>
    {
        unsafe {
            gl::UseProgram(program.gl_id());
        }
        ProgramBinding { phantom: PhantomData }
    }
}
/// Location index of a vertex attribute within a linked program.
#[derive(Debug, Clone, Copy)]
pub struct ProgramAttrib {
    pub gl_index: GLuint
}
/// Location index of a uniform within a linked program, as returned by
/// `get_uniform_location`.
#[derive(Debug, Clone, Copy)]
pub struct ProgramUniform {
    pub gl_index: GLuint
}
|
use command::Command;
use vm::VM;
pub struct Program {
    instructions : Vec<Command>,
    // Index of the next instruction to execute; None until code is appended.
    instruction_pointer: Option<usize>,
    // NOTE(review): the three fields below are written only in new() in this
    // version — presumably reserved for loop seeking; confirm before use.
    is_seeking: bool,
    current_depth: u64,
    goal_depth: Option<u64>,
}
impl Program {
    /// Creates an empty program with no instructions loaded.
    pub fn new () -> Program {
        Program {
            instructions: Vec::new(),
            instruction_pointer: None,
            is_seeking: false,
            current_depth: 0,
            goal_depth: None,
        }
    }
    /// Appends a slice of commands, initialising the instruction pointer on
    /// the first append.
    pub fn append(&mut self, instructions: &[Command]) {
        self.instructions.extend(instructions.iter().cloned());
        if self.instruction_pointer.is_none() {
            self.instruction_pointer = Some(0);
        }
    }
    /// Runs every remaining instruction against `vm`, leaving the pointer
    /// just past the end of the instruction list.
    pub fn execute(&mut self, vm: &mut VM) {
        if let Some(start) = self.instruction_pointer {
            for index in start..self.instructions.len() {
                vm.apply(self.instructions[index]);
            }
            self.instruction_pointer = Some(self.instructions.len());
        }
    }
}
Adjust depth when jump commands are seen
use command::Command;
use vm::VM;
pub struct Program {
    instructions : Vec<Command>,
    // Index of the next instruction to execute; None until code is appended.
    instruction_pointer: Option<usize>,
    // NOTE(review): is_seeking and goal_depth are still only written in
    // new() — presumably for upcoming loop seeking; confirm.
    is_seeking: bool,
    // Current jump-nesting depth, updated as JumpForward/JumpBackward run.
    current_depth: u64,
    goal_depth: Option<u64>,
}
impl Program {
    /// Creates an empty program with no instructions loaded.
    pub fn new () -> Program {
        Program {
            instructions: Vec::new(),
            instruction_pointer: None,
            is_seeking: false,
            current_depth: 0,
            goal_depth: None,
        }
    }
    /// Appends a slice of commands, initialising the instruction pointer on
    /// the first append.
    pub fn append(&mut self, instructions: &[Command]) {
        self.instructions.extend(instructions.iter().cloned());
        if self.instruction_pointer.is_none() { self.instruction_pointer = Some(0); }
    }
    /// Runs every remaining instruction against `vm`, tracking the
    /// jump-nesting depth as commands are applied.
    pub fn execute(&mut self, vm: &mut VM) {
        match self.instruction_pointer {
            None => {},
            Some(mut index) => {
                while index < self.instructions.len() {
                    let command = self.instructions[index];
                    if command == Command::JumpForward {
                        self.current_depth = self.current_depth + 1
                    }
                    if command == Command::JumpBackward {
                        // saturating_sub: a stray JumpBackward at depth 0 in
                        // unbalanced input previously underflowed the u64
                        // (panic in debug, wrap to u64::MAX in release).
                        self.current_depth = self.current_depth.saturating_sub(1)
                    }
                    vm.apply(command);
                    index = index + 1;
                }
                self.instruction_pointer = Some(index);
            }
        }
    }
}
|
use {CameraCalibration, Error, MountCalibration, Result, ScanPosition, utils};
use element::Extension;
use nalgebra::Projective3;
use scan_position::Image;
use std::collections::HashMap;
use std::io::Read;
use std::path::{Path, PathBuf};
use xmltree::Element;
/// A RiSCAN Pro project.
///
/// These are always created by pointing at a path, either the `.RiSCAN` path or the `project.rsp`
/// in that directory:
///
/// ```
/// use riscan_pro::Project;
/// let project1 = Project::from_path("data/project.RiSCAN").unwrap();
/// let project2 = Project::from_path("data/project.RiSCAN/project.rsp").unwrap();
/// assert_eq!(project1, project2);
/// ```
// Serialize allows exporting the parsed project; PartialEq backs the
// equality shown in the doc example above.
#[derive(Clone, Debug, Serialize, PartialEq)]
pub struct Project {
    /// The camera calibrations, by name.
    pub camera_calibrations: HashMap<String, CameraCalibration>,
    /// The camera mount calibrations, by name.
    pub mount_calibrations: HashMap<String, MountCalibration>,
    /// The scan positions, by name.
    pub scan_positions: HashMap<String, ScanPosition>,
    /// The project's own position.
    pub pop: Projective3<f64>,
}
impl Project {
    /// Creates a project from a filesystem path.
    ///
    /// This path can be either the `.RiSCAN` directory or the contained `project.rsp`.
    ///
    /// # Examples
    ///
    /// ```
    /// use riscan_pro::Project;
    /// let project = Project::from_path("data/project.RiSCAN").unwrap();
    /// ```
    pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Project> {
        use std::fs::File;
        // Normalise the argument to the project.rsp file before opening.
        let path = rsp_path(path)?;
        let file = File::open(path)?;
        Project::from_read(file)
    }
    /// Returns a scan position, as determined by the path.
    ///
    /// # Examples
    ///
    /// ```
    /// use riscan_pro::Project;
    /// let project = Project::from_path("data/project.RiSCAN").unwrap();
    /// let scan_position1 = project.scan_positions.get("SP01").unwrap();
    /// let path = "data/project.RiSCAN/SCANS/SP01/SCANPOSIMAGES/SP01 - Image001.csv";
    /// let scan_position2 = project.scan_position_from_path(path).unwrap();
    /// assert_eq!(scan_position1, scan_position2);
    /// ```
    pub fn scan_position_from_path<P: AsRef<Path>>(&self, path: P) -> Result<&ScanPosition> {
        // Files are named "<position name> - <rest>", so the position name
        // is everything before the first " - " of the file stem.
        path.as_ref()
            .file_stem()
            .map(|file_stem| file_stem.to_string_lossy())
            .and_then(|file_stem| {
                file_stem.split(" - ").next().and_then(|name| {
                    self.scan_positions.get(name)
                })
            })
            .ok_or_else(|| Error::ScanPositionFromPath(path.as_ref().to_path_buf()))
    }
    // Parses the full project from the rsp XML. Each collect over `Result`
    // short-circuits on the first element that fails to parse.
    fn from_read<R: Read>(read: R) -> Result<Project> {
        let xml = Element::parse(read)?;
        let camera_calibrations = xml.children("calibrations/camcalibs/camcalib_opencv")?
            .iter()
            .map(|camcalib_opencv| {
                let camera_calibration = CameraCalibration::from_element(camcalib_opencv)?;
                Ok((camera_calibration.name.clone(), camera_calibration))
            })
            .collect::<Result<HashMap<_, _>>>()?;
        let mount_calibrations = xml.children("calibrations/mountcalibs/mountcalib")?
            .iter()
            .map(|mountcalib| {
                let mount_calibration = MountCalibration::from_element(mountcalib)?;
                Ok((mount_calibration.name.clone(), mount_calibration))
            })
            .collect::<Result<HashMap<_, _>>>()?;
        let scan_positions = xml.children("scanpositions/scanposition")?
            .iter()
            .map(|scanposition| {
                let scan_position = ScanPosition::from_element(scanposition)?;
                Ok((scan_position.name.clone(), scan_position))
            })
            .collect::<Result<HashMap<_, _>>>()?;
        Ok(Project {
            camera_calibrations: camera_calibrations,
            mount_calibrations: mount_calibrations,
            scan_positions: scan_positions,
            pop: utils::parse_projective3(xml.child("pop/matrix")?.as_str()?)?,
        })
    }
}
impl CameraCalibration {
    /// Builds a camera calibration from a `<camcalib_opencv>` element.
    ///
    /// Only version "2" calibrations are supported; any other version
    /// yields `Error::CameraCalibrationVersion`.
    fn from_element(element: &Element) -> Result<CameraCalibration> {
        let version = element.child("version")?.as_str()?;
        if version == "2" {
            Ok(CameraCalibration {
                name: element.child("name")?.as_str()?.to_string(),
                // OpenCV intrinsics: principal point, focal lengths,
                // radial (k*) and tangential (p*) distortion.
                cx: element.child("internal_opencv/cx")?.parse_text()?,
                cy: element.child("internal_opencv/cy")?.parse_text()?,
                fx: element.child("internal_opencv/fx")?.parse_text()?,
                fy: element.child("internal_opencv/fy")?.parse_text()?,
                k1: element.child("internal_opencv/k1")?.parse_text()?,
                k2: element.child("internal_opencv/k2")?.parse_text()?,
                k3: element.child("internal_opencv/k3")?.parse_text()?,
                k4: element.child("internal_opencv/k4")?.parse_text()?,
                p1: element.child("internal_opencv/p1")?.parse_text()?,
                p2: element.child("internal_opencv/p2")?.parse_text()?,
                tan_max_horz: element.child("angle_extents/tan_max_horz")?.parse_text()?,
                tan_max_vert: element.child("angle_extents/tan_max_vert")?.parse_text()?,
                tan_min_horz: element.child("angle_extents/tan_min_horz")?.parse_text()?,
                tan_min_vert: element.child("angle_extents/tan_min_vert")?.parse_text()?,
                width: element.child("intrinsic_opencv/nx")?.parse_text()?,
                height: element.child("intrinsic_opencv/ny")?.parse_text()?,
            })
        } else {
            Err(Error::CameraCalibrationVersion(version.to_string()))
        }
    }
}
impl MountCalibration {
    /// Builds a mount calibration (name + matrix) from its `<mountcalib>`
    /// XML element.
    fn from_element(element: &Element) -> Result<MountCalibration> {
        let name = element.child("name")?.as_str()?.to_string();
        let matrix = utils::parse_projective3(element.child("matrix")?.as_str()?)?;
        Ok(MountCalibration {
            name: name,
            matrix: matrix,
        })
    }
}
impl ScanPosition {
    /// Builds a scan position (name, images by name, SOP matrix) from its
    /// `<scanposition>` XML element.
    fn from_element(element: &Element) -> Result<ScanPosition> {
        Ok(ScanPosition {
            name: element.child("name")?.as_str()?.to_string(),
            // Collecting into Result short-circuits on the first image that
            // fails to parse.
            images: element
                .children("scanposimages/scanposimage")?
                .iter()
                .map(|scanposimage| {
                    let image = Image::from_element(scanposimage)?;
                    Ok((image.name.clone(), image))
                })
                .collect::<Result<HashMap<_, _>>>()?,
            sop: utils::parse_projective3(element.child("sop/matrix")?.as_str()?)?,
        })
    }
}
impl Image {
    /// Builds an image (name, COP matrix, calibration references) from its
    /// `<scanposimage>` XML element.
    fn from_element(element: &Element) -> Result<Image> {
        Ok(Image {
            name: element.child("name")?.as_str()?.to_string(),
            cop: utils::parse_projective3(element.child("cop/matrix")?.as_str()?)?,
            // The *_ref children are node references resolved by name later.
            camera_calibration_name: element.child("camcalib_ref")?.noderef()?.to_string(),
            mount_calibration_name: element.child("mountcalib_ref")?.noderef()?.to_string(),
        })
    }
}
fn rsp_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
if let Some(extension) = path.as_ref().extension() {
let mut path_buf = path.as_ref().to_path_buf();
if extension == "rsp" {
return Ok(path_buf);
} else if extension == "RiSCAN" {
path_buf.push("project.rsp");
return Ok(path_buf);
}
}
let mut path_buf = PathBuf::new();
for component in path.as_ref().iter() {
path_buf.push(component);
if Path::new(component)
.extension()
.map(|e| e == "RiSCAN")
.unwrap_or(false)
{
return rsp_path(path_buf);
}
}
Err(Error::ProjectPath(path_buf))
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): these tests read fixture files under data/ on disk.
    #[test]
    fn from_path() {
        Project::from_path("data/project.RiSCAN").unwrap();
        Project::from_path("data/project.RiSCAN/project.rsp").unwrap();
        // A path *inside* the project directory also resolves.
        Project::from_path("data/project.RiSCAN/SCANS").unwrap();
        assert!(Project::from_path("data").is_err());
    }
    #[test]
    fn mount_calibrations() {
        use utils;
        let project = Project::from_path("data/project.RiSCAN").unwrap();
        let mount_calibration = project
            .mount_calibrations
            .get("Infratec_VarioCAM_HD_15mm_11-16-2015_Preston")
            .unwrap();
        let matrix = utils::parse_projective3("-0.010877741999999997 -0.003724941 -0.999933898 0.18508641 0.019274697 0.999806486 -0.0039341460000000013 0.000460517 0.99975505 -0.019316217 -0.01080384 -0.092802787 0 0 0 1").unwrap();
        assert_eq!(matrix, **mount_calibration);
    }
    #[test]
    fn scan_position_from_path() {
        let project = Project::from_path("data/project.RiSCAN").unwrap();
        let scan_position1 = project.scan_positions.get("SP01").unwrap();
        let scan_position2 = project
            .scan_position_from_path(
                "data/project.RiSCAN/SCANS/SP01/SCANPOSIMAGES/SP01 - Image001.csv",
            )
            .unwrap();
        assert_eq!(scan_position1, scan_position2);
    }
    #[test]
    fn only_accept_version_2_camera_calibrations() {
        Project::from_path("data/project.RiSCAN").unwrap();
        assert!(Project::from_path("data/camera-calibration-version-0.rsp").is_err());
        assert!(Project::from_path("data/camera-calibration-version-1.rsp").is_err());
    }
}
Add path attribute to project
use {CameraCalibration, Error, MountCalibration, Result, ScanPosition, utils};
use element::Extension;
use nalgebra::Projective3;
use scan_position::Image;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use xmltree::Element;
/// A RiSCAN Pro project.
///
/// These are always created by pointing at a path, either the `.RiSCAN` path or the `project.rsp`
/// in that directory:
///
/// ```
/// use riscan_pro::Project;
/// let project1 = Project::from_path("data/project.RiSCAN").unwrap();
/// let project2 = Project::from_path("data/project.RiSCAN/project.rsp").unwrap();
/// assert_eq!(project1, project2);
/// ```
// Serialize allows exporting the parsed project; PartialEq backs the
// equality shown in the doc example above.
#[derive(Clone, Debug, Serialize, PartialEq)]
pub struct Project {
    /// The path to the project rsp file.
    pub path: PathBuf,
    /// The camera calibrations, by name.
    pub camera_calibrations: HashMap<String, CameraCalibration>,
    /// The camera mount calibrations, by name.
    pub mount_calibrations: HashMap<String, MountCalibration>,
    /// The scan positions, by name.
    pub scan_positions: HashMap<String, ScanPosition>,
    /// The project's own position.
    pub pop: Projective3<f64>,
}
impl Project {
    /// Creates a project from a filesystem path.
    ///
    /// This path can be either the `.RiSCAN` directory or the contained `project.rsp`.
    ///
    /// # Examples
    ///
    /// ```
    /// use riscan_pro::Project;
    /// let project = Project::from_path("data/project.RiSCAN").unwrap();
    /// ```
    pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Project> {
        use std::fs::File;
        // Normalise the argument to the project.rsp file before opening.
        let path = rsp_path(path)?;
        let file = File::open(&path)?;
        let xml = Element::parse(file)?;
        // Each collect over `Result` short-circuits on the first element
        // that fails to parse.
        let camera_calibrations = xml.children("calibrations/camcalibs/camcalib_opencv")?
            .iter()
            .map(|camcalib_opencv| {
                let camera_calibration = CameraCalibration::from_element(camcalib_opencv)?;
                Ok((camera_calibration.name.clone(), camera_calibration))
            })
            .collect::<Result<HashMap<_, _>>>()?;
        let mount_calibrations = xml.children("calibrations/mountcalibs/mountcalib")?
            .iter()
            .map(|mountcalib| {
                let mount_calibration = MountCalibration::from_element(mountcalib)?;
                Ok((mount_calibration.name.clone(), mount_calibration))
            })
            .collect::<Result<HashMap<_, _>>>()?;
        let scan_positions = xml.children("scanpositions/scanposition")?
            .iter()
            .map(|scanposition| {
                let scan_position = ScanPosition::from_element(scanposition)?;
                Ok((scan_position.name.clone(), scan_position))
            })
            .collect::<Result<HashMap<_, _>>>()?;
        Ok(Project {
            camera_calibrations: camera_calibrations,
            mount_calibrations: mount_calibrations,
            scan_positions: scan_positions,
            // `path` is already an owned PathBuf — move it instead of
            // re-allocating a copy with `to_path_buf()`.
            path: path,
            pop: utils::parse_projective3(xml.child("pop/matrix")?.as_str()?)?,
        })
    }
    /// Returns a scan position, as determined by the path.
    ///
    /// # Examples
    ///
    /// ```
    /// use riscan_pro::Project;
    /// let project = Project::from_path("data/project.RiSCAN").unwrap();
    /// let scan_position1 = project.scan_positions.get("SP01").unwrap();
    /// let path = "data/project.RiSCAN/SCANS/SP01/SCANPOSIMAGES/SP01 - Image001.csv";
    /// let scan_position2 = project.scan_position_from_path(path).unwrap();
    /// assert_eq!(scan_position1, scan_position2);
    /// ```
    pub fn scan_position_from_path<P: AsRef<Path>>(&self, path: P) -> Result<&ScanPosition> {
        // Files are named "<position name> - <rest>", so the position name
        // is everything before the first " - " of the file stem.
        path.as_ref()
            .file_stem()
            .map(|file_stem| file_stem.to_string_lossy())
            .and_then(|file_stem| {
                file_stem.split(" - ").next().and_then(|name| {
                    self.scan_positions.get(name)
                })
            })
            .ok_or_else(|| Error::ScanPositionFromPath(path.as_ref().to_path_buf()))
    }
}
impl CameraCalibration {
fn from_element(element: &Element) -> Result<CameraCalibration> {
let version = element.child("version")?.as_str()?;
if version == "2" {
Ok(CameraCalibration {
name: element.child("name")?.as_str()?.to_string(),
cx: element.child("internal_opencv/cx")?.parse_text()?,
cy: element.child("internal_opencv/cy")?.parse_text()?,
fx: element.child("internal_opencv/fx")?.parse_text()?,
fy: element.child("internal_opencv/fy")?.parse_text()?,
k1: element.child("internal_opencv/k1")?.parse_text()?,
k2: element.child("internal_opencv/k2")?.parse_text()?,
k3: element.child("internal_opencv/k3")?.parse_text()?,
k4: element.child("internal_opencv/k4")?.parse_text()?,
p1: element.child("internal_opencv/p1")?.parse_text()?,
p2: element.child("internal_opencv/p2")?.parse_text()?,
tan_max_horz: element.child("angle_extents/tan_max_horz")?.parse_text()?,
tan_max_vert: element.child("angle_extents/tan_max_vert")?.parse_text()?,
tan_min_horz: element.child("angle_extents/tan_min_horz")?.parse_text()?,
tan_min_vert: element.child("angle_extents/tan_min_vert")?.parse_text()?,
width: element.child("intrinsic_opencv/nx")?.parse_text()?,
height: element.child("intrinsic_opencv/ny")?.parse_text()?,
})
} else {
Err(Error::CameraCalibrationVersion(version.to_string()))
}
}
}
impl MountCalibration {
    /// Builds a mount calibration (name + matrix) from its `<mountcalib>`
    /// XML element.
    fn from_element(element: &Element) -> Result<MountCalibration> {
        Ok(MountCalibration {
            name: element.child("name")?.as_str()?.to_string(),
            matrix: utils::parse_projective3(element.child("matrix")?.as_str()?)?,
        })
    }
}
impl ScanPosition {
    /// Builds a scan position (name, images by name, SOP matrix) from its
    /// `<scanposition>` XML element.
    fn from_element(element: &Element) -> Result<ScanPosition> {
        let name = element.child("name")?.as_str()?.to_string();
        // Build the image map imperatively; `?` still propagates the first
        // image that fails to parse, just like a Result collect would.
        let mut images = HashMap::new();
        for scanposimage in element.children("scanposimages/scanposimage")?.iter() {
            let image = Image::from_element(scanposimage)?;
            images.insert(image.name.clone(), image);
        }
        let sop = utils::parse_projective3(element.child("sop/matrix")?.as_str()?)?;
        Ok(ScanPosition {
            name: name,
            images: images,
            sop: sop,
        })
    }
}
impl Image {
    /// Builds an image (name, COP matrix, calibration references) from its
    /// `<scanposimage>` XML element.
    fn from_element(element: &Element) -> Result<Image> {
        Ok(Image {
            name: element.child("name")?.as_str()?.to_string(),
            cop: utils::parse_projective3(element.child("cop/matrix")?.as_str()?)?,
            // The *_ref children are node references resolved by name later.
            camera_calibration_name: element.child("camcalib_ref")?.noderef()?.to_string(),
            mount_calibration_name: element.child("mountcalib_ref")?.noderef()?.to_string(),
        })
    }
}
/// Resolves a user-supplied path to the `project.rsp` file: accepts the rsp
/// file itself, the `.RiSCAN` directory, or any path beneath a `.RiSCAN`
/// directory; anything else is `Error::ProjectPath`.
fn rsp_path<P: AsRef<Path>>(path: P) -> Result<PathBuf> {
    if let Some(extension) = path.as_ref().extension() {
        let mut path_buf = path.as_ref().to_path_buf();
        if extension == "rsp" {
            return Ok(path_buf);
        } else if extension == "RiSCAN" {
            path_buf.push("project.rsp");
            return Ok(path_buf);
        }
    }
    // Walk the components; if any ends in ".RiSCAN", recurse from there so
    // paths inside the project directory also resolve.
    let mut path_buf = PathBuf::new();
    for component in path.as_ref().iter() {
        path_buf.push(component);
        if Path::new(component)
            .extension()
            .map(|e| e == "RiSCAN")
            .unwrap_or(false)
        {
            return rsp_path(path_buf);
        }
    }
    Err(Error::ProjectPath(path_buf))
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): these tests read fixture files under data/ on disk.
    #[test]
    fn from_path() {
        Project::from_path("data/project.RiSCAN").unwrap();
        Project::from_path("data/project.RiSCAN/project.rsp").unwrap();
        // A path *inside* the project directory also resolves.
        Project::from_path("data/project.RiSCAN/SCANS").unwrap();
        assert!(Project::from_path("data").is_err());
    }
    #[test]
    fn mount_calibrations() {
        use utils;
        let project = Project::from_path("data/project.RiSCAN").unwrap();
        let mount_calibration = project
            .mount_calibrations
            .get("Infratec_VarioCAM_HD_15mm_11-16-2015_Preston")
            .unwrap();
        let matrix = utils::parse_projective3("-0.010877741999999997 -0.003724941 -0.999933898 0.18508641 0.019274697 0.999806486 -0.0039341460000000013 0.000460517 0.99975505 -0.019316217 -0.01080384 -0.092802787 0 0 0 1").unwrap();
        assert_eq!(matrix, **mount_calibration);
    }
    #[test]
    fn scan_position_from_path() {
        let project = Project::from_path("data/project.RiSCAN").unwrap();
        let scan_position1 = project.scan_positions.get("SP01").unwrap();
        let scan_position2 = project
            .scan_position_from_path(
                "data/project.RiSCAN/SCANS/SP01/SCANPOSIMAGES/SP01 - Image001.csv",
            )
            .unwrap();
        assert_eq!(scan_position1, scan_position2);
    }
    #[test]
    fn only_accept_version_2_camera_calibrations() {
        Project::from_path("data/project.RiSCAN").unwrap();
        assert!(Project::from_path("data/camera-calibration-version-0.rsp").is_err());
        assert!(Project::from_path("data/camera-calibration-version-1.rsp").is_err());
    }
}
|
use std::path::{Path, PathBuf};
#[cfg(unix)]
use std::os::unix::prelude::*;
#[cfg(unix)]
use std::ffi::OsString;
#[cfg(windows)]
use std::str;
// A path decoded from raw bytes; the Bytes variant keeps input that could
// not be converted to a real path.
// NOTE(review): allow(dead_code) presumably silences the variant/ctor
// unused on the current platform — confirm.
#[allow(dead_code)]
#[derive(Debug)]
pub enum RawPath {
    Path(PathBuf),
    Bytes(Vec<u8>),
}
impl RawPath {
    /// Creates an empty raw path.
    #[allow(dead_code)]
    pub fn new() -> Self {
        RawPath::Bytes(vec![])
    }
    /// Builds a raw path from bytes; the conversion is platform-specific.
    pub fn from_bytes(bytes: Vec<u8>) -> Self {
        Self::from_bytes_(bytes)
    }
    /// Returns the path, or `None` when no real path could be built.
    pub fn as_path(&self) -> Option<&Path> {
        match *self {
            RawPath::Path(ref p) => Some(&p),
            RawPath::Bytes(_) => None,
        }
    }
    /// Returns the raw bytes of the path.
    pub fn as_bytes(&self) -> &[u8] {
        match *self {
            RawPath::Path(ref p) => path2bytes(p),
            RawPath::Bytes(ref b) => b,
        }
    }
    // Windows: only valid UTF-8 becomes a real path; otherwise keep bytes.
    #[cfg(windows)]
    fn from_bytes_(bytes: Vec<u8>) -> Self {
        if let Ok(s) = str::from_utf8(&bytes) {
            return RawPath::Path(PathBuf::from(s));
        }
        RawPath::Bytes(bytes)
    }
    // Unix: any byte sequence is a valid OsString, so this is lossless.
    #[cfg(unix)]
    fn from_bytes_(bytes: Vec<u8>) -> Self {
        RawPath::Path(PathBuf::from(OsString::from_vec(bytes)))
    }
}
// Converts a path back to bytes on Windows. The unwrap assumes the path is
// valid UTF-8 — true for paths built by from_bytes_; NOTE(review): a path
// constructed elsewhere could panic here — confirm callers.
#[cfg(windows)]
fn path2bytes(p: &Path) -> &[u8] {
    p.as_os_str().to_str().unwrap().as_bytes()
}
// On Unix an OsStr is already bytes; this conversion is free.
#[cfg(unix)]
fn path2bytes(p: &Path) -> &[u8] {
    p.as_os_str().as_bytes()
}
Cleanup rawpath module
Split into two OS-specific submodules
pub use self::os::RawPath;
#[cfg(unix)]
mod os {
    use std::ffi::OsString;
    use std::os::unix::prelude::*;
    use std::path::{Path, PathBuf};

    /// A path built from raw bytes. On Unix any byte sequence is a valid
    /// `OsString`, so this is always backed by a real `PathBuf`.
    #[derive(Debug)]
    pub struct RawPath(PathBuf);

    impl RawPath {
        /// Creates an empty path.
        #[allow(dead_code)]
        pub fn new() -> Self {
            Self::from_bytes(Vec::new())
        }
        /// Builds a path directly from raw bytes (lossless on Unix).
        pub fn from_bytes(bytes: Vec<u8>) -> Self {
            RawPath(OsString::from_vec(bytes).into())
        }
        /// Always succeeds on Unix.
        pub fn as_path(&self) -> Option<&Path> {
            Some(self.0.as_path())
        }
        /// Returns the underlying bytes of the path.
        pub fn as_bytes(&self) -> &[u8] {
            self.0.as_os_str().as_bytes()
        }
    }
}
#[cfg(windows)]
mod os {
    use std::path::{Path, PathBuf};
    use std::str;
    /// A path built from raw bytes. Only UTF-8 byte sequences are converted
    /// to a real path here; anything else is kept as raw bytes.
    #[derive(Debug)]
    pub enum RawPath {
        Path(PathBuf),
        Bytes(Vec<u8>),
    }
    impl RawPath {
        /// Creates an empty raw path.
        #[allow(dead_code)]
        pub fn new() -> Self {
            RawPath::Bytes(vec![])
        }
        /// Interprets the bytes as UTF-8 if possible, else stores them raw.
        pub fn from_bytes(bytes: Vec<u8>) -> Self {
            if let Ok(s) = str::from_utf8(&bytes) {
                return RawPath::Path(PathBuf::from(s));
            }
            RawPath::Bytes(bytes)
        }
        /// Returns the path, or `None` when the bytes were not valid UTF-8.
        pub fn as_path(&self) -> Option<&Path> {
            match *self {
                RawPath::Path(ref p) => Some(&p),
                RawPath::Bytes(_) => None,
            }
        }
        /// Returns the raw bytes of the path.
        pub fn as_bytes(&self) -> &[u8] {
            match *self {
                // The Path variant is only constructed from valid UTF-8 in
                // from_bytes, so to_str() cannot fail here.
                RawPath::Path(ref p) => p.as_os_str().to_str().unwrap().as_bytes(),
                RawPath::Bytes(ref b) => b,
            }
        }
    }
}
|
use redis;
use redis::{RedisResult,Value};
use redis::Value::{Nil, Okay};
use std::fs::File;
use std::path::Path;
use std::io::{self,Read};
use std::thread::sleep_ms;
use rand;
use rand::distributions::{IndependentSample, Range};
use time;
// Default number of acquisition rounds and per-round delay upper bound (ms).
const DEFAULT_RETRY_COUNT : u32 = 3;
const DEFAULT_RETRY_DELAY : u32 = 200;
// Fraction of the TTL assumed lost to clock drift between Redis instances.
const CLOCK_DRIFT_FACTOR : f32 = 0.01;
// Lua script: delete the key only if it still holds our lock value.
const UNLOCK_SCRIPT : &'static str = r"if redis.call('get',KEYS[1]) == ARGV[1] then
return redis.call('del',KEYS[1])
else
return 0
end";
/// The lock manager.
///
/// Implements the necessary functionality to acquire and release locks
/// and handles the Redis connections.
pub struct RedLock {
    /// List of all Redis clients
    pub servers: Vec<redis::Client>,
    // Number of instances that must grant the lock (N/2+1).
    quorum: u32,
    // How many acquisition rounds to attempt before giving up.
    retry_count: u32,
    // Upper bound (ms) for the random backoff between rounds.
    retry_delay: u32
}
// Handle for an acquired lock; borrows the resource key for its lifetime.
pub struct Lock<'a> {
    /// The resource to lock. Will be used as the key in Redis.
    pub resource: &'a [u8],
    /// The value for this lock.
    pub val: Vec<u8>,
    /// Time the lock is still valid.
    /// Should only be slightly smaller than the requested TTL.
    pub validity_time: usize
}
impl RedLock {
    /// Create a new lock manager instance, defined by the given Redis connection uris.
    /// Quorum is defined to be N/2+1, with N being the number of given Redis instances.
    ///
    /// Sample URI: `"redis://127.0.0.1:6379"`
    pub fn new(uris: Vec<&str>) -> RedLock {
        let quorum = (uris.len() as u32) / 2 + 1;
        let mut servers = Vec::with_capacity(uris.len());
        for &uri in uris.iter() {
            servers.push(redis::Client::open(uri).unwrap())
        }
        RedLock {
            servers: servers,
            quorum: quorum,
            retry_count: DEFAULT_RETRY_COUNT,
            retry_delay: DEFAULT_RETRY_DELAY
        }
    }
    /// Get 20 random bytes from `/dev/urandom`.
    pub fn get_unique_lock_id(&self) -> io::Result<Vec<u8>> {
        // NOTE(review): this unwrap panics if /dev/urandom cannot be opened
        // even though the signature returns io::Result — consider `?`.
        let file = File::open(&Path::new("/dev/urandom")).unwrap();
        let mut buf = Vec::with_capacity(20);
        match file.take(20).read_to_end(&mut buf) {
            Ok(20) => return Ok(buf),
            Ok(_) => return Err(io::Error::new(io::ErrorKind::Other, "Can't read enough random bytes")),
            Err(e) => return Err(e)
        }
    }
    /// Set retry count and retry delay.
    ///
    /// Retry count defaults to `3`.
    /// Retry delay defaults to `200`.
    pub fn set_retry(&mut self, count: u32, delay: u32) {
        self.retry_count = count;
        self.retry_delay = delay;
    }
    // Try to take the lock on one instance with SET NX PX; any error or a
    // Nil reply (key already held) counts as failure.
    fn lock_instance(&self, client: &redis::Client, resource: &[u8], val: &[u8], ttl: usize) -> bool {
        let con = match client.get_connection() {
            Err(_) => return false,
            Ok(val) => val
        };
        let result : RedisResult<Value> = redis::cmd("SET").arg(resource).arg(val).arg("nx").arg("px").arg(ttl).query(&con);
        match result {
            Ok(Okay) => return true,
            Ok(Nil) => return false,
            Ok(_) => return false,
            Err(_) => return false
        }
    }
    // Wall-clock time in milliseconds.
    fn get_time(&self) -> i64 {
        let time = time::get_time();
        time.sec * 1000 + ((time.nsec/1000000) as i64)
    }
    /// Acquire the lock for the given resource and the requested TTL.
    ///
    /// If it succeeds, a `Lock` instance is returned,
    /// including the value and the validity time
    ///
    /// If it fails. `None` is returned.
    /// A user should retry after a short wait time.
    pub fn lock<'a>(&'a self, resource: &'a [u8], ttl: usize) -> Option<Lock> {
        let val = self.get_unique_lock_id().unwrap();
        let between = Range::new(0, self.retry_delay);
        let mut rng = rand::thread_rng();
        for _ in 0..self.retry_count {
            let mut n = 0;
            let start_time = self.get_time();
            for &ref client in self.servers.iter() {
                if self.lock_instance(client, resource, &val, ttl) {
                    n += 1;
                }
            }
            // Validity = TTL minus elapsed time minus a drift allowance.
            let drift = (ttl as f32 * CLOCK_DRIFT_FACTOR) as i64 + 2;
            let validity_time = (ttl as i64 - ((self.get_time() - start_time)) - drift as i64) as usize;
            if n >= self.quorum && validity_time > 0 {
                return Some(Lock {
                    resource: resource.clone(),
                    val: val,
                    validity_time: validity_time
                });
            } else {
                // No quorum: best-effort release of any partial acquisitions.
                for &ref client in self.servers.iter() {
                    self.unlock_instance(client, resource, &val);
                }
            }
            // NOTE(review): sleep_ms is deprecated; std::thread::sleep with
            // a Duration is the non-deprecated equivalent.
            let n = between.ind_sample(&mut rng);
            sleep_ms(n);
        }
        return None
    }
    // Run the unlock script on one instance; true when the key was deleted.
    fn unlock_instance(&self, client: &redis::Client, resource: &[u8], val: &[u8]) -> bool {
        let con = match client.get_connection() {
            Err(_) => return false,
            Ok(val) => val
        };
        let script = redis::Script::new(UNLOCK_SCRIPT);
        let result : RedisResult<i32> = script.key(resource).arg(val).invoke(&con);
        match result {
            Ok(val) => return val == 1,
            Err(_) => return false
        }
    }
    /// Unlock the given lock.
    ///
    /// Unlock is best effort. It will simply try to contact all instances
    /// and remove the key.
    pub fn unlock(&self, lock: &Lock) {
        for &ref client in self.servers.iter() {
            self.unlock_instance(client, lock.resource, &lock.val);
        }
    }
}
// NOTE(review): all tests below except the unique-id ones require live
// Redis servers on localhost ports 6380-6382.
#[test]
fn test_redlock_get_unique_id() {
    let rl = RedLock::new(vec![]);
    match rl.get_unique_lock_id() {
        Ok(id) => {
            assert_eq!(20, id.len());
        },
        err => panic!("Error thrown: {:?}", err)
    }
}
#[test]
fn test_redlock_get_unique_id_uniqueness() {
    let rl = RedLock::new(vec![]);
    let id1 = rl.get_unique_lock_id().unwrap();
    let id2 = rl.get_unique_lock_id().unwrap();
    assert_eq!(20, id1.len());
    assert_eq!(20, id2.len());
    assert!(id1 != id2);
}
#[test]
fn test_redlock_valid_instance() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    assert_eq!(3, rl.servers.len());
    // Quorum for 3 instances is 3/2 + 1 = 2.
    assert_eq!(2, rl.quorum);
}
#[test]
fn test_redlock_direct_unlock_fails() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let val = rl.get_unique_lock_id().unwrap();
    // Key was never set, so the unlock script deletes nothing.
    assert_eq!(false, rl.unlock_instance(&rl.servers[0], &key, &val))
}
#[test]
fn test_redlock_direct_unlock_succeeds() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let val = rl.get_unique_lock_id().unwrap();
    let con = rl.servers[0].get_connection().unwrap();
    redis::cmd("SET").arg(&*key).arg(&*val).execute(&con);
    assert_eq!(true, rl.unlock_instance(&rl.servers[0], &key, &val))
}
#[test]
fn test_redlock_direct_lock_succeeds() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let val = rl.get_unique_lock_id().unwrap();
    let con = rl.servers[0].get_connection().unwrap();
    redis::cmd("DEL").arg(&*key).execute(&con);
    assert_eq!(true, rl.lock_instance(&rl.servers[0], &*key, &*val, 1000))
}
#[test]
fn test_redlock_unlock() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let val = rl.get_unique_lock_id().unwrap();
    let con = rl.servers[0].get_connection().unwrap();
    let _ : () = redis::cmd("SET").arg(&*key).arg(&*val).query(&con).unwrap();
    let lock = Lock { resource: &key, val: val, validity_time: 0 };
    assert_eq!((), rl.unlock(&lock))
}
#[test]
fn test_redlock_lock() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    match rl.lock(&key, 1000) {
        Some(lock) => {
            assert_eq!(&*key, lock.resource);
            assert_eq!(20, lock.val.len());
            assert!(lock.validity_time > 900);
        },
        None => panic!("Lock failed")
    }
}
#[test]
fn test_redlock_lock_unlock() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let rl2 = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let lock = rl.lock(&key, 1000).unwrap();
    assert!(lock.validity_time > 900);
    // A second manager must not be able to grab a held lock...
    match rl2.lock(&key, 1000) {
        Some(_l) => panic!("Lock acquired, even though it should be locked"),
        None => ()
    }
    rl.unlock(&lock);
    // ...but can once it has been released.
    match rl2.lock(&key, 1000) {
        Some(l) => assert!(l.validity_time > 900),
        None => panic!("Lock couldn't be acquired")
    }
}
refactor: Use non-deprecated methods
Pull request: #2
Approved by: badboy
use redis;
use redis::{RedisResult,Value};
use redis::Value::{Nil, Okay};
use std::fs::File;
use std::path::Path;
use std::io::{self,Read};
use std::thread::sleep;
use std::time::Duration;
use rand;
use rand::distributions::{IndependentSample, Range};
use time;
// Default number of acquisition rounds and per-round delay upper bound (ms).
const DEFAULT_RETRY_COUNT : u32 = 3;
const DEFAULT_RETRY_DELAY : u32 = 200;
// Fraction of the TTL assumed lost to clock drift between Redis instances.
const CLOCK_DRIFT_FACTOR : f32 = 0.01;
// Lua script: delete the key only if it still holds our lock value.
const UNLOCK_SCRIPT : &'static str = r"if redis.call('get',KEYS[1]) == ARGV[1] then
return redis.call('del',KEYS[1])
else
return 0
end";
/// The lock manager.
///
/// Implements the necessary functionality to acquire and release locks
/// and handles the Redis connections.
pub struct RedLock {
    /// List of all Redis clients
    pub servers: Vec<redis::Client>,
    // Number of instances that must grant the lock (N/2+1).
    quorum: u32,
    // How many acquisition rounds to attempt before giving up.
    retry_count: u32,
    // Upper bound (ms) for the random backoff between rounds.
    retry_delay: u32
}
// Handle for an acquired lock; borrows the resource key for its lifetime.
pub struct Lock<'a> {
    /// The resource to lock. Will be used as the key in Redis.
    pub resource: &'a [u8],
    /// The value for this lock.
    pub val: Vec<u8>,
    /// Time the lock is still valid.
    /// Should only be slightly smaller than the requested TTL.
    pub validity_time: usize
}
impl RedLock {
    /// Create a new lock manager instance, defined by the given Redis connection uris.
    /// Quorum is defined to be N/2+1, with N being the number of given Redis instances.
    ///
    /// Sample URI: `"redis://127.0.0.1:6379"`
    ///
    /// # Panics
    ///
    /// Panics when a URI cannot be parsed into a Redis client.
    pub fn new(uris: Vec<&str>) -> RedLock {
        let quorum = (uris.len() as u32) / 2 + 1;
        let mut servers = Vec::with_capacity(uris.len());
        for &uri in uris.iter() {
            servers.push(redis::Client::open(uri).unwrap())
        }
        RedLock {
            servers: servers,
            quorum: quorum,
            retry_count: DEFAULT_RETRY_COUNT,
            retry_delay: DEFAULT_RETRY_DELAY
        }
    }
    /// Get 20 random bytes from `/dev/urandom`.
    ///
    /// # Errors
    ///
    /// Fails when `/dev/urandom` cannot be opened or yields fewer than 20 bytes.
    pub fn get_unique_lock_id(&self) -> io::Result<Vec<u8>> {
        // Propagate open errors instead of panicking (was `.unwrap()` on a
        // function that already returns io::Result).
        let file = File::open("/dev/urandom")?;
        let mut buf = Vec::with_capacity(20);
        match file.take(20).read_to_end(&mut buf) {
            Ok(20) => Ok(buf),
            Ok(_) => Err(io::Error::new(io::ErrorKind::Other, "Can't read enough random bytes")),
            Err(e) => Err(e),
        }
    }
    /// Set retry count and retry delay.
    ///
    /// Retry count defaults to `3`.
    /// Retry delay defaults to `200`.
    pub fn set_retry(&mut self, count: u32, delay: u32) {
        self.retry_count = count;
        self.retry_delay = delay;
    }
    // Try to take the lock on one instance with SET NX PX. Only an `Okay`
    // reply means the key was newly set; Nil (already held) and every error
    // count as failure.
    fn lock_instance(&self, client: &redis::Client, resource: &[u8], val: &[u8], ttl: usize) -> bool {
        let con = match client.get_connection() {
            Err(_) => return false,
            Ok(val) => val
        };
        let result : RedisResult<Value> = redis::cmd("SET").arg(resource).arg(val).arg("nx").arg("px").arg(ttl).query(&con);
        match result {
            Ok(Okay) => true,
            _ => false,
        }
    }
    // Wall-clock time in milliseconds.
    fn get_time(&self) -> i64 {
        let time = time::get_time();
        time.sec * 1000 + ((time.nsec/1000000) as i64)
    }
    /// Acquire the lock for the given resource and the requested TTL.
    ///
    /// If it succeeds, a `Lock` instance is returned,
    /// including the value and the validity time
    ///
    /// If it fails. `None` is returned.
    /// A user should retry after a short wait time.
    ///
    /// # Panics
    ///
    /// Panics when no unique lock id can be read from `/dev/urandom`.
    pub fn lock<'a>(&'a self, resource: &'a [u8], ttl: usize) -> Option<Lock> {
        let val = self.get_unique_lock_id().unwrap();
        let between = Range::new(0, self.retry_delay);
        let mut rng = rand::thread_rng();
        for _ in 0..self.retry_count {
            let mut n = 0;
            let start_time = self.get_time();
            for client in self.servers.iter() {
                if self.lock_instance(client, resource, &val, ttl) {
                    n += 1;
                }
            }
            // Validity = TTL minus elapsed time minus a drift allowance.
            let drift = (ttl as f32 * CLOCK_DRIFT_FACTOR) as i64 + 2;
            let validity_time = (ttl as i64 - (self.get_time() - start_time) - drift) as usize;
            if n >= self.quorum && validity_time > 0 {
                return Some(Lock {
                    // `resource` is a borrowed slice; the previous
                    // `.clone()` only copied the reference anyway.
                    resource: resource,
                    val: val,
                    validity_time: validity_time
                });
            } else {
                // No quorum: best-effort release of any partial acquisitions.
                for client in self.servers.iter() {
                    self.unlock_instance(client, resource, &val);
                }
            }
            // Random backoff before the next acquisition round.
            let backoff = between.ind_sample(&mut rng);
            sleep(Duration::from_millis(backoff as u64));
        }
        None
    }
    // Run the unlock script on one instance; true when the key was deleted
    // (the script returns 1 only if our value still matched).
    fn unlock_instance(&self, client: &redis::Client, resource: &[u8], val: &[u8]) -> bool {
        let con = match client.get_connection() {
            Err(_) => return false,
            Ok(val) => val
        };
        let script = redis::Script::new(UNLOCK_SCRIPT);
        let result : RedisResult<i32> = script.key(resource).arg(val).invoke(&con);
        match result {
            Ok(val) => val == 1,
            Err(_) => false,
        }
    }
    /// Unlock the given lock.
    ///
    /// Unlock is best effort. It will simply try to contact all instances
    /// and remove the key.
    pub fn unlock(&self, lock: &Lock) {
        for client in self.servers.iter() {
            self.unlock_instance(client, lock.resource, &lock.val);
        }
    }
}
// No live Redis needed: only exercises lock-id generation.
#[test]
fn test_redlock_get_unique_id() {
    let rl = RedLock::new(vec![]);
    match rl.get_unique_lock_id() {
        Ok(id) => {
            // A lock value is expected to be exactly 20 bytes long.
            assert_eq!(20, id.len());
        },
        err => panic!("Error thrown: {:?}", err)
    }
}
// Two consecutively generated lock ids must differ (and both be 20 bytes).
#[test]
fn test_redlock_get_unique_id_uniqueness() {
    let rl = RedLock::new(vec![]);
    let id1 = rl.get_unique_lock_id().unwrap();
    let id2 = rl.get_unique_lock_id().unwrap();
    assert_eq!(20, id1.len());
    assert_eq!(20, id2.len());
    assert!(id1 != id2);
}
// Three configured servers yield a majority quorum of 2.
#[test]
fn test_redlock_valid_instance() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    assert_eq!(3, rl.servers.len());
    assert_eq!(2, rl.quorum);
}
// NOTE(review): needs live Redis instances on ports 6380-6382.
// Unlocking a key that was never set must report failure.
#[test]
fn test_redlock_direct_unlock_fails() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let val = rl.get_unique_lock_id().unwrap();
    assert_eq!(false, rl.unlock_instance(&rl.servers[0], &key, &val))
}
// NOTE(review): needs live Redis instances on ports 6380-6382.
// After SETting the key to our value, unlock_instance must succeed.
#[test]
fn test_redlock_direct_unlock_succeeds() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let val = rl.get_unique_lock_id().unwrap();
    let con = rl.servers[0].get_connection().unwrap();
    redis::cmd("SET").arg(&*key).arg(&*val).execute(&con);
    assert_eq!(true, rl.unlock_instance(&rl.servers[0], &key, &val))
}
// NOTE(review): needs live Redis instances on ports 6380-6382.
// Locking a key that is guaranteed absent (DELed first) must succeed.
#[test]
fn test_redlock_direct_lock_succeeds() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let val = rl.get_unique_lock_id().unwrap();
    let con = rl.servers[0].get_connection().unwrap();
    redis::cmd("DEL").arg(&*key).execute(&con);
    assert_eq!(true, rl.lock_instance(&rl.servers[0], &*key, &*val, 1000))
}
// NOTE(review): needs live Redis instances on ports 6380-6382.
// unlock() is best-effort and returns unit regardless of outcome.
#[test]
fn test_redlock_unlock() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let val = rl.get_unique_lock_id().unwrap();
    let con = rl.servers[0].get_connection().unwrap();
    let _ : () = redis::cmd("SET").arg(&*key).arg(&*val).query(&con).unwrap();
    let lock = Lock { resource: &key, val: val, validity_time: 0 };
    assert_eq!((), rl.unlock(&lock))
}
// NOTE(review): needs live Redis instances on ports 6380-6382.
// A successful lock() carries our resource, a 20-byte value, and most of
// the requested 1000 ms TTL as remaining validity.
#[test]
fn test_redlock_lock() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    match rl.lock(&key, 1000) {
        Some(lock) => {
            assert_eq!(&*key, lock.resource);
            assert_eq!(20, lock.val.len());
            assert!(lock.validity_time > 900);
        },
        None => panic!("Lock failed")
    }
}
// NOTE(review): needs live Redis instances on ports 6380-6382.
// A second client must fail to lock a held resource and succeed once the
// first client has unlocked it.
#[test]
fn test_redlock_lock_unlock() {
    let rl = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let rl2 = RedLock::new(vec!["redis://127.0.0.1:6380/", "redis://127.0.0.1:6381/", "redis://127.0.0.1:6382/", ]);
    let key = rl.get_unique_lock_id().unwrap();
    let lock = rl.lock(&key, 1000).unwrap();
    assert!(lock.validity_time > 900);
    match rl2.lock(&key, 1000) {
        Some(_l) => panic!("Lock acquired, even though it should be locked"),
        None => ()
    }
    rl.unlock(&lock);
    match rl2.lock(&key, 1000) {
        Some(l) => assert!(l.validity_time > 900),
        None => panic!("Lock couldn't be acquired")
    }
}
|
use std::cmp::{max, min};
use crate::tokenizer;
use crate::{
constants::*,
token_collector::{Style, StyledToken, TokenCollector},
};
use diffus::{
edit::{self, collection},
Diffable,
};
/// If more than this percentage of either adds or moves is highlighted, we
/// consider it to be a replacement rather than a move, and skip highlighting
/// it.
const MAX_HIGHLIGHT_PERCENTAGE: usize = 30;
const LARGE_COUNT_CHANGE_PERCENT: usize = 100;
const SMALL_COUNT_CHANGE: usize = 10;
/// Like format!(), but faster for our special case
fn format_simple_line(old_new: &str, plus_minus: char, contents: &str) -> String {
    let capacity = old_new.len() + 1 + contents.len() + NORMAL.len();
    let mut result = String::with_capacity(capacity);
    result.push_str(old_new);
    result.push(plus_minus);
    result.push_str(contents);
    result.push_str(NORMAL);
    result
}
/// Format old and new lines in OLD and NEW colors.
///
/// No intra-line refinement.
///
/// Returns one old and one new line array.
#[must_use]
fn simple_format(old_text: &str, new_text: &str) -> (Vec<String>, Vec<String>) {
    // Marker line appended when a text does not end in a newline.
    let no_eof_newline_line = || {
        format!(
            "{}{}{}",
            NO_EOF_NEWLINE_COLOR, NO_EOF_NEWLINE_MARKER, NORMAL
        )
    };

    // Uses the specialized line formatter since this code is in a hot path.
    let mut old_lines: Vec<String> = old_text
        .lines()
        .map(|line| format_simple_line(OLD, '-', line))
        .collect();
    if !old_text.is_empty() && !old_text.ends_with('\n') {
        old_lines.push(no_eof_newline_line());
    }

    let mut new_lines: Vec<String> = new_text
        .lines()
        .map(|line| format_simple_line(NEW, '+', line))
        .collect();
    if !new_text.is_empty() && !new_text.ends_with('\n') {
        new_lines.push(no_eof_newline_line());
    }

    (old_lines, new_lines)
}
/// Returns the last byte index of the nth line of the given string, i.e. the
/// byte index of that line's terminating newline.
///
/// Panics if the text contains fewer than `line_count` newlines.
fn last_byte_index_of_nth_line(text: &str, line_count: usize) -> usize {
    let mut seen: usize = 0;
    for (byte_index, _) in text.char_indices().filter(|&(_, ch)| ch == '\n') {
        seen += 1;
        if seen == line_count {
            return byte_index;
        }
    }
    panic!("Line {} not found in \n{}", line_count, text);
}
/// If old has 2 lines and new 30, try highlighting changes between old and the
/// first 2 lines of new.
///
/// Test case: testdata/partial-refine.diff
#[must_use]
fn partial_format(old_text: &str, new_text: &str) -> (Vec<String>, Vec<String>) {
    let old_linecount = old_text.lines().count();
    let new_linecount = new_text.lines().count();
    // Equal line counts: no partial matching possible, just color the lines.
    if old_linecount == new_linecount {
        return simple_format(old_text, new_text);
    }
    // Mirror case (old longer than new) is handled by the sibling function.
    if old_linecount > new_linecount {
        return partial_format_shortened(old_text, new_text);
    }

    // Invariant at this point: old_text has fewer lines than new_text

    if !old_text.ends_with('\n') {
        // old_text does *not* end in a newline
        //
        // FIXME: Write tests for and handle this case, needs some thought on
        // how to populate new_initial_lines, and how to merge the results at
        // the end of this function.
        return simple_format(old_text, new_text);
    }

    // FIXME: We should try the old_text lines vs both the first and the last
    // lines of new_text and pick the response that has the smallest amount of
    // changes. Currently we just compare old_text to the start of new_text.

    // Extract the old_linecount initial lines from new_text. The +1 steps
    // past the newline byte; in bounds because new_text has more lines than
    // old_linecount, so that newline cannot be the last byte.
    let new_initial_lines_last_offset = last_byte_index_of_nth_line(new_text, old_linecount);
    let new_remaining_lines_first_offset = new_initial_lines_last_offset + 1;
    let new_initial_lines = &new_text[0..new_remaining_lines_first_offset];

    // Refine old_text against the equally-long prefix of new_text...
    let (mut old_text_vs_new_initial_lines_old, mut old_text_vs_new_initial_lines_new) =
        format_split(old_text, new_initial_lines);

    // ...and render the rest of new_text as plain (unrefined) additions.
    let new_remaining_lines = &new_text[new_remaining_lines_first_offset..];
    let (_, mut new_remaining_lines) = simple_format("", new_remaining_lines);

    let mut old_lines: Vec<String> = Vec::new();
    old_lines.append(&mut old_text_vs_new_initial_lines_old);

    let mut new_lines: Vec<String> = Vec::new();
    new_lines.append(&mut old_text_vs_new_initial_lines_new);
    new_lines.append(&mut new_remaining_lines);

    return (old_lines, new_lines);
}
/// If old has 30 lines and new 2, try highlighting changes between the first 2
/// lines of old and new.
///
/// Test case: testdata/shorten-section.diff
///
/// See also partial_format() which is the opposite of this function.
#[must_use]
fn partial_format_shortened(old_text: &str, new_text: &str) -> (Vec<String>, Vec<String>) {
    // Invariant at this point: old_text has more lines than new_text

    if !new_text.ends_with('\n') {
        // new_text does *not* end in a newline
        //
        // FIXME: Write tests for and handle this case, needs some thought on
        // how to populate old_initial_lines, and how to merge the results at
        // the end of this function.
        return simple_format(old_text, new_text);
    }

    // FIXME: We should try the new_text lines vs both the first and the last
    // lines of old_text and pick the response that has the smallest amount of
    // changes. Currently we just compare new_text to the start of old_text.

    // Extract the new_linecount initial lines from old_text. The +1 steps
    // past the newline byte; in bounds because old_text has more lines than
    // new_linecount, so that newline cannot be the last byte.
    let new_linecount = new_text.lines().count();
    let old_initial_lines_last_offset = last_byte_index_of_nth_line(old_text, new_linecount);
    let old_remaining_lines_first_offset = old_initial_lines_last_offset + 1;
    let old_initial_lines = &old_text[0..old_remaining_lines_first_offset];

    // Refine the equally-long prefix of old_text against new_text...
    let (mut new_text_vs_old_initial_lines_old, mut new_text_vs_old_initial_lines_new) =
        format_split(old_initial_lines, new_text);

    // ...and render the remaining lines of old_text as plain removals.
    let old_remaining_lines = &old_text[old_remaining_lines_first_offset..];
    let (mut old_remaining_lines, _) = simple_format(old_remaining_lines, "");

    let mut return_me_old: Vec<String> = Vec::new();
    let mut return_me_new: Vec<String> = Vec::new();
    return_me_old.append(&mut new_text_vs_old_initial_lines_old);
    return_me_old.append(&mut old_remaining_lines);
    return_me_new.append(&mut new_text_vs_old_initial_lines_new);

    return (return_me_old, return_me_new);
}
/// Returns a vector of ANSI highlighted lines: all old lines first, then all
/// new lines.
#[must_use]
pub fn format(old_text: &str, new_text: &str) -> Vec<String> {
    let (old_lines, new_lines) = format_split(old_text, new_text);
    let mut merged = old_lines;
    merged.extend(new_lines);
    merged
}
/// Returns two vectors of ANSI highlighted lines, the old lines and the new
/// lines.
///
/// Tokenizes both texts, diffs the token streams, and renders removed /
/// inserted tokens in inverse video. Falls back to simple_format() for
/// one-sided changes and to partial_format() when the change is too large or
/// would be too highlighted for refinement to be helpful.
#[must_use]
fn format_split(old_text: &str, new_text: &str) -> (Vec<String>, Vec<String>) {
    if old_text.is_empty() || new_text.is_empty() {
        return simple_format(old_text, new_text);
    }

    // These checks make us faster, please use the benchmark.py script before
    // and after if you change this.
    if is_large_byte_count_change(old_text, new_text) {
        return partial_format(old_text, new_text);
    }
    if is_large_newline_count_change(old_text, new_text) {
        return partial_format(old_text, new_text);
    }

    // Find diffs between adds and removals
    let mut old_collector = TokenCollector::create(StyledToken::new("-".to_string(), Style::Old));
    let mut new_collector = TokenCollector::create(StyledToken::new("+".to_string(), Style::New));

    // Tokenize adds and removes before diffing them
    let tokenized_old = tokenizer::tokenize(old_text);
    let tokenized_new = tokenizer::tokenize(new_text);

    let diff = tokenized_old.diff(&tokenized_new);
    match diff {
        edit::Edit::Copy(unchanged) => {
            for token in unchanged {
                old_collector.push(StyledToken::new(token.to_string(), Style::Old));
                new_collector.push(StyledToken::new(token.to_string(), Style::New));
            }
        }
        edit::Edit::Change(diff) => {
            // A plain for loop rather than .map(...).for_each(drop): the
            // closure was executed only for its side effects on the
            // collectors, which is what loops are for.
            for edit in diff {
                match edit {
                    collection::Edit::Copy(token) => {
                        old_collector.push(StyledToken::new(token.to_string(), Style::Old));
                        new_collector.push(StyledToken::new(token.to_string(), Style::New));
                    }
                    collection::Edit::Insert(token) => {
                        if *token == "\n" {
                            // Make sure the highlighted linefeed is visible
                            new_collector
                                .push(StyledToken::new("⏎".to_string(), Style::NewInverse));
                        }
                        new_collector
                            .push(StyledToken::new(token.to_string(), Style::NewInverse));
                    }
                    collection::Edit::Remove(token) => {
                        if *token == "\n" {
                            // Make sure the highlighted linefeed is visible
                            old_collector
                                .push(StyledToken::new("⏎".to_string(), Style::OldInverse));
                        }
                        old_collector
                            .push(StyledToken::new(token.to_string(), Style::OldInverse));
                    }
                    collection::Edit::Change(_) => unimplemented!("Not implemented, help!"),
                }
            }
        }
    }

    let highlighted_old_text = old_collector.render();
    let highlighted_new_text = new_collector.render();

    let highlighted_bytes_count =
        old_collector.highlighted_chars_count() + new_collector.highlighted_chars_count();
    let bytes_count = old_collector.chars_count() + new_collector.chars_count();

    // Don't highlight too much. bytes_count is nonzero here since both texts
    // are non-empty (checked at the top).
    if (100 * highlighted_bytes_count) / bytes_count > MAX_HIGHLIGHT_PERCENTAGE {
        return partial_format(old_text, new_text);
    }

    return to_lines(&highlighted_old_text, &highlighted_new_text);
}
/// True when the two counts differ both by more than SMALL_COUNT_CHANGE and
/// by more than LARGE_COUNT_CHANGE_PERCENT.
#[must_use]
fn is_large_count_change(count1: usize, count2: usize) -> bool {
    // This check makes us ignore some changes, thus making us faster. Please
    // use the benchmark.py script before and after if you touch this code.
    let (low_count, high_count) = if count1 <= count2 {
        (count1, count2)
    } else {
        (count2, count1)
    };

    if high_count - low_count <= SMALL_COUNT_CHANGE {
        return false;
    }

    // "+ 99" makes the result round up, so 0->0, 1->2.
    let threshold = (low_count * (LARGE_COUNT_CHANGE_PERCENT + 100) + 99) / 100;
    high_count >= threshold
}
/// True when the byte lengths of the two texts differ a lot.
#[must_use]
fn is_large_byte_count_change(old_text: &str, new_text: &str) -> bool {
    is_large_count_change(old_text.len(), new_text.len())
}
/// True when the newline (i.e. line) counts of the two texts differ a lot.
///
/// Counting is done with the bytecount crate — presumably chosen for speed
/// since this runs on every refinement candidate; confirm with benchmark.py
/// before replacing it.
#[must_use]
fn is_large_newline_count_change(old_text: &str, new_text: &str) -> bool {
    let old_newline_count = bytecount::count(old_text.as_bytes(), b'\n');
    let new_newline_count = bytecount::count(new_text.as_bytes(), b'\n');
    return is_large_count_change(old_newline_count, new_newline_count);
}
/// Splits two pre-rendered texts into per-line string vectors, appending a
/// no-EOF-newline marker line to any text that does not end in a newline.
#[must_use]
fn to_lines(old: &str, new: &str) -> (Vec<String>, Vec<String>) {
    let split = |text: &str| -> Vec<String> {
        let mut lines: Vec<String> = text.lines().map(str::to_string).collect();
        if !text.is_empty() && !text.ends_with('\n') {
            lines.push(format!(
                "{}{}{}",
                NO_EOF_NEWLINE_COLOR, NO_EOF_NEWLINE_MARKER, NORMAL
            ));
        }
        lines
    };

    (split(old), split(new))
}
#[cfg(test)]
mod tests {
    use super::*;

    #[cfg(test)]
    use pretty_assertions::assert_eq;

    // Test helper: run simple_format() and flatten its (old, new) pair into
    // one vector, old lines first.
    fn simple_format_merged(old_text: &str, new_text: &str) -> Vec<String> {
        let (mut old_lines, mut new_lines) = simple_format(old_text, new_text);
        let mut merged: Vec<String> = Vec::new();
        merged.append(&mut old_lines);
        merged.append(&mut new_lines);
        return merged;
    }

    // Pure adds and pure removes get per-line coloring, no refinement.
    #[test]
    fn test_simple_format_adds_and_removes() {
        let empty: Vec<String> = Vec::new();
        assert_eq!(
            simple_format_merged(&"".to_string(), &"".to_string()),
            empty
        );

        // Test adds-only
        assert_eq!(
            simple_format_merged(&"".to_string(), &"a\n".to_string()),
            ["".to_string() + NEW + "+a" + NORMAL]
        );
        assert_eq!(
            simple_format_merged(&"".to_string(), &"a\nb\n".to_string()),
            [
                "".to_string() + NEW + "+a" + NORMAL,
                "".to_string() + NEW + "+b" + NORMAL,
            ]
        );

        // Test removes-only
        assert_eq!(
            simple_format_merged(&"a\n".to_string(), &"".to_string()),
            ["".to_string() + OLD + "-a" + NORMAL]
        );
        assert_eq!(
            simple_format_merged(&"a\nb\n".to_string(), &"".to_string()),
            [
                "".to_string() + OLD + "-a" + NORMAL,
                "".to_string() + OLD + "-b" + NORMAL,
            ]
        );
    }

    // Changed delimiters get inverse-video highlighting around the
    // unchanged word.
    #[test]
    fn test_quote_change() {
        let result = format(&"<quotes>\n".to_string(), &"[quotes]\n".to_string());
        assert_eq!(
            result,
            [
                format!(
                    "{}-{}<{}quotes{}>{}",
                    OLD, INVERSE_VIDEO, NOT_INVERSE_VIDEO, INVERSE_VIDEO, NORMAL
                ),
                format!(
                    "{}+{}[{}quotes{}]{}",
                    NEW, INVERSE_VIDEO, NOT_INVERSE_VIDEO, INVERSE_VIDEO, NORMAL
                ),
            ]
        )
    }

    // One-sided changes fall back to plain coloring.
    #[test]
    fn test_almost_empty_changes() {
        let result = format(&"x\n".to_string(), &"".to_string());
        assert_eq!(result, [format!("{}-x{}", OLD, NORMAL),]);

        let result = format(&"".to_string(), &"x\n".to_string());
        assert_eq!(result, [format!("{}+x{}", NEW, NORMAL),]);
    }

    // Boundary checks for the byte-count heuristic.
    #[test]
    fn test_is_large_byte_count_change() {
        assert_eq!(is_large_byte_count_change("", ""), false);
        assert_eq!(
            is_large_byte_count_change("", &"x".repeat(SMALL_COUNT_CHANGE)),
            false
        );
        assert_eq!(
            is_large_byte_count_change("", &"x".repeat(SMALL_COUNT_CHANGE + 1)),
            true
        );

        // Verify that doubling the length counts as large
        let base_len = SMALL_COUNT_CHANGE * 2;
        let double_len = base_len * 2;
        let almost_double_len = double_len - 1;
        assert_eq!(
            is_large_byte_count_change(&"x".repeat(base_len), &"y".repeat(almost_double_len)),
            false
        );
        assert_eq!(
            is_large_byte_count_change(&"x".repeat(base_len), &"y".repeat(double_len)),
            true
        );
    }
}
Refactoring: extract the duplicated initial-lines extraction code from partial_format() and partial_format_shortened() into a shared helper function, extract_initial_lines().
use std::cmp::{max, min};
use crate::tokenizer;
use crate::{
constants::*,
token_collector::{Style, StyledToken, TokenCollector},
};
use diffus::{
edit::{self, collection},
Diffable,
};
/// If more than this percentage of either adds or moves is highlighted, we
/// consider it to be a replacement rather than a move, and skip highlighting
/// it.
const MAX_HIGHLIGHT_PERCENTAGE: usize = 30;
const LARGE_COUNT_CHANGE_PERCENT: usize = 100;
const SMALL_COUNT_CHANGE: usize = 10;
/// Like format!(), but faster for our special case
fn format_simple_line(old_new: &str, plus_minus: char, contents: &str) -> String {
    let capacity = old_new.len() + 1 + contents.len() + NORMAL.len();
    let mut result = String::with_capacity(capacity);
    result.push_str(old_new);
    result.push(plus_minus);
    result.push_str(contents);
    result.push_str(NORMAL);
    result
}
/// Format old and new lines in OLD and NEW colors.
///
/// No intra-line refinement.
///
/// Returns one old and one new line array.
#[must_use]
fn simple_format(old_text: &str, new_text: &str) -> (Vec<String>, Vec<String>) {
    // Marker line appended when a text does not end in a newline.
    let no_eof_newline_line = || {
        format!(
            "{}{}{}",
            NO_EOF_NEWLINE_COLOR, NO_EOF_NEWLINE_MARKER, NORMAL
        )
    };

    // Uses the specialized line formatter since this code is in a hot path.
    let mut old_lines: Vec<String> = old_text
        .lines()
        .map(|line| format_simple_line(OLD, '-', line))
        .collect();
    if !old_text.is_empty() && !old_text.ends_with('\n') {
        old_lines.push(no_eof_newline_line());
    }

    let mut new_lines: Vec<String> = new_text
        .lines()
        .map(|line| format_simple_line(NEW, '+', line))
        .collect();
    if !new_text.is_empty() && !new_text.ends_with('\n') {
        new_lines.push(no_eof_newline_line());
    }

    (old_lines, new_lines)
}
/// Returns the last byte index of the nth line of the given string, i.e. the
/// byte index of that line's terminating newline.
///
/// Panics if the text contains fewer than `line_count` newlines.
fn last_byte_index_of_nth_line(text: &str, line_count: usize) -> usize {
    let mut seen: usize = 0;
    for (byte_index, _) in text.char_indices().filter(|&(_, ch)| ch == '\n') {
        seen += 1;
        if seen == line_count {
            return byte_index;
        }
    }
    panic!("Line {} not found in \n{}", line_count, text);
}
/// Returns the prefix of `text` consisting of its first `count` lines,
/// including the trailing newline of line number `count`.
#[must_use]
fn extract_initial_lines(count: usize, text: &str) -> &str {
    let end_of_nth_line = last_byte_index_of_nth_line(text, count);
    &text[..=end_of_nth_line]
}
/// If old has 2 lines and new 30, try highlighting changes between old and the
/// first 2 lines of new.
///
/// Test case: testdata/partial-refine.diff
#[must_use]
fn partial_format(old_text: &str, new_text: &str) -> (Vec<String>, Vec<String>) {
    let old_linecount = old_text.lines().count();
    let new_linecount = new_text.lines().count();
    // Equal line counts: no partial matching possible, just color the lines.
    if old_linecount == new_linecount {
        return simple_format(old_text, new_text);
    }
    // Mirror case (old longer than new) is handled by the sibling function.
    if old_linecount > new_linecount {
        return partial_format_shortened(old_text, new_text);
    }

    // Invariant at this point: old_text has fewer lines than new_text

    if !old_text.ends_with('\n') {
        // old_text does *not* end in a newline
        //
        // FIXME: Write tests for and handle this case, needs some thought on
        // how to populate new_initial_lines, and how to merge the results at
        // the end of this function.
        return simple_format(old_text, new_text);
    }

    // FIXME: We should try the old_text lines vs both the first and the last
    // lines of new_text and pick the response that has the smallest amount of
    // changes. Currently we just compare old_text to the start of new_text.

    // Extract the old_linecount initial lines from new_text.
    let new_initial_lines = extract_initial_lines(old_linecount, new_text);

    // Refine old_text against the equally-long prefix of new_text...
    let (mut old_text_vs_new_initial_lines_old, mut old_text_vs_new_initial_lines_new) =
        format_split(old_text, new_initial_lines);

    // ...and render the rest of new_text as plain (unrefined) additions.
    let new_remaining_lines = &new_text[new_initial_lines.len()..];
    let (_, mut new_remaining_lines) = simple_format("", new_remaining_lines);

    let mut old_lines: Vec<String> = Vec::new();
    old_lines.append(&mut old_text_vs_new_initial_lines_old);

    let mut new_lines: Vec<String> = Vec::new();
    new_lines.append(&mut old_text_vs_new_initial_lines_new);
    new_lines.append(&mut new_remaining_lines);

    return (old_lines, new_lines);
}
/// If old has 30 lines and new 2, try highlighting changes between the first 2
/// lines of old and new.
///
/// Test case: testdata/shorten-section.diff
///
/// See also partial_format() which is the opposite of this function.
#[must_use]
fn partial_format_shortened(old_text: &str, new_text: &str) -> (Vec<String>, Vec<String>) {
    // Invariant at this point: old_text has more lines than new_text

    if !new_text.ends_with('\n') {
        // new_text does *not* end in a newline
        //
        // FIXME: Write tests for and handle this case, needs some thought on
        // how to populate old_initial_lines, and how to merge the results at
        // the end of this function.
        return simple_format(old_text, new_text);
    }

    // FIXME: We should try the new_text lines vs both the first and the last
    // lines of old_text and pick the response that has the smallest amount of
    // changes. Currently we just compare new_text to the start of old_text.

    // Extract the new_linecount initial lines from old_text.
    let new_linecount = new_text.lines().count();
    let old_initial_lines = extract_initial_lines(new_linecount, old_text);

    // Refine the equally-long prefix of old_text against new_text...
    let (mut new_text_vs_old_initial_lines_old, mut new_text_vs_old_initial_lines_new) =
        format_split(old_initial_lines, new_text);

    // ...and render the remaining lines of old_text as plain removals.
    let old_remaining_lines = &old_text[old_initial_lines.len()..];
    let (mut old_remaining_lines, _) = simple_format(old_remaining_lines, "");

    let mut return_me_old: Vec<String> = Vec::new();
    let mut return_me_new: Vec<String> = Vec::new();
    return_me_old.append(&mut new_text_vs_old_initial_lines_old);
    return_me_old.append(&mut old_remaining_lines);
    return_me_new.append(&mut new_text_vs_old_initial_lines_new);

    return (return_me_old, return_me_new);
}
/// Returns a vector of ANSI highlighted lines: all old lines first, then all
/// new lines.
#[must_use]
pub fn format(old_text: &str, new_text: &str) -> Vec<String> {
    let (old_lines, new_lines) = format_split(old_text, new_text);
    let mut merged = old_lines;
    merged.extend(new_lines);
    merged
}
/// Returns two vectors of ANSI highlighted lines, the old lines and the new
/// lines.
///
/// Tokenizes both texts, diffs the token streams, and renders removed /
/// inserted tokens in inverse video. Falls back to simple_format() for
/// one-sided changes and to partial_format() when the change is too large or
/// would be too highlighted for refinement to be helpful.
#[must_use]
fn format_split(old_text: &str, new_text: &str) -> (Vec<String>, Vec<String>) {
    if old_text.is_empty() || new_text.is_empty() {
        return simple_format(old_text, new_text);
    }

    // These checks make us faster, please use the benchmark.py script before
    // and after if you change this.
    if is_large_byte_count_change(old_text, new_text) {
        return partial_format(old_text, new_text);
    }
    if is_large_newline_count_change(old_text, new_text) {
        return partial_format(old_text, new_text);
    }

    // Find diffs between adds and removals
    let mut old_collector = TokenCollector::create(StyledToken::new("-".to_string(), Style::Old));
    let mut new_collector = TokenCollector::create(StyledToken::new("+".to_string(), Style::New));

    // Tokenize adds and removes before diffing them
    let tokenized_old = tokenizer::tokenize(old_text);
    let tokenized_new = tokenizer::tokenize(new_text);

    let diff = tokenized_old.diff(&tokenized_new);
    match diff {
        edit::Edit::Copy(unchanged) => {
            for token in unchanged {
                old_collector.push(StyledToken::new(token.to_string(), Style::Old));
                new_collector.push(StyledToken::new(token.to_string(), Style::New));
            }
        }
        edit::Edit::Change(diff) => {
            // A plain for loop rather than .map(...).for_each(drop): the
            // closure was executed only for its side effects on the
            // collectors, which is what loops are for.
            for edit in diff {
                match edit {
                    collection::Edit::Copy(token) => {
                        old_collector.push(StyledToken::new(token.to_string(), Style::Old));
                        new_collector.push(StyledToken::new(token.to_string(), Style::New));
                    }
                    collection::Edit::Insert(token) => {
                        if *token == "\n" {
                            // Make sure the highlighted linefeed is visible
                            new_collector
                                .push(StyledToken::new("⏎".to_string(), Style::NewInverse));
                        }
                        new_collector
                            .push(StyledToken::new(token.to_string(), Style::NewInverse));
                    }
                    collection::Edit::Remove(token) => {
                        if *token == "\n" {
                            // Make sure the highlighted linefeed is visible
                            old_collector
                                .push(StyledToken::new("⏎".to_string(), Style::OldInverse));
                        }
                        old_collector
                            .push(StyledToken::new(token.to_string(), Style::OldInverse));
                    }
                    collection::Edit::Change(_) => unimplemented!("Not implemented, help!"),
                }
            }
        }
    }

    let highlighted_old_text = old_collector.render();
    let highlighted_new_text = new_collector.render();

    let highlighted_bytes_count =
        old_collector.highlighted_chars_count() + new_collector.highlighted_chars_count();
    let bytes_count = old_collector.chars_count() + new_collector.chars_count();

    // Don't highlight too much. bytes_count is nonzero here since both texts
    // are non-empty (checked at the top).
    if (100 * highlighted_bytes_count) / bytes_count > MAX_HIGHLIGHT_PERCENTAGE {
        return partial_format(old_text, new_text);
    }

    return to_lines(&highlighted_old_text, &highlighted_new_text);
}
/// True when the two counts differ both by more than SMALL_COUNT_CHANGE and
/// by more than LARGE_COUNT_CHANGE_PERCENT.
#[must_use]
fn is_large_count_change(count1: usize, count2: usize) -> bool {
    // This check makes us ignore some changes, thus making us faster. Please
    // use the benchmark.py script before and after if you touch this code.
    let (low_count, high_count) = if count1 <= count2 {
        (count1, count2)
    } else {
        (count2, count1)
    };

    if high_count - low_count <= SMALL_COUNT_CHANGE {
        return false;
    }

    // "+ 99" makes the result round up, so 0->0, 1->2.
    let threshold = (low_count * (LARGE_COUNT_CHANGE_PERCENT + 100) + 99) / 100;
    high_count >= threshold
}
/// True when the byte lengths of the two texts differ a lot.
#[must_use]
fn is_large_byte_count_change(old_text: &str, new_text: &str) -> bool {
    is_large_count_change(old_text.len(), new_text.len())
}
/// True when the newline (i.e. line) counts of the two texts differ a lot.
///
/// Counting is done with the bytecount crate — presumably chosen for speed
/// since this runs on every refinement candidate; confirm with benchmark.py
/// before replacing it.
#[must_use]
fn is_large_newline_count_change(old_text: &str, new_text: &str) -> bool {
    let old_newline_count = bytecount::count(old_text.as_bytes(), b'\n');
    let new_newline_count = bytecount::count(new_text.as_bytes(), b'\n');
    return is_large_count_change(old_newline_count, new_newline_count);
}
/// Splits two pre-rendered texts into per-line string vectors, appending a
/// no-EOF-newline marker line to any text that does not end in a newline.
#[must_use]
fn to_lines(old: &str, new: &str) -> (Vec<String>, Vec<String>) {
    let split = |text: &str| -> Vec<String> {
        let mut lines: Vec<String> = text.lines().map(str::to_string).collect();
        if !text.is_empty() && !text.ends_with('\n') {
            lines.push(format!(
                "{}{}{}",
                NO_EOF_NEWLINE_COLOR, NO_EOF_NEWLINE_MARKER, NORMAL
            ));
        }
        lines
    };

    (split(old), split(new))
}
#[cfg(test)]
mod tests {
    use super::*;

    #[cfg(test)]
    use pretty_assertions::assert_eq;

    // Test helper: run simple_format() and flatten its (old, new) pair into
    // one vector, old lines first.
    fn simple_format_merged(old_text: &str, new_text: &str) -> Vec<String> {
        let (mut old_lines, mut new_lines) = simple_format(old_text, new_text);
        let mut merged: Vec<String> = Vec::new();
        merged.append(&mut old_lines);
        merged.append(&mut new_lines);
        return merged;
    }

    // Pure adds and pure removes get per-line coloring, no refinement.
    #[test]
    fn test_simple_format_adds_and_removes() {
        let empty: Vec<String> = Vec::new();
        assert_eq!(
            simple_format_merged(&"".to_string(), &"".to_string()),
            empty
        );

        // Test adds-only
        assert_eq!(
            simple_format_merged(&"".to_string(), &"a\n".to_string()),
            ["".to_string() + NEW + "+a" + NORMAL]
        );
        assert_eq!(
            simple_format_merged(&"".to_string(), &"a\nb\n".to_string()),
            [
                "".to_string() + NEW + "+a" + NORMAL,
                "".to_string() + NEW + "+b" + NORMAL,
            ]
        );

        // Test removes-only
        assert_eq!(
            simple_format_merged(&"a\n".to_string(), &"".to_string()),
            ["".to_string() + OLD + "-a" + NORMAL]
        );
        assert_eq!(
            simple_format_merged(&"a\nb\n".to_string(), &"".to_string()),
            [
                "".to_string() + OLD + "-a" + NORMAL,
                "".to_string() + OLD + "-b" + NORMAL,
            ]
        );
    }

    // Changed delimiters get inverse-video highlighting around the
    // unchanged word.
    #[test]
    fn test_quote_change() {
        let result = format(&"<quotes>\n".to_string(), &"[quotes]\n".to_string());
        assert_eq!(
            result,
            [
                format!(
                    "{}-{}<{}quotes{}>{}",
                    OLD, INVERSE_VIDEO, NOT_INVERSE_VIDEO, INVERSE_VIDEO, NORMAL
                ),
                format!(
                    "{}+{}[{}quotes{}]{}",
                    NEW, INVERSE_VIDEO, NOT_INVERSE_VIDEO, INVERSE_VIDEO, NORMAL
                ),
            ]
        )
    }

    // One-sided changes fall back to plain coloring.
    #[test]
    fn test_almost_empty_changes() {
        let result = format(&"x\n".to_string(), &"".to_string());
        assert_eq!(result, [format!("{}-x{}", OLD, NORMAL),]);

        let result = format(&"".to_string(), &"x\n".to_string());
        assert_eq!(result, [format!("{}+x{}", NEW, NORMAL),]);
    }

    // Boundary checks for the byte-count heuristic.
    #[test]
    fn test_is_large_byte_count_change() {
        assert_eq!(is_large_byte_count_change("", ""), false);
        assert_eq!(
            is_large_byte_count_change("", &"x".repeat(SMALL_COUNT_CHANGE)),
            false
        );
        assert_eq!(
            is_large_byte_count_change("", &"x".repeat(SMALL_COUNT_CHANGE + 1)),
            true
        );

        // Verify that doubling the length counts as large
        let base_len = SMALL_COUNT_CHANGE * 2;
        let double_len = base_len * 2;
        let almost_double_len = double_len - 1;
        assert_eq!(
            is_large_byte_count_change(&"x".repeat(base_len), &"y".repeat(almost_double_len)),
            false
        );
        assert_eq!(
            is_large_byte_count_change(&"x".repeat(base_len), &"y".repeat(double_len)),
            true
        );
    }
}
|
//! The module that contains the request code.
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::fmt::{Debug, Display, Formatter, Result as FmtResult};
use std::io::Error;
use std::str::from_utf8;
use std::sync::mpsc::channel;
use std::time::Duration;
use super::Method;
use curl::easy::{Easy, List};
use futures::{BoxFuture, failed, Future};
use response::Response;
use tokio_core::reactor::Handle;
use tokio_curl::Session;
use url::Url;
#[cfg(feature = "rustc-serialization")]
use rustc_serialize;
#[cfg(feature = "serde-serialization")]
use serde;
#[cfg(feature = "serde-serialization")]
use serde_json;
/// The default low byte rate threshold.
///
/// See [`Request::lowspeed_limit`](struct.Request.html#method.lowspeed_limit)
/// for more information.
pub const LOW_SPEED_LIMIT: u32 = 10;
/// The default low speed time threshold in seconds.
///
/// See [`Request::lowspeed_limit`](struct.Request.html#method.lowspeed_limit)
/// for more information.
pub const LOW_SPEED_TIME: u32 = 10;
/// The default redirect threshold for a single request.
///
/// cURL will follow this many redirects by default before aborting
/// the request. See [`Request::max_redirects`](struct.Request.html#method.max_redirects)
/// for more information.
pub const MAX_REDIRECTS: u32 = 15;
/// Represents an HTTP request.
///
/// While this can be used directly (and _must_ be for special HTTP verbs, it is
/// preferred to use the [`get`](fn.get.html), [`post`](fn.post.html), etc. functions
/// since they are shorter.
pub struct Request {
    // Raw request body, if any.
    body: Option<Vec<u8>>,
    // Whether cURL follows 3xx-redirects; defaults to true (see new()).
    follow_redirects: bool,
    // Reusable cURL easy handle — NOTE(review): only ever None in the code
    // visible here; confirm where it is populated.
    handle: Option<Easy>,
    // Header name -> value; duplicate names are not kept (see header()).
    headers: HashMap<String, String>,
    // (bytes, duration) low-speed abort thresholds, or None to disable.
    lowspeed_limits: Option<(u32, Duration)>,
    // Maximum number of redirects cURL follows before aborting.
    max_redirects: u32,
    // HTTP verb for this request.
    method: Method,
    // Parameters; one name may map to several values.
    params: HashMap<String, Vec<String>>,
    // Overall request timeout, if any.
    timeout: Option<Duration>,
    // Target URL (owned copy, see new()).
    url: Url
}
impl Request {
/// Creates a new instance of `Request`.
///
/// Defaults: redirects are followed (up to MAX_REDIRECTS), the low-speed
/// limits are LOW_SPEED_LIMIT bytes per LOW_SPEED_TIME seconds, and no
/// body, headers, params or timeout are set.
pub fn new(url: &Url, method: Method) -> Self {
    Request {
        body: None,
        follow_redirects: true,
        handle: None,
        headers: HashMap::new(),
        lowspeed_limits: Some((LOW_SPEED_LIMIT, Duration::from_secs(LOW_SPEED_TIME as u64))),
        max_redirects: MAX_REDIRECTS,
        method: method,
        params: HashMap::new(),
        // Cloned so the request owns its URL.
        url: url.clone()
    }
}
/// Sets the body of the request as raw byte array.
pub fn body(mut self, body: &AsRef<[u8]>) -> Self {
    // Copy the bytes so the request owns its body.
    self.body = Some(body.as_ref().to_vec());
    self
}
/// Sets the option whether to follow 3xx-redirects or not.
///
/// Defaults to `true`.
///
/// Builder-style: consumes and returns the request.
pub fn follow_redirects(mut self, follow: bool) -> Self {
    self.follow_redirects = follow;
    self
}
/// Sets an HTTP header for the request. Remove headers by passing
/// an empty value.
///
/// ## Duplicates
/// In spite of the W3C allowing multiple headers with the same name
/// (https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2),
/// we do not so that we get a cleaner and leaner API.
///
/// If you really need to specify multiple header values for a single
/// header, just set a comma-separated list here, as that, as per standards,
/// is equivalent to sending multiple headers with the same name (see link).
/// If your server code can't deal with that, go and burn. :P
pub fn header(mut self, name: &str, value: &str) -> Self {
    // A non-empty value inserts / overwrites; an empty value removes.
    if !value.is_empty() {
        self.headers.insert(name.to_owned(), value.to_owned());
    } else {
        self.headers.remove(name);
    }
    self
}
/// Serializes the given object to JSON and uses that as the request body.
/// Also automatically sets the `Content-Type` to `application/json`.
#[cfg(feature = "rustc-serialization")]
pub fn json<T: rustc_serialize::Encodable>(self, body: &T) -> Self {
self.set_json(rustc_serialize::json::encode(body).unwrap().into_bytes())
}
/// Serializes the given object to JSON and uses that as the request body.
/// Also automatically sets the `Content-Type` to `application/json`.
#[cfg(feature = "serde-serialization")]
pub fn json<T: serde::Serialize>(self, body: &T) -> Self {
self.set_json(serde_json::to_vec(body).unwrap())
}
/// Sets the thresholds which, when reached, aborts a download due to too
/// low speeds.
///
/// Pass 0 for either parameter to disable lowspeed limiting.
///
/// ## Remarks
/// `bytes` sets the minimum average amount of bytes transferred in `per_duration`
/// time. If this number is not reached, cURL will abort the transfer because the transfer
/// speed is too low.
///
/// The values here default to [`LOW_SPEED_LIMIT`](constant.LOW_SPEED_LIMIT.html) and
/// [`LOW_SPEED_TIME`](constant.LOW_SPEED_TIME.html).
pub fn lowspeed_limit(mut self, bytes: u32, per_duration: Duration) -> Self {
self.lowspeed_limits = if bytes > 0 && per_duration > Duration::from_secs(0) {
Some((bytes, per_duration))
} else {
None
};
self
}
/// Sets the maximum amount of redirects cURL will follow when
/// [`Request::follow_redirects`](#method.follow_redirects) is
/// enabled.
pub fn max_redirects(mut self, max_redirects: u32) -> Self {
self.max_redirects = max_redirects;
self
}
/// Adds a URL parameter to the request.
///
/// ## Duplicates
/// Duplicates are allowed to enable things like query parameters that use
/// PHP array syntax (`&key[]=value`).
pub fn param(mut self, name: &str, value: &str) -> Self {
let value = value.to_owned();
match self.params.entry(name.to_owned()) {
Entry::Occupied(mut e) => e.get_mut().push(value),
Entry::Vacant(e) => { e.insert(vec![value]); () }
};
self
}
/// Creates a new `Session` on the specified event loop to send the HTTP request through
/// and returns a future that fires off the request, parses the response and resolves to
/// a `Response`-struct on success.
pub fn send(self, h: Handle) -> BoxFuture<Response, Error> {
self.send_with_session(&Session::new(h))
}
/// Uses the given `Session` to send the HTTP request through and returns a future that
/// fires off the request, parses the response and resolves to a `Response`-struct on success.
pub fn send_with_session(mut self, session: &Session) -> BoxFuture<Response, Error> {
{
let mut query_pairs = self.url.query_pairs_mut();
for (key, values) in self.params {
for value in values {
query_pairs.append_pair(&key, &value);
}
}
}
let headers = {
let mut list = List::new();
for (key, value) in self.headers {
list.append(&format!("{}: {}", key.trim(), value.trim())).expect("Failed to append header value to (native cURL) header list.");
}
list
};
let mut easy = self.handle.unwrap_or_else(|| Easy::new());
let (header_tx, header_rx) = channel();
let (body_tx, body_rx) = channel();
let config_res = {
// Make the borrow checker happy
let body = self.body;
let follow_redirects = self.follow_redirects;
let lowspeed_limits = self.lowspeed_limits;
let max_redirects = self.max_redirects;
let method = self.method;
let timeout = self.timeout;
let url = self.url;
let mut first_header = true;
// We cannot use try! here, since we're dealing with futures, not with Results
Ok(())
.and_then(|_| easy.accept_encoding(""))
.and_then(|_| easy.custom_request(method.as_ref()))
.and_then(|_| if follow_redirects {
easy.follow_location(true)
.and_then(|_| easy.max_redirections(max_redirects))
} else {
Ok(())
})
.and_then(|_| easy.header_function(move |header| {
match from_utf8(header) {
Ok(s) => {
let s = s.trim(); // Headers are \n-separated
if !first_header && s.len() > 0 { // First header is HTTP status line, don't want that
let _ = header_tx.send(s.to_owned());
}
first_header = false;
true
},
Err(_) => false
}
}))
.and_then(|_| easy.http_headers(headers))
.and_then(|_| if let Some((bytes, per_time)) = lowspeed_limits {
easy.low_speed_limit(bytes)
.and_then(|_| easy.low_speed_time(per_time))
} else {
Ok(())
})
.and_then(|_| if method == Method::Head {
easy.nobody(true)
} else {
Ok(())
})
.and_then(|_| if let Some(ref body) = body {
easy.post_fields_copy(body)
} else {
Ok(())
})
.and_then(|_| if let Some(timeout) = timeout {
easy.timeout(timeout)
} else {
Ok(())
})
.and_then(|_| easy.url(url.as_str()))
.and_then(|_| easy.write_function(move |data| {
let _ = body_tx.send(Vec::from(data));
Ok(data.len())
}))
};
match config_res {
Ok(_) => session.perform(easy)
.map(move |ez| {
let body = body_rx.try_iter().fold(Vec::new(), |mut data, slice| {
data.extend(slice);
data
});
let headers = header_rx.try_iter().collect::<Vec<_>>();
(ez, headers, body)
})
.map(|(ez, headers, body)| Response::new(ez, headers, body))
.map_err(|err| err.into_error())
.boxed(),
Err(error) => failed(error.into()).boxed()
}
}
/// Set the maximum time the request is allowed to take.
///
/// Disabled by default in favor of [`lowspeed_limit`]
pub fn timeout(mut self, duration: Duration) -> Self {
self.timeout = Some(duration);
self
}
/// Uses the given cURL handle in the request process reusing its resources
/// and improving performance.
///
/// This is solely a way to improve performance, it is not necessary to call
/// this method prior to firing off the request. The easy handle will be created
/// automatically if necessary.
pub fn use_handle(mut self, handle: Easy) -> Self {
self.handle = Some(handle);
self
}
#[cfg(any(feature = "rustc-serialization", feature = "serde-serialization"))]
fn set_json(mut self, body: Vec<u8>) -> Self {
self.body = Some(body);
self.header("Content-Type", "application/json")
}
}
impl Debug for Request {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        // -1 signals "no body set", letting readers distinguish a missing
        // body from an empty one.
        let body_len = match self.body {
            Some(ref bytes) => bytes.len() as isize,
            None => -1,
        };
        fmt.debug_struct("Request")
            .field("body_len", &body_len)
            .field("follow_redirects", &self.follow_redirects)
            .field("headers", &self.headers)
            .field("method", &self.method)
            .field("params", &self.params)
            .field("reuses_handle", &self.handle.is_some())
            .field("url", &self.url)
            .finish()
    }
}
impl Display for Request {
    /// Formats the request as `<method> <url>`.
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        fmt.write_fmt(format_args!("{} {}", self.method, self.url))
    }
}
#[cfg(test)]
mod tests {
    use ::{Method, Request};
    use url::Url;
    #[cfg(feature = "rustc-serialization")]
    use rustc_serialize;
    #[cfg(feature = "serde-serialization")]
    use serde_json;

    /// Small serializable payload used to exercise `Request::body`.
    #[cfg_attr(feature = "rustc-serialization", derive(RustcEncodable, RustcDecodable))]
    #[cfg_attr(feature = "serde-serialization", derive(Serialize, Deserialize))]
    struct TestPayload {
        a: u32,
        b: u32
    }

    /// Setting a serialized payload as the body must populate `body`.
    #[test]
    fn test_payload() {
        let r = Request::new(&Url::parse("http://google.com/").unwrap(), Method::Get)
            .body(&get_serialized_payload());
        assert!(r.body.is_some());
    }

    /// JSON-encodes the test payload via rustc-serialize.
    #[cfg(feature = "rustc-serialization")]
    fn get_serialized_payload() -> Vec<u8> {
        rustc_serialize::json::encode(&TestPayload { a: 10, b: 15 }).unwrap().into_bytes()
    }

    /// JSON-encodes the test payload via serde.
    #[cfg(feature = "serde-serialization")]
    fn get_serialized_payload() -> Vec<u8> {
        serde_json::to_vec(&TestPayload { a: 10, b: 15}).unwrap()
    }
}
Trim query parameters before appending them to the URL
//! The module that contains the request code.
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::fmt::{Debug, Display, Formatter, Result as FmtResult};
use std::io::Error;
use std::str::from_utf8;
use std::sync::mpsc::channel;
use std::time::Duration;
use super::Method;
use curl::easy::{Easy, List};
use futures::{BoxFuture, failed, Future};
use response::Response;
use tokio_core::reactor::Handle;
use tokio_curl::Session;
use url::Url;
#[cfg(feature = "rustc-serialization")]
use rustc_serialize;
#[cfg(feature = "serde-serialization")]
use serde;
#[cfg(feature = "serde-serialization")]
use serde_json;
/// The default low byte rate threshold.
///
/// Minimum number of bytes that must be transferred within
/// [`LOW_SPEED_TIME`](constant.LOW_SPEED_TIME.html) seconds, otherwise
/// cURL aborts the transfer as too slow.
///
/// See [`Request::lowspeed_limit`](struct.Request.html#method.lowspeed_limit)
/// for more information.
pub const LOW_SPEED_LIMIT: u32 = 10;
/// The default low speed time threshold in seconds.
///
/// See [`Request::lowspeed_limit`](struct.Request.html#method.lowspeed_limit)
/// for more information.
pub const LOW_SPEED_TIME: u32 = 10;
/// The default redirect threshold for a single request.
///
/// cURL will follow this many redirects by default before aborting
/// the request. See [`Request::max_redirects`](struct.Request.html#method.max_redirects)
/// for more information.
pub const MAX_REDIRECTS: u32 = 15;
/// Represents an HTTP request.
///
/// While this can be used directly (and _must_ be for special HTTP verbs), it is
/// preferred to use the [`get`](fn.get.html), [`post`](fn.post.html), etc. functions
/// since they are shorter.
pub struct Request {
    /// Raw request body bytes, if one was set.
    body: Option<Vec<u8>>,
    /// Whether cURL should follow 3xx redirects (defaults to `true`).
    follow_redirects: bool,
    /// Optionally reused cURL easy handle (see `use_handle`).
    handle: Option<Easy>,
    /// Header name -> value; duplicate names are not kept (see `header`).
    headers: HashMap<String, String>,
    /// Low-speed abort thresholds as `(bytes, per_duration)`; `None` disables them.
    lowspeed_limits: Option<(u32, Duration)>,
    /// Maximum number of redirects followed when `follow_redirects` is on.
    max_redirects: u32,
    /// HTTP method used for the request.
    method: Method,
    /// Query parameter name -> values; duplicates allowed (see `param`).
    params: HashMap<String, Vec<String>>,
    /// Overall request timeout; `None` disables it.
    timeout: Option<Duration>,
    /// Target URL; collected params are appended to its query string on send.
    url: Url
}
impl Request {
    /// Creates a new instance of `Request`.
    ///
    /// Lowspeed limits default to `LOW_SPEED_LIMIT` bytes per `LOW_SPEED_TIME`
    /// seconds, redirects are followed (up to `MAX_REDIRECTS`) and no overall
    /// timeout is set.
    pub fn new(url: &Url, method: Method) -> Self {
        Request {
            body: None,
            follow_redirects: true,
            handle: None,
            headers: HashMap::new(),
            lowspeed_limits: Some((LOW_SPEED_LIMIT, Duration::from_secs(LOW_SPEED_TIME as u64))),
            max_redirects: MAX_REDIRECTS,
            method: method,
            params: HashMap::new(),
            timeout: None,
            url: url.clone()
        }
    }

    /// Sets the body of the request as raw byte array.
    pub fn body(mut self, body: &AsRef<[u8]>) -> Self {
        self.body = Some(Vec::from(body.as_ref()));
        self
    }

    /// Sets the option whether to follow 3xx-redirects or not.
    ///
    /// Defaults to `true`.
    pub fn follow_redirects(mut self, follow: bool) -> Self {
        self.follow_redirects = follow;
        self
    }

    /// Sets an HTTP header for the request. Remove headers by passing
    /// an empty value.
    ///
    /// ## Duplicates
    /// In spite of the W3C allowing multiple headers with the same name
    /// (https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2),
    /// we do not so that we get a cleaner and leaner API.
    ///
    /// If you really need to specify multiple header values for a single
    /// header, just set a comma-separated list here, as that, as per standards,
    /// is equivalent to sending multiple headers with the same name (see link).
    /// If your server code can't deal with that, go and burn. :P
    pub fn header(mut self, name: &str, value: &str) -> Self {
        if value.is_empty() {
            self.headers.remove(name);
        } else {
            self.headers.insert(name.to_owned(), value.to_owned());
        }
        self
    }

    /// Serializes the given object to JSON and uses that as the request body.
    /// Also automatically sets the `Content-Type` to `application/json`.
    #[cfg(feature = "rustc-serialization")]
    pub fn json<T: rustc_serialize::Encodable>(self, body: &T) -> Self {
        self.set_json(rustc_serialize::json::encode(body).unwrap().into_bytes())
    }

    /// Serializes the given object to JSON and uses that as the request body.
    /// Also automatically sets the `Content-Type` to `application/json`.
    #[cfg(feature = "serde-serialization")]
    pub fn json<T: serde::Serialize>(self, body: &T) -> Self {
        self.set_json(serde_json::to_vec(body).unwrap())
    }

    /// Sets the thresholds which, when reached, aborts a download due to too
    /// low speeds.
    ///
    /// Pass 0 for either parameter to disable lowspeed limiting.
    ///
    /// ## Remarks
    /// `bytes` sets the minimum average amount of bytes transferred in `per_duration`
    /// time. If this number is not reached, cURL will abort the transfer because the transfer
    /// speed is too low.
    ///
    /// The values here default to [`LOW_SPEED_LIMIT`](constant.LOW_SPEED_LIMIT.html) and
    /// [`LOW_SPEED_TIME`](constant.LOW_SPEED_TIME.html).
    pub fn lowspeed_limit(mut self, bytes: u32, per_duration: Duration) -> Self {
        self.lowspeed_limits = if bytes > 0 && per_duration > Duration::from_secs(0) {
            Some((bytes, per_duration))
        } else {
            None
        };
        self
    }

    /// Sets the maximum amount of redirects cURL will follow when
    /// [`Request::follow_redirects`](#method.follow_redirects) is
    /// enabled.
    pub fn max_redirects(mut self, max_redirects: u32) -> Self {
        self.max_redirects = max_redirects;
        self
    }

    /// Adds a URL parameter to the request.
    ///
    /// ## Duplicates
    /// Duplicates are allowed to enable things like query parameters that use
    /// PHP array syntax (`&key[]=value`).
    pub fn param(mut self, name: &str, value: &str) -> Self {
        let value = value.to_owned();
        match self.params.entry(name.to_owned()) {
            Entry::Occupied(mut e) => e.get_mut().push(value),
            Entry::Vacant(e) => { e.insert(vec![value]); () }
        };
        self
    }

    /// Creates a new `Session` on the specified event loop to send the HTTP request through
    /// and returns a future that fires off the request, parses the response and resolves to
    /// a `Response`-struct on success.
    pub fn send(self, h: Handle) -> BoxFuture<Response, Error> {
        self.send_with_session(&Session::new(h))
    }

    /// Uses the given `Session` to send the HTTP request through and returns a future that
    /// fires off the request, parses the response and resolves to a `Response`-struct on success.
    pub fn send_with_session(mut self, session: &Session) -> BoxFuture<Response, Error> {
        {
            // Move the collected parameters into the URL's query string.
            // Trim keys and values (as is done for headers below) so stray
            // whitespace doesn't end up percent-encoded in the URL.
            let mut query_pairs = self.url.query_pairs_mut();
            for (key, values) in self.params {
                for value in values {
                    query_pairs.append_pair(key.trim(), value.trim());
                }
            }
        }
        // Build the native cURL header list from our header map.
        let headers = {
            let mut list = List::new();
            for (key, value) in self.headers {
                list.append(&format!("{}: {}", key.trim(), value.trim())).expect("Failed to append header value to (native cURL) header list.");
            }
            list
        };
        let mut easy = self.handle.unwrap_or_else(|| Easy::new());
        // Channels carry header lines / body chunks out of the cURL callbacks.
        let (header_tx, header_rx) = channel();
        let (body_tx, body_rx) = channel();
        let config_res = {
            // Make the borrow checker happy
            let body = self.body;
            let follow_redirects = self.follow_redirects;
            let lowspeed_limits = self.lowspeed_limits;
            let max_redirects = self.max_redirects;
            let method = self.method;
            let timeout = self.timeout;
            let url = self.url;
            let mut first_header = true;
            // We cannot use try! here, since we're dealing with futures, not with Results
            Ok(())
                .and_then(|_| easy.accept_encoding(""))
                .and_then(|_| easy.custom_request(method.as_ref()))
                .and_then(|_| if follow_redirects {
                    easy.follow_location(true)
                        .and_then(|_| easy.max_redirections(max_redirects))
                } else {
                    Ok(())
                })
                .and_then(|_| easy.header_function(move |header| {
                    match from_utf8(header) {
                        Ok(s) => {
                            let s = s.trim(); // Headers are \n-separated
                            if !first_header && s.len() > 0 { // First header is HTTP status line, don't want that
                                let _ = header_tx.send(s.to_owned());
                            }
                            first_header = false;
                            true
                        },
                        Err(_) => false
                    }
                }))
                .and_then(|_| easy.http_headers(headers))
                .and_then(|_| if let Some((bytes, per_time)) = lowspeed_limits {
                    easy.low_speed_limit(bytes)
                        .and_then(|_| easy.low_speed_time(per_time))
                } else {
                    Ok(())
                })
                .and_then(|_| if method == Method::Head {
                    easy.nobody(true)
                } else {
                    Ok(())
                })
                .and_then(|_| if let Some(ref body) = body {
                    easy.post_fields_copy(body)
                } else {
                    Ok(())
                })
                .and_then(|_| if let Some(timeout) = timeout {
                    easy.timeout(timeout)
                } else {
                    Ok(())
                })
                .and_then(|_| easy.url(url.as_str()))
                .and_then(|_| easy.write_function(move |data| {
                    let _ = body_tx.send(Vec::from(data));
                    Ok(data.len())
                }))
        };
        match config_res {
            // Configuration succeeded: perform the transfer and drain the
            // channels into a Response once it completes.
            Ok(_) => session.perform(easy)
                .map(move |ez| {
                    let body = body_rx.try_iter().fold(Vec::new(), |mut data, slice| {
                        data.extend(slice);
                        data
                    });
                    let headers = header_rx.try_iter().collect::<Vec<_>>();
                    (ez, headers, body)
                })
                .map(|(ez, headers, body)| Response::new(ez, headers, body))
                .map_err(|err| err.into_error())
                .boxed(),
            // Configuration failed: resolve to an error future immediately.
            Err(error) => failed(error.into()).boxed()
        }
    }

    /// Set the maximum time the request is allowed to take.
    ///
    /// Disabled by default in favor of [`lowspeed_limit`](#method.lowspeed_limit).
    pub fn timeout(mut self, duration: Duration) -> Self {
        self.timeout = Some(duration);
        self
    }

    /// Uses the given cURL handle in the request process reusing its resources
    /// and improving performance.
    ///
    /// This is solely a way to improve performance, it is not necessary to call
    /// this method prior to firing off the request. The easy handle will be created
    /// automatically if necessary.
    pub fn use_handle(mut self, handle: Easy) -> Self {
        self.handle = Some(handle);
        self
    }

    /// Stores the serialized JSON body and sets the JSON content type header.
    #[cfg(any(feature = "rustc-serialization", feature = "serde-serialization"))]
    fn set_json(mut self, body: Vec<u8>) -> Self {
        self.body = Some(body);
        self.header("Content-Type", "application/json")
    }
}
impl Debug for Request {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        // -1 signals "no body set", letting readers distinguish a missing
        // body from an empty one.
        let body_len = match self.body {
            Some(ref bytes) => bytes.len() as isize,
            None => -1,
        };
        fmt.debug_struct("Request")
            .field("body_len", &body_len)
            .field("follow_redirects", &self.follow_redirects)
            .field("headers", &self.headers)
            .field("method", &self.method)
            .field("params", &self.params)
            .field("reuses_handle", &self.handle.is_some())
            .field("url", &self.url)
            .finish()
    }
}
impl Display for Request {
    /// Formats the request as `<method> <url>`.
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        fmt.write_fmt(format_args!("{} {}", self.method, self.url))
    }
}
#[cfg(test)]
mod tests {
    use ::{Method, Request};
    use url::Url;
    #[cfg(feature = "rustc-serialization")]
    use rustc_serialize;
    #[cfg(feature = "serde-serialization")]
    use serde_json;

    /// Small serializable payload used to exercise `Request::body`.
    #[cfg_attr(feature = "rustc-serialization", derive(RustcEncodable, RustcDecodable))]
    #[cfg_attr(feature = "serde-serialization", derive(Serialize, Deserialize))]
    struct TestPayload {
        a: u32,
        b: u32
    }

    /// Setting a serialized payload as the body must populate `body`.
    #[test]
    fn test_payload() {
        let r = Request::new(&Url::parse("http://google.com/").unwrap(), Method::Get)
            .body(&get_serialized_payload());
        assert!(r.body.is_some());
    }

    /// JSON-encodes the test payload via rustc-serialize.
    #[cfg(feature = "rustc-serialization")]
    fn get_serialized_payload() -> Vec<u8> {
        rustc_serialize::json::encode(&TestPayload { a: 10, b: 15 }).unwrap().into_bytes()
    }

    /// JSON-encodes the test payload via serde.
    #[cfg(feature = "serde-serialization")]
    fn get_serialized_payload() -> Vec<u8> {
        serde_json::to_vec(&TestPayload { a: 10, b: 15}).unwrap()
    }
}
//! This module contains the logic used to resolve identifiers to their locals or globals.
//!
//! Internally, this module also performs the register allocation for all locals. This makes it
//! easy to reference upvalues (see `UpvalDesc` in program.rs) and makes the bytecode emitter do
//! less work.
use ast::*;
use visit::*;
use program::UpvalDesc;
use span::{Span, Spanned};
use std::mem;
use std::collections::HashMap;
use std::default::Default;
/// Data used by the resolver, associated to a function
struct FuncData {
    /// All locals declared in the function, in declaration order; a local's
    /// id is its index in this vector.
    locals: Vec<String>,
    /// Upvalues referenced from within the function
    upvals: Vec<UpvalDesc>,
    /// Names of the upvalues, parallel to `upvals`.
    upval_names: Vec<String>,
    /// Maps known upvalue names to their id
    upval_map: HashMap<String, usize>,
    /// Stack of lists of locals. Tracks all active scopes and thus all available locals.
    scopes: Vec<Vec<usize>>,
}
impl FuncData {
    /// Creates a fresh `FuncData` whose initial locals are the function's parameters.
    fn new(locals: Vec<String>) -> FuncData {
        FuncData {
            locals: locals,
            upvals: vec![],
            upval_names: vec![],
            upval_map: Default::default(),
            scopes: vec![],
        }
    }

    /// Registers an upvalue and returns its id
    fn add_upval(&mut self, name: String, desc: UpvalDesc) -> usize {
        let id = self.upvals.len();
        self.upvals.push(desc);
        self.upval_names.push(name.clone());
        self.upval_map.insert(name, id);
        id
    }

    /// Finds a reachable local (no upvalues are considered) with the given name and returns its id
    ///
    /// Scans the scope stack from the innermost scope outwards so shadowing
    /// locals win. Returns `None` when no scope declares the name; unlike the
    /// previous `scopes.len() - 1` index arithmetic, this cannot underflow and
    /// panic when no scope is active.
    fn get_local(&self, name: &String) -> Option<usize> {
        for scope in self.scopes.iter().rev() {
            for &id in scope {
                if self.locals[id] == *name {
                    return Some(id);
                }
            }
        }
        None
    }
}
/// A resolver will resolve any `VNamed` references in a function
struct Resolver {
    /// Stack of active functions; the innermost function is the last element.
    funcs: Vec<FuncData>,
}
impl Resolver {
/// Finds an upvalue in a parent function
fn find_upval(&mut self, name: &String, userlvl: usize) -> Option<usize> {
if userlvl == 0 {
return None;
}
// search parent functions for locals / upvalues that match
if let Some(id) = self.funcs[userlvl - 1].get_local(name) {
return Some(self.funcs[userlvl].add_upval(name.clone(), UpvalDesc::Local(id)));
} else {
if let Some(id) = self.find_upval(name, userlvl - 1) {
return Some(self.funcs[userlvl].add_upval(name.clone(), UpvalDesc::Upval(id)));
} else {
return None;
}
}
None
}
/// Searches for an upvalue with the given name
fn get_upval(&mut self, name: &String) -> Option<usize> {
let level = self.funcs.len() - 1;
{
// search known upvals first
let data = &self.funcs[level];
if let Some(id) = data.upval_map.get(name) {
return Some(*id);
}
}
self.find_upval(name, level)
}
/// Declares a new local with the given name in the currently active scope and function.
fn add_local(&mut self, name: String) -> usize {
let level = self.funcs.len() - 1;
// this might be a redeclaration, in which case we ignore it (the emitter handles it)
let func = &mut self.funcs[level];
let scopelvl = func.scopes.len() - 1;
{
let scope = &func.scopes[scopelvl];
for id in scope {
let lname = &func.locals[*id];
if *lname == name {
// already declared
return *id;
}
}
}
// create new local
let scope = &mut func.scopes[scopelvl];
let id = func.locals.len();
func.locals.push(name);
scope.push(id);
id
}
/// Resolves a block and declares a list of locals inside of it
fn resolve_block(&mut self, b: &mut Block, locals: Vec<String>) {
let level = self.funcs.len() - 1;
self.funcs[level].scopes.push(vec![]);
for name in locals {
self.add_local(name);
}
walk_block(b, self);
let data = &mut self.funcs[level];
let scope = data.scopes.pop().unwrap();
for id in scope {
let name = &data.locals[id];
b.localmap.insert(name.clone(), id);
}
}
/// Resolves the given named variable
fn resolve_var(&mut self, name: &String, span: Span) -> _Variable {
// first, try a local with that name
if let Some(id) = self.funcs[self.funcs.len() - 1].get_local(&name) {
VLocal(id)
} else {
// find an upvalue
if let Some(id) = self.get_upval(&name) {
VUpval(id)
} else {
// fall back to global access; resolve environment
let envvar = Box::new(Spanned::new(span,
self.resolve_var(&"_ENV".to_string(), span)));
VResGlobal(envvar, name.clone())
}
}
}
}
impl Visitor for Resolver {
    /// Resolves statements that declare locals or introduce scoped blocks.
    fn visit_stmt(&mut self, s: &mut Stmt) {
        match s.value {
            SDecl(ref mut names, ref mut exprs) => {
                // Resolve the initializer expressions *before* declaring the
                // names, so `local i = i` refers to the outer `i`.
                for expr in exprs {
                    walk_expr(expr, self);
                }
                for name in names {
                    self.add_local(name.clone());
                }
            },
            SLFunc(ref mut name, ref mut f) => {
                // Declare the function's own name first so its body can recurse.
                self.add_local(name.clone());
                self.visit_func(f);
            },
            SFor{ref var, ref mut body, ..} => {
                // The loop variable is a local of the loop body's scope.
                self.resolve_block(body, vec![var.clone()]);
            },
            SForIn{ref vars, ref mut body, ..} => {
                self.resolve_block(body, vars.clone());
            },
            _ => {
                walk_stmt(s, self);
            }
        }
    }
    /// Rewrites `VNamed` variables in place with their resolved form.
    fn visit_var(&mut self, v: &mut Variable) {
        {
            let ref mut var = v.value;
            if let VNamed(..) = *var {
                let newvar = if let VNamed(ref name) = *var {
                    self.resolve_var(name, v.span)
                } else { unreachable!(); };
                mem::replace(var, newvar);
                return;
            }
        }
        walk_var(v, self);
    }
    fn visit_block(&mut self, b: &mut Block) {
        self.resolve_block(b, vec![]);
    }
    /// Pushes a `FuncData` for the function, resolves its body and stores the
    /// collected locals and upvalues back into the AST node.
    fn visit_func(&mut self, f: &mut Function) {
        let mut data = FuncData::new(f.params.clone());
        if self.funcs.len() == 0 {
            // root function, add implicit _ENV upvalue
            data.add_upval("_ENV".to_string(), UpvalDesc::Upval(0)); // the UpvalDesc is ignored
        }
        self.funcs.push(data);
        self.visit_block(&mut f.value.body);
        let data = self.funcs.pop().unwrap();
        f.locals = data.locals;
        f.upvalues = data.upvals;
    }
}
/// Resolves all locals used in the given function. Recursively resolves all
/// blocks and nested functions found inside.
///
/// Inner blocks may access locals declared earlier in their enclosing block,
/// while the given function itself cannot reach any outer locals. Named
/// variables (`VNamed`) that resolve to neither a local nor an upvalue are
/// rewritten as `VResGlobal` accesses through the resolved environment (`_ENV`).
pub fn resolve_func(f: &mut Function) {
    let mut resolver = Resolver { funcs: vec![] };
    resolver.visit_func(f);
}
#[cfg(test)]
mod tests {
    use super::*;
    use parser::parse_main;
    use span::Spanned;
    use program::UpvalDesc;
    use ast::*;
    use std::default::Default;

    /// Builds a `HashMap<String, usize>` from `name: id` pairs; used to state
    /// the expected `localmap` of a block.
    macro_rules! localmap {
        () => {{
            ::std::collections::HashMap::<String, usize>::new()
        }};
        ( $($key:ident: $e:expr),* ) => {{
            let mut m = ::std::collections::HashMap::<String, usize>::new();
            $( m.insert(stringify!($key).to_string(), $e); )*
            m
        }};
    }

    /// Locals, shadowing and fallback-to-global (`VResGlobal` through `_ENV`)
    /// resolution within a single function.
    #[test]
    fn simple() {
        let mut f = parse_main(r#"
i = 0
local a
do
local i
local j = i
i[j] = a
end
j = i
"#).unwrap();
        resolve_func(&mut f);
        assert_eq!(f.value.body, Block::with_locals(vec![
            Spanned::default(SAssign(
                vec![Spanned::default(VResGlobal(Box::new(Spanned::default(VUpval(0))), "i".to_string()))],
                vec![Spanned::default(ELit(TInt(0)))],
            )),
            Spanned::default(SDecl(vec!["a".to_string()], vec![])),
            Spanned::default(SDo(Block::with_locals(vec![
                Spanned::default(SDecl(vec!["i".to_string()], vec![])),
                Spanned::default(SDecl(vec!["j".to_string()], vec![
                    Spanned::default(EVar(Spanned::default(VLocal(1)))),
                ])),
                Spanned::default(SAssign(
                    vec![Spanned::default(VIndex(
                        Box::new(Spanned::default(VLocal(1))),
                        Box::new(Spanned::default(EVar(Spanned::default(VLocal(2)))))
                    ))],
                    vec![Spanned::default(EVar(Spanned::default(VLocal(0))))],
                )),
            ], Default::default(), localmap!{ i: 1, j: 2 }))),
            Spanned::default(SAssign(
                vec![Spanned::default(
                    VResGlobal(Box::new(Spanned::default(VUpval(0))), "j".to_string())
                )],
                vec![Spanned::default(EVar(Spanned::default(
                    VResGlobal(Box::new(Spanned::default(VUpval(0))), "i".to_string())
                )))],
            )),
        ], Default::default(), localmap!{ a: 0 }));
    }

    /// Upvalue chaining across nested functions, including a shadowing
    /// `local f = f` and a chained upvalue through an intermediate function.
    #[test]
    fn complex() {
        let mut f = parse_main(r#"
local a
local function f()
f = nil // upvalue
local f = f // both
local function g() f = a end // chained upvalue + normal upvalue
end
"#).unwrap();
        resolve_func(&mut f);
        assert_eq!(f.value, _Function {
            params: vec![],
            varargs: true,
            locals: vec!["a".to_string(), "f".to_string()],
            upvalues: vec![UpvalDesc::Upval(0)], // `_ENV`; the UpvalDesc is ignored
            body: Block::with_locals(vec![
                Spanned::default(SDecl(vec!["a".to_string()], vec![])),
                Spanned::default(SLFunc("f".to_string(), Spanned::default(_Function {
                    params: vec![],
                    varargs: false,
                    locals: vec!["f".to_string(), "g".to_string()],
                    upvalues: vec![UpvalDesc::Local(1), UpvalDesc::Local(0)],
                    body: Block::with_locals(vec![
                        Spanned::default(SAssign(vec![
                            Spanned::default(VUpval(0))
                        ], vec![
                            Spanned::default(ELit(TNil))
                        ])),
                        Spanned::default(SDecl(vec!["f".to_string()], vec![
                            Spanned::default(EVar(Spanned::default(VUpval(0))))
                        ])),
                        Spanned::default(SLFunc("g".to_string(), Spanned::default(_Function {
                            params: vec![],
                            varargs: false,
                            locals: vec![],
                            upvalues: vec![UpvalDesc::Local(0), UpvalDesc::Upval(1)],
                            body: Block::with_locals(vec![
                                Spanned::default(SAssign(vec![
                                    Spanned::default(VUpval(0))
                                ], vec![
                                    Spanned::default(EVar(Spanned::default(VUpval(1))))
                                ])),
                            ], Default::default(), localmap!{}),
                        }))),
                    ], Default::default(), localmap!{ f: 0, g: 1 }),
                }))),
            ], Default::default(), localmap!{ a: 0, f: 1 }),
        });
    }

    /// Assignment to `_ENV` itself must resolve to the implicit environment upvalue.
    #[test]
    fn env_simple() {
        let mut f = parse_main("_ENV = 0").unwrap();
        resolve_func(&mut f);
        assert_eq!(f.value.body, Block::new(vec![
            Spanned::default(SAssign(vec![
                Spanned::default(VUpval(0))
            ], vec![
                Spanned::default(ELit(TInt(0)))
            ])),
        ], Default::default()));
    }
}
Added another test for environment resolution
//! This module contains the logic used to resolve identifiers to their locals or globals.
//!
//! Internally, this module also performs the register allocation for all locals. This makes it
//! easy to reference upvalues (see `UpvalDesc` in program.rs) and makes the bytecode emitter do
//! less work.
use ast::*;
use visit::*;
use program::UpvalDesc;
use span::{Span, Spanned};
use std::mem;
use std::collections::HashMap;
use std::default::Default;
/// Data used by the resolver, associated to a function
struct FuncData {
    /// All locals declared in the function, in declaration order; a local's
    /// id is its index in this vector.
    locals: Vec<String>,
    /// Upvalues referenced from within the function
    upvals: Vec<UpvalDesc>,
    /// Names of the upvalues, parallel to `upvals`.
    upval_names: Vec<String>,
    /// Maps known upvalue names to their id
    upval_map: HashMap<String, usize>,
    /// Stack of lists of locals. Tracks all active scopes and thus all available locals.
    scopes: Vec<Vec<usize>>,
}
impl FuncData {
    /// Creates a fresh `FuncData` whose initial locals are the function's parameters.
    fn new(locals: Vec<String>) -> FuncData {
        FuncData {
            locals: locals,
            upvals: vec![],
            upval_names: vec![],
            upval_map: Default::default(),
            scopes: vec![],
        }
    }

    /// Registers an upvalue and returns its id
    fn add_upval(&mut self, name: String, desc: UpvalDesc) -> usize {
        let id = self.upvals.len();
        self.upvals.push(desc);
        self.upval_names.push(name.clone());
        self.upval_map.insert(name, id);
        id
    }

    /// Finds a reachable local (no upvalues are considered) with the given name and returns its id
    ///
    /// Scans the scope stack from the innermost scope outwards so shadowing
    /// locals win. Returns `None` when no scope declares the name; unlike the
    /// previous `scopes.len() - 1` index arithmetic, this cannot underflow and
    /// panic when no scope is active.
    fn get_local(&self, name: &String) -> Option<usize> {
        for scope in self.scopes.iter().rev() {
            for &id in scope {
                if self.locals[id] == *name {
                    return Some(id);
                }
            }
        }
        None
    }
}
/// A resolver will resolve any `VNamed` references in a function
struct Resolver {
    /// Stack of active functions; the innermost function is the last element.
    funcs: Vec<FuncData>,
}
impl Resolver {
/// Finds an upvalue in a parent function
fn find_upval(&mut self, name: &String, userlvl: usize) -> Option<usize> {
if userlvl == 0 {
return None;
}
// search parent functions for locals / upvalues that match
if let Some(id) = self.funcs[userlvl - 1].get_local(name) {
return Some(self.funcs[userlvl].add_upval(name.clone(), UpvalDesc::Local(id)));
} else {
if let Some(id) = self.find_upval(name, userlvl - 1) {
return Some(self.funcs[userlvl].add_upval(name.clone(), UpvalDesc::Upval(id)));
} else {
return None;
}
}
None
}
/// Searches for an upvalue with the given name
fn get_upval(&mut self, name: &String) -> Option<usize> {
let level = self.funcs.len() - 1;
{
// search known upvals first
let data = &self.funcs[level];
if let Some(id) = data.upval_map.get(name) {
return Some(*id);
}
}
self.find_upval(name, level)
}
/// Declares a new local with the given name in the currently active scope and function.
fn add_local(&mut self, name: String) -> usize {
let level = self.funcs.len() - 1;
// this might be a redeclaration, in which case we ignore it (the emitter handles it)
let func = &mut self.funcs[level];
let scopelvl = func.scopes.len() - 1;
{
let scope = &func.scopes[scopelvl];
for id in scope {
let lname = &func.locals[*id];
if *lname == name {
// already declared
return *id;
}
}
}
// create new local
let scope = &mut func.scopes[scopelvl];
let id = func.locals.len();
func.locals.push(name);
scope.push(id);
id
}
/// Resolves a block and declares a list of locals inside of it
fn resolve_block(&mut self, b: &mut Block, locals: Vec<String>) {
let level = self.funcs.len() - 1;
self.funcs[level].scopes.push(vec![]);
for name in locals {
self.add_local(name);
}
walk_block(b, self);
let data = &mut self.funcs[level];
let scope = data.scopes.pop().unwrap();
for id in scope {
let name = &data.locals[id];
b.localmap.insert(name.clone(), id);
}
}
/// Resolves the given named variable
fn resolve_var(&mut self, name: &String, span: Span) -> _Variable {
// first, try a local with that name
if let Some(id) = self.funcs[self.funcs.len() - 1].get_local(&name) {
VLocal(id)
} else {
// find an upvalue
if let Some(id) = self.get_upval(&name) {
VUpval(id)
} else {
// fall back to global access; resolve environment
let envvar = Box::new(Spanned::new(span,
self.resolve_var(&"_ENV".to_string(), span)));
VResGlobal(envvar, name.clone())
}
}
}
}
impl Visitor for Resolver {
    /// Resolves statements that declare locals or introduce scoped blocks.
    fn visit_stmt(&mut self, s: &mut Stmt) {
        match s.value {
            SDecl(ref mut names, ref mut exprs) => {
                // Resolve the initializer expressions *before* declaring the
                // names, so `local i = i` refers to the outer `i`.
                for expr in exprs {
                    walk_expr(expr, self);
                }
                for name in names {
                    self.add_local(name.clone());
                }
            },
            SLFunc(ref mut name, ref mut f) => {
                // Declare the function's own name first so its body can recurse.
                self.add_local(name.clone());
                self.visit_func(f);
            },
            SFor{ref var, ref mut body, ..} => {
                // The loop variable is a local of the loop body's scope.
                self.resolve_block(body, vec![var.clone()]);
            },
            SForIn{ref vars, ref mut body, ..} => {
                self.resolve_block(body, vars.clone());
            },
            _ => {
                walk_stmt(s, self);
            }
        }
    }
    /// Rewrites `VNamed` variables in place with their resolved form.
    fn visit_var(&mut self, v: &mut Variable) {
        {
            let ref mut var = v.value;
            if let VNamed(..) = *var {
                let newvar = if let VNamed(ref name) = *var {
                    self.resolve_var(name, v.span)
                } else { unreachable!(); };
                mem::replace(var, newvar);
                return;
            }
        }
        walk_var(v, self);
    }
    fn visit_block(&mut self, b: &mut Block) {
        self.resolve_block(b, vec![]);
    }
    /// Pushes a `FuncData` for the function, resolves its body and stores the
    /// collected locals and upvalues back into the AST node.
    fn visit_func(&mut self, f: &mut Function) {
        let mut data = FuncData::new(f.params.clone());
        if self.funcs.len() == 0 {
            // root function, add implicit _ENV upvalue
            data.add_upval("_ENV".to_string(), UpvalDesc::Upval(0)); // the UpvalDesc is ignored
        }
        self.funcs.push(data);
        self.visit_block(&mut f.value.body);
        let data = self.funcs.pop().unwrap();
        f.locals = data.locals;
        f.upvalues = data.upvals;
    }
}
/// Resolves all locals used in the given block. Recursively resolves all blocks found inside.
///
/// Allows blocks inside the given block to access locals declared within the parent block
/// (assuming they are declared before the block). Does not allow the given block to access outer
/// locals.
///
/// This also resolves any `VGlobal` to `VResGlobal` and resolves the function environments.
pub fn resolve_func(f: &mut Function) {
    let mut resolver = Resolver { funcs: vec![] };
    resolver.visit_func(f);
}
#[cfg(test)]
mod tests {
    use super::*;
    use parser::parse_main;
    use span::Spanned;
    use program::UpvalDesc;
    use ast::*;
    use std::default::Default;

    // Convenience constructor for the `HashMap<String, usize>` stored in a
    // block's `localmap` (maps a local's name to its slot id).
    macro_rules! localmap {
        () => {{
            ::std::collections::HashMap::<String, usize>::new()
        }};
        ( $($key:ident: $e:expr),* ) => {{
            let mut m = ::std::collections::HashMap::<String, usize>::new();
            $( m.insert(stringify!($key).to_string(), $e); )*
            m
        }};
    }

    // Globals resolve to `VResGlobal` through the implicit `_ENV` upvalue;
    // the `do` block's locals get their own slots and shadow outer names.
    #[test]
    fn simple() {
        let mut f = parse_main(r#"
i = 0
local a
do
local i
local j = i
i[j] = a
end
j = i
"#).unwrap();
        resolve_func(&mut f);
        assert_eq!(f.value.body, Block::with_locals(vec![
            Spanned::default(SAssign(
                vec![Spanned::default(VResGlobal(Box::new(Spanned::default(VUpval(0))), "i".to_string()))],
                vec![Spanned::default(ELit(TInt(0)))],
            )),
            Spanned::default(SDecl(vec!["a".to_string()], vec![])),
            Spanned::default(SDo(Block::with_locals(vec![
                Spanned::default(SDecl(vec!["i".to_string()], vec![])),
                Spanned::default(SDecl(vec!["j".to_string()], vec![
                    Spanned::default(EVar(Spanned::default(VLocal(1)))),
                ])),
                Spanned::default(SAssign(
                    vec![Spanned::default(VIndex(
                        Box::new(Spanned::default(VLocal(1))),
                        Box::new(Spanned::default(EVar(Spanned::default(VLocal(2)))))
                    ))],
                    vec![Spanned::default(EVar(Spanned::default(VLocal(0))))],
                )),
            ], Default::default(), localmap!{ i: 1, j: 2 }))),
            Spanned::default(SAssign(
                vec![Spanned::default(
                    VResGlobal(Box::new(Spanned::default(VUpval(0))), "j".to_string())
                )],
                vec![Spanned::default(EVar(Spanned::default(
                    VResGlobal(Box::new(Spanned::default(VUpval(0))), "i".to_string())
                )))],
            )),
        ], Default::default(), localmap!{ a: 0 }));
    }

    // Upvalue resolution across nested functions: `f` is captured as an
    // upvalue, then shadowed by a local, and `g` chains an upvalue upward
    // through its enclosing function.
    #[test]
    fn complex() {
        let mut f = parse_main(r#"
local a
local function f()
f = nil // upvalue
local f = f // both
local function g() f = a end // chained upvalue + normal upvalue
end
"#).unwrap();
        resolve_func(&mut f);
        assert_eq!(f.value, _Function {
            params: vec![],
            varargs: true,
            locals: vec!["a".to_string(), "f".to_string()],
            upvalues: vec![UpvalDesc::Upval(0)], // `_ENV`; the UpvalDesc is ignored
            body: Block::with_locals(vec![
                Spanned::default(SDecl(vec!["a".to_string()], vec![])),
                Spanned::default(SLFunc("f".to_string(), Spanned::default(_Function {
                    params: vec![],
                    varargs: false,
                    locals: vec!["f".to_string(), "g".to_string()],
                    upvalues: vec![UpvalDesc::Local(1), UpvalDesc::Local(0)],
                    body: Block::with_locals(vec![
                        Spanned::default(SAssign(vec![
                            Spanned::default(VUpval(0))
                        ], vec![
                            Spanned::default(ELit(TNil))
                        ])),
                        Spanned::default(SDecl(vec!["f".to_string()], vec![
                            Spanned::default(EVar(Spanned::default(VUpval(0))))
                        ])),
                        Spanned::default(SLFunc("g".to_string(), Spanned::default(_Function {
                            params: vec![],
                            varargs: false,
                            locals: vec![],
                            upvalues: vec![UpvalDesc::Local(0), UpvalDesc::Upval(1)],
                            body: Block::with_locals(vec![
                                Spanned::default(SAssign(vec![
                                    Spanned::default(VUpval(0))
                                ], vec![
                                    Spanned::default(EVar(Spanned::default(VUpval(1))))
                                ])),
                            ], Default::default(), localmap!{}),
                        }))),
                    ], Default::default(), localmap!{ f: 0, g: 1 }),
                }))),
            ], Default::default(), localmap!{ a: 0, f: 1 }),
        });
    }

    // Assigning to `_ENV` at the top level targets the implicit upvalue 0.
    #[test]
    fn env_simple() {
        let mut f = parse_main("_ENV = 0").unwrap();
        resolve_func(&mut f);
        assert_eq!(f.value.body, Block::new(vec![
            Spanned::default(SAssign(vec![
                Spanned::default(VUpval(0))
            ], vec![
                Spanned::default(ELit(TInt(0)))
            ])),
        ], Default::default()));
    }

    // A local `_ENV` shadows the implicit one: globals inside such a scope
    // resolve through the *local* (`VLocal`), while functions that only see
    // the outer `_ENV` capture it as an upvalue.
    #[test]
    fn env_complex() {
        let mut f = parse_main(r#"
_ENV = nil
local _ENV
local function f() local _ENV i = nil end
local function g() _ENV = nil end
local function h() local function h1() r = nil end end
"#).unwrap();
        resolve_func(&mut f);
        assert_eq!(f.value, _Function {
            params: vec![],
            varargs: true,
            locals: vec!["_ENV".to_string(), "f".to_string(), "g".to_string(), "h".to_string()],
            upvalues: vec![UpvalDesc::Upval(0)],
            body: Block::with_locals(vec![
                Spanned::default(SAssign(vec![
                    Spanned::default(VUpval(0))
                ], vec![
                    Spanned::default(ELit(TNil))
                ])),
                Spanned::default(SDecl(vec!["_ENV".to_string()], vec![])),
                Spanned::default(SLFunc("f".to_string(), Spanned::default(_Function {
                    params: vec![],
                    varargs: false,
                    locals: vec!["_ENV".to_string()],
                    upvalues: vec![],
                    body: Block::with_locals(vec![
                        Spanned::default(SDecl(vec!["_ENV".to_string()], vec![])),
                        Spanned::default(SAssign(vec![
                            Spanned::default(VResGlobal(
                                Box::new(Spanned::default(VLocal(0))), "i".to_string()
                            )),
                        ], vec![Spanned::default(ELit(TNil))])),
                    ], Default::default(), localmap!{ _ENV: 0 }),
                }))),
                Spanned::default(SLFunc("g".to_string(), Spanned::default(_Function {
                    params: vec![],
                    varargs: false,
                    locals: vec![],
                    upvalues: vec![UpvalDesc::Local(0)],
                    body: Block::with_locals(vec![
                        Spanned::default(SAssign(vec![
                            Spanned::default(VUpval(0))
                        ], vec![
                            Spanned::default(ELit(TNil))
                        ])),
                    ], Default::default(), localmap!{}),
                }))),
                Spanned::default(SLFunc("h".to_string(), Spanned::default(_Function {
                    params: vec![],
                    varargs: false,
                    locals: vec!["h1".to_string()],
                    upvalues: vec![UpvalDesc::Local(0)],
                    body: Block::with_locals(vec![
                        Spanned::default(SLFunc("h1".to_string(), Spanned::default(_Function {
                            params: vec![],
                            varargs: false,
                            locals: vec![],
                            upvalues: vec![UpvalDesc::Upval(0)],
                            body: Block::new(vec![
                                Spanned::default(SAssign(vec![
                                    Spanned::default(VResGlobal(
                                        Box::new(Spanned::default(VUpval(0))), "r".to_string()
                                    ))
                                ], vec![
                                    Spanned::default(ELit(TNil)),
                                ])),
                            ], Default::default()),
                        }))),
                    ], Default::default(), localmap!{ h1: 0 }),
                }))),
            ], Default::default(), localmap!{ _ENV: 0, f: 1, g: 2, h: 3 }),
        });
    }
}
|
//! Module with helpers for dealing with RFC 5322
use super::header::{Header, HeaderMap};
use super::rfc2047::decode_rfc2047;
/// Target line length used when folding long header lines
/// (RFC 5322 section 2.1.1 recommends at most 78 characters per line).
#[stable]
pub const MIME_LINE_LENGTH: usize = 78us;
/// Character classes used by the RFC 5322 parser below.
trait Rfc5322Character {
    /// Is considered a special character by RFC 5322 Section 3.2.3
    fn is_special(&self) -> bool;
    /// Is considered to be a VCHAR by RFC 5234 Appendix B.1
    fn is_vchar(&self) -> bool;
    /// Is considered to be field text as defined by RFC 5322 Section 3.6.8
    fn is_ftext(&self) -> bool;
    /// Is an atom character: any visible character that is not a special.
    fn is_atext(&self) -> bool {
        self.is_vchar() && !self.is_special()
    }
}
impl Rfc5322Character for char {
fn is_ftext(&self) -> bool {
match *self {
'!'...'9' | ';'...'~' => true,
_ => false,
}
}
fn is_special(&self) -> bool {
match *self {
'(' | ')' | '<' | '>' | '[' | ']' | ':' | ';' | '@' | '\\' | ',' | '.' | '\"' | ' ' => true,
_ => false
}
}
fn is_vchar(&self) -> bool {
match *self {
'!'...'~' => true,
_ => false,
}
}
}
/// RFC 5322 base parser for parsing
/// `atom`, `dot-atom`, `quoted-string`, `phrase`, `message`
///
/// This should prove useful for parsing other things that appear in RFC 5322,
/// as most are based off these core items.
///
/// It also implements a stack for tracking the position.
/// This allows the simple implementation of backtracking, by pushing the position
/// before a test and popping it if the test should fail.
#[unstable]
pub struct Rfc5322Parser<'s> {
    // The full input being parsed.
    s: &'s str,
    // Offset of the next unread character in `s` (a byte offset, as used by
    // `char_range_at`/`char_at`).
    pos: usize,
    // Saved positions for backtracking; see `push_position`/`pop_position`.
    pos_stack: Vec<usize>,
}
impl<'s> Rfc5322Parser<'s> {
/// Make a new parser, initialized with the given string.
#[unstable]
pub fn new(source: &'s str) -> Rfc5322Parser<'s> {
Rfc5322Parser {
s: source,
pos: 0us,
pos_stack: Vec::new(),
}
}
/// Push the current position onto the stack.
#[unstable]
pub fn push_position(&mut self) {
self.pos_stack.push(self.pos);
}
/// Move the position back to the last entry pushed
#[unstable]
pub fn pop_position(&mut self) {
match self.pos_stack.pop() {
Some(pos) => { self.pos = pos; },
None => panic!("Popped position stack too far"),
}
}
/// Consume a message from the input.
///
/// Returns as a map of the headers and the body text.
///
/// A message is defined as:
///
/// `fields = *field
/// body = text
/// message = fields CRLF body`
#[unstable]
pub fn consume_message(&mut self) -> Option<(HeaderMap, String)> {
let mut headers = HeaderMap::new();
while !self.eof() {
let header = self.consume_header();
if header.is_some() {
headers.insert(header.unwrap());
} else {
// Check end of headers as marked by CRLF
if !self.eof() && self.peek_linebreak() {
assert!(self.consume_linebreak());
}
break;
}
}
// Whatever remains is the body
let body = self.s.slice_chars(self.pos, self.s.len()).to_string();
self.pos = self.s.len();
Some((headers, body))
}
/// Consume a header from the input.
///
/// A header is defined as:
///
/// `ftext = "!".."9" / ";".."~"
/// field-name = 1*ftext
/// field = field-name *LWSP ":" unstructured`
#[unstable]
pub fn consume_header(&mut self) -> Option<Header> {
let last_pos = self.pos;
// Parse field-name
let field_name = self.consume_while(|c| { c.is_ftext() });
self.consume_linear_whitespace();
if field_name.len() == 0 || self.eof() || self.peek() != ':' {
// Fail to parse if we didn't see a field, we're at the end of input
// or we haven't just seen a ":"
self.pos = last_pos;
None
} else {
// Consume the ":" and any leading whitespace
self.consume_char();
self.consume_linear_whitespace();
let field_value = self.consume_unstructured();
assert!(self.consume_linebreak());
Some(Header::new(field_name, field_value))
}
}
/// Consume an unstructured from the input.
#[unstable]
pub fn consume_unstructured(&mut self) -> String {
let mut result = String::new();
while !self.eof() {
if self.peek_linebreak() {
// Check for folding whitespace, if it wasn't, then
// we're done parsing
if !self.consume_folding_whitespace() {
break;
}
}
result.push_str(self.consume_while(|c| {
c.is_vchar() || c == ' ' || c == '\t'
}).as_slice())
}
result
}
/// Consume folding whitespace.
///
/// This is a CRLF followed by one or more whitespace character.
///
/// Returns true if whitespace was consume
#[unstable]
pub fn consume_folding_whitespace(&mut self) -> bool {
// Remember where we were, in case this isn't folding whitespace
let current_position = self.pos;
let is_fws = if !self.eof() && self.consume_linebreak() {
match self.consume_char() {
' ' | '\t' => true,
_ => false,
}
} else {
false
};
if is_fws {
// This was a folding whitespace, so consume all linear whitespace
self.consume_linear_whitespace();
} else {
// Reset back if we didn't see a folding whitespace
self.pos = current_position;
}
is_fws
}
/// Consume a word from the input.
///
/// A word is defined as:
///
/// `word = atom / quoted-string`
///
/// If `allow_dot_atom` is true, then `atom` can be a `dot-atom` in this phrase.
#[unstable]
pub fn consume_word(&mut self, allow_dot_atom: bool) -> Option<String> {
if self.peek() == '"' {
// Word is a quoted string
self.consume_quoted_string()
} else if self.peek().is_atext() {
// Word is an atom.
self.consume_atom(allow_dot_atom)
} else {
// Is not a word!
None
}
}
/// Consume a phrase from the input.
///
/// A phrase is defined as:
///
/// `phrase = 1*word`
///
/// If `allow_dot_atom` is true, then `atom` can be a `dot-atom` in this phrase.
#[unstable]
pub fn consume_phrase(&mut self, allow_dot_atom: bool) -> Option<String> {
let mut phrase = String::new();
while !self.eof() {
self.consume_linear_whitespace();
let word = if self.peek() == '"' {
// Word is a quoted string
self.consume_quoted_string()
} else if self.peek().is_atext() {
self.consume_atom(allow_dot_atom)
} else {
// If it's not a quoted string, or an atom, it's no longer
// in a phrase, so stop.
break
};
if word.is_some() {
// Unwrap word so it lives long enough...
// XXX: word in this scope is `String`, in the parent scope, is `Option<String>`
let word = word.unwrap();
let w_slice = word.as_slice();
// RFC 2047 encoded words start with =?, end with ?=
let decoded_word =
if w_slice.starts_with("=?") && w_slice.ends_with("?=") {
match decode_rfc2047(w_slice) {
Some(w) => w,
None => w_slice.to_string(),
}
} else {
w_slice.to_string()
};
// Make sure we put a leading space on, if this isn't the first insertion
if phrase.len() > 0 {
phrase.push_str(" ");
}
phrase.push_str(decoded_word.as_slice());
} else {
return None
}
}
if phrase.len() > 0 {
Some(phrase)
} else {
None
}
}
/// Consume a quoted string from the input
#[unstable]
pub fn consume_quoted_string(&mut self) -> Option<String> {
if self.peek() != '"' {
// Fail if we were called wrong
None
} else {
let mut quoted_string = String::new();
let mut inside_escape = false;
let mut terminated = false;
// Consume the leading "
self.consume_char();
while !terminated && !self.eof() {
match self.peek() {
'\\' if !inside_escape => {
// If we were not already being escaped, consume the
// escape character and mark that we're being escaped.
self.consume_char();
inside_escape = true;
},
'"' if !inside_escape => {
// If this is a DQUOTE and we haven't seen an escape character,
// consume it and mark that we should break from the loop
self.consume_char();
terminated = true;
},
_ => {
// Any old character gets pushed in
quoted_string.push(self.consume_char());
// Clear any escape character state we have
inside_escape = false;
},
}
}
if inside_escape || !terminated {
// Return an error state if we're still expecting a character
None
} else {
Some(quoted_string)
}
}
}
/// Consume an atom from the input.
///
/// If `allow_dot` is true, then also allow '.' to be considered as an
/// atext character.
#[unstable]
pub fn consume_atom(&mut self, allow_dot: bool) -> Option<String> {
if !self.peek().is_atext() {
None
} else {
Some(self.consume_while(|c| {
c.is_atext() || (allow_dot && c == '.')
}))
}
}
/// Consume LWSP (Linear whitespace)
#[unstable]
pub fn consume_linear_whitespace(&mut self) {
self.consume_while(|c| { c == '\t' || c == ' ' });
}
/// Consume a single character from the input.
#[inline]
#[unstable]
pub fn consume_char(&mut self) -> char {
if self.eof() {
// TODO: Consider making this return an Option<char>
panic!("Consuming beyond end of input");
}
let ch_range = self.s.char_range_at(self.pos);
self.pos = ch_range.next;
ch_range.ch
}
// Consume a linebreak: \r\n, \r or \n
#[unstable]
pub fn consume_linebreak(&mut self) -> bool {
if self.eof() {
return false;
}
let start_pos = self.pos;
match self.consume_char() {
'\r' => {
// Try to consume a single \n following the \r
if !self.eof() && self.peek() == '\n' {
self.consume_char();
}
true
},
'\n' => true,
_ => { self.pos = start_pos; false }
}
}
// Peek at the current character and determine whether it's (part of) a linebreak
#[unstable]
pub fn peek_linebreak(&mut self) -> bool {
match self.peek() {
'\r' | '\n' => true,
_ => false
}
}
/// Consume a set of characters, each passed to `test` until this function
/// returns false.
///
/// The position after calling this function will be pointing to the character
/// which caused a false result from `test`.
///
/// Returns the string of characters that returned true for the test function.
#[inline]
#[unstable]
pub fn consume_while<F: Fn(char) -> bool>(&mut self, test: F) -> String {
let start_pos = self.pos;
while !self.eof() && test(self.peek()) {
self.consume_char();
}
self.s.slice(start_pos, self.pos).to_string()
}
/// Peek at the current character.
///
/// Note that this does not do any bounds checking.
#[inline]
#[unstable]
pub fn peek(&self) -> char {
self.s.char_at(self.pos)
}
/// Returns true if we have reached the end of the input.
#[inline]
#[unstable]
pub fn eof(&self) -> bool {
self.pos >= self.s.len()
}
}
/// Type for constructing RFC 5322 messages
#[experimental]
pub struct Rfc5322Builder {
    // The message text accumulated so far; retrieved via `result()`.
    result: String
}
impl Rfc5322Builder {
    /// Make a new builder, with an empty string
    #[experimental]
    pub fn new() -> Rfc5322Builder {
        Rfc5322Builder {
            result: "".to_string(),
        }
    }
    /// Returns the message text built so far.
    #[experimental]
    pub fn result(&self) -> &String {
        &self.result
    }
    /// Appends `s` to the result verbatim, without any folding.
    #[experimental]
    pub fn emit_raw(&mut self, s: &str) {
        self.result.push_str(s);
    }
    /// Appends `s` to the result, folding it onto continuation lines
    /// (CRLF + TAB) at the last space seen before `MIME_LINE_LENGTH`
    /// characters accumulate on a line.
    ///
    /// NOTE(review): assumes a space occurs on each stretch of
    /// `MIME_LINE_LENGTH` characters; an unbroken run longer than the limit
    /// would fold at a stale `last_space` — confirm inputs make this safe.
    #[experimental]
    pub fn emit_folded(&mut self, s: &str) {
        let mut pos = 0us;         // byte offset of the current character
        let mut cur_len = 0us;     // characters counted since the last reset
        let mut last_space = 0us;  // byte offset of the most recent space
        let mut last_cut = 0us;    // byte offset where the emitted prefix ends
        while pos < s.len() {
            let c_range = s.char_range_at(pos);
            let c = c_range.ch;
            match c {
                ' ' => { last_space = pos; },
                '\r' => { cur_len = 0; },
                '\n' => { cur_len = 0; },
                _ => {},
            }
            cur_len += 1;
            // We've reached our line length, so
            if cur_len >= MIME_LINE_LENGTH {
                // Emit the string from the last place we cut it to the
                // last space that we saw
                self.emit_raw(s.slice(last_cut, last_space));
                // ... and get us ready to put out the continuation
                self.emit_raw("\r\n\t");
                // Reset our counters
                cur_len = 0;
                last_cut = s.char_range_at(last_space).next;
            }
            pos = c_range.next;
        }
        // Finally, emit everything left in the string
        self.emit_raw(s.slice_from(last_cut));
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // One phrase-parsing case: raw input and the expected decoded phrase.
    struct PhraseTestCase<'s> {
        input: &'s str,
        output: &'s str,
        name: &'s str,
    }

    // Exercises `consume_phrase` over quoted strings, atoms and
    // RFC 2047 encoded words (and mixes of all three).
    #[test]
    fn test_consume_phrase() {
        let tests = [
            PhraseTestCase {
                input: "\"test phrase\"", output: "test phrase",
                name: "Simple quoted-string"
            },
            PhraseTestCase {
                input: "\"test \\\"phrase\\\"\"", output: "test \"phrase\"",
                name: "quoted-string with escape character"
            },
            PhraseTestCase {
                input: "\"=?utf-8?q?encoded=20q-string?=\"", output: "encoded q-string",
                name: "Encoded quoted-string"
            },
            PhraseTestCase {
                input: "atom test", output: "atom test",
                name: "Collection of atoms"
            },
            PhraseTestCase {
                input: "=?utf-8?q?encoded=20atom?=", output: "encoded atom",
                name: "Encoded atom"
            },
            PhraseTestCase {
                input: "Mix of atoms \"and quoted strings\"", output: "Mix of atoms and quoted strings",
                name: "Mix of atoms and quoted strings"
            },
            PhraseTestCase {
                input: "=?utf-8?q?encoded=20atoms?= mixed with \"unencoded\" \"=?utf-8?b?YW5kIGVuY29kZWQgcS1zdHJpbmdz?=\"",
                output: "encoded atoms mixed with unencoded and encoded q-strings",
                name: "Mix of atoms, q-strings of differing encodings"
            },
            PhraseTestCase {
                input: "\"John Smith\" <test@example.org>", output: "John Smith",
                name: "Stop consuming phrase at \"special\" character",
            }
        ];
        for t in tests.iter() {
            let mut p = Rfc5322Parser::new(t.input);
            let phrase = p.consume_phrase(false);
            assert!(phrase.is_some(), format!("{} returned Some", t.name));
            let test_name = format!("{} == {} for {}", phrase.clone().unwrap(), t.output, t.name);
            assert!(phrase.unwrap() == t.output.to_string(), test_name);
        }
    }

    // One message-parsing case: raw input, expected (name, value) headers
    // and expected body text.
    struct MessageTestCase<'s> {
        input: &'s str,
        headers: Vec<(&'s str, &'s str)>,
        body: &'s str,
    }

    // Exercises `consume_message`: header/body split, multiple headers and
    // folding-whitespace unfolding (tab and space continuations).
    #[test]
    fn test_consume_message() {
        let tests = vec![
            MessageTestCase {
                input: "From: \"Joe Blogs\" <joe@example.org>\r\n\r\nBody",
                headers: vec![
                    ("From", "\"Joe Blogs\" <joe@example.org>"),
                ],
                body: "Body",
            },
            MessageTestCase {
                input: "From: \"Joe Blogs\" <joe@example.org>\r\n\r\nMultiline\r\nBody",
                headers: vec![
                    ("From", "\"Joe Blogs\" <joe@example.org>"),
                ],
                body: "Multiline\r\nBody",
            },
            MessageTestCase {
                input: "From: \"Joe Blogs\" <joe@example.org>\r\nTo: \"John Doe\" <john@example.org>\r\n\r\nMultiple headers",
                headers: vec![
                    ("From", "\"Joe Blogs\" <joe@example.org>"),
                    ("To", "\"John Doe\" <john@example.org>"),
                ],
                body: "Multiple headers",
            },
            MessageTestCase {
                input: "Folded-Header: Some content that is \r\n\t wrapped with a tab.\r\n\r\nFolding whitespace test",
                headers: vec![
                    ("Folded-Header", "Some content that is wrapped with a tab."),
                ],
                body: "Folding whitespace test",
            },
            MessageTestCase {
                input: "Folded-Header: Some content that is \r\n wrapped with spaces.\r\n\r\nFolding whitespace test",
                headers: vec![
                    ("Folded-Header", "Some content that is wrapped with spaces."),
                ],
                body: "Folding whitespace test",
            },
        ];
        for test in tests.iter() {
            let mut p = Rfc5322Parser::new(test.input);
            let message = p.consume_message();
            match message {
                Some((headers, body)) => {
                    assert_eq!(body, test.body.to_string());
                    for &(header_title, header_value) in test.headers.iter() {
                        let matching_headers = headers.find(&header_title.to_string()).unwrap();
                        assert!(matching_headers.iter().filter(|h| {
                            let val: String = h.get_value().unwrap();
                            val == header_value.to_string()
                        }).count() > 0);
                    }
                },
                None => panic!("Failed to parse message"),
            };
        }
    }

    // Exercises `Rfc5322Builder::emit_folded`: long lines are folded at the
    // last space before `MIME_LINE_LENGTH` with a CRLF + TAB continuation.
    #[test]
    fn test_builder_folding() {
        struct BuildFoldTest<'s> {
            input: &'s str,
            expected: &'s str,
        }
        let tests = vec![
            BuildFoldTest {
                input: "A long line that should get folded on a space at some point around here, possibly at this point.",
                expected: "A long line that should get folded on a space at some point around here,\r\n\
                    \tpossibly at this point.",
            },
            BuildFoldTest {
                input: "A long line that should get folded on a space at some point around here, possibly at this point. And yet more content that will get folded onto another line.",
                expected: "A long line that should get folded on a space at some point around here,\r\n\
                    \tpossibly at this point. And yet more content that will get folded onto another\r\n\
                    \tline.",
            },
        ];
        for test in tests.into_iter() {
            let mut gen = Rfc5322Builder::new();
            gen.emit_folded(test.input);
            assert_eq!(gen.result(), &test.expected.to_string());
        }
    }
}
Add unit test for "\n" instead of "\r\n" in RFC5322 parsing.
This (and the commit cherry picked before it) fixes #11
//! Module with helpers for dealing with RFC 5322
use super::header::{Header, HeaderMap};
use super::rfc2047::decode_rfc2047;
/// Target line length used when folding long header lines
/// (RFC 5322 section 2.1.1 recommends at most 78 characters per line).
#[stable]
pub const MIME_LINE_LENGTH: usize = 78us;
/// Character classes used by the RFC 5322 parser below.
trait Rfc5322Character {
    /// Is considered a special character by RFC 5322 Section 3.2.3
    fn is_special(&self) -> bool;
    /// Is considered to be a VCHAR by RFC 5234 Appendix B.1
    fn is_vchar(&self) -> bool;
    /// Is considered to be field text as defined by RFC 5322 Section 3.6.8
    fn is_ftext(&self) -> bool;
    /// Is an atom character: any visible character that is not a special.
    fn is_atext(&self) -> bool {
        self.is_vchar() && !self.is_special()
    }
}
impl Rfc5322Character for char {
fn is_ftext(&self) -> bool {
match *self {
'!'...'9' | ';'...'~' => true,
_ => false,
}
}
fn is_special(&self) -> bool {
match *self {
'(' | ')' | '<' | '>' | '[' | ']' | ':' | ';' | '@' | '\\' | ',' | '.' | '\"' | ' ' => true,
_ => false
}
}
fn is_vchar(&self) -> bool {
match *self {
'!'...'~' => true,
_ => false,
}
}
}
/// RFC 5322 base parser for parsing
/// `atom`, `dot-atom`, `quoted-string`, `phrase`, `message`
///
/// This should prove useful for parsing other things that appear in RFC 5322,
/// as most are based off these core items.
///
/// It also implements a stack for tracking the position.
/// This allows the simple implementation of backtracking, by pushing the position
/// before a test and popping it if the test should fail.
#[unstable]
pub struct Rfc5322Parser<'s> {
    // The full input being parsed.
    s: &'s str,
    // Offset of the next unread character in `s` (a byte offset, as used by
    // `char_range_at`/`char_at`).
    pos: usize,
    // Saved positions for backtracking; see `push_position`/`pop_position`.
    pos_stack: Vec<usize>,
}
impl<'s> Rfc5322Parser<'s> {
/// Make a new parser, initialized with the given string.
#[unstable]
pub fn new(source: &'s str) -> Rfc5322Parser<'s> {
Rfc5322Parser {
s: source,
pos: 0us,
pos_stack: Vec::new(),
}
}
/// Push the current position onto the stack.
#[unstable]
pub fn push_position(&mut self) {
self.pos_stack.push(self.pos);
}
/// Move the position back to the last entry pushed
#[unstable]
pub fn pop_position(&mut self) {
match self.pos_stack.pop() {
Some(pos) => { self.pos = pos; },
None => panic!("Popped position stack too far"),
}
}
/// Consume a message from the input.
///
/// Returns as a map of the headers and the body text.
///
/// A message is defined as:
///
/// `fields = *field
/// body = text
/// message = fields CRLF body`
#[unstable]
pub fn consume_message(&mut self) -> Option<(HeaderMap, String)> {
let mut headers = HeaderMap::new();
while !self.eof() {
let header = self.consume_header();
if header.is_some() {
headers.insert(header.unwrap());
} else {
// Check end of headers as marked by CRLF
if !self.eof() && self.peek_linebreak() {
assert!(self.consume_linebreak());
}
break;
}
}
// Whatever remains is the body
let body = self.s.slice_chars(self.pos, self.s.len()).to_string();
self.pos = self.s.len();
Some((headers, body))
}
/// Consume a header from the input.
///
/// A header is defined as:
///
/// `ftext = "!".."9" / ";".."~"
/// field-name = 1*ftext
/// field = field-name *LWSP ":" unstructured`
#[unstable]
pub fn consume_header(&mut self) -> Option<Header> {
let last_pos = self.pos;
// Parse field-name
let field_name = self.consume_while(|c| { c.is_ftext() });
self.consume_linear_whitespace();
if field_name.len() == 0 || self.eof() || self.peek() != ':' {
// Fail to parse if we didn't see a field, we're at the end of input
// or we haven't just seen a ":"
self.pos = last_pos;
None
} else {
// Consume the ":" and any leading whitespace
self.consume_char();
self.consume_linear_whitespace();
let field_value = self.consume_unstructured();
assert!(self.consume_linebreak());
Some(Header::new(field_name, field_value))
}
}
/// Consume an unstructured from the input.
#[unstable]
pub fn consume_unstructured(&mut self) -> String {
let mut result = String::new();
while !self.eof() {
if self.peek_linebreak() {
// Check for folding whitespace, if it wasn't, then
// we're done parsing
if !self.consume_folding_whitespace() {
break;
}
}
result.push_str(self.consume_while(|c| {
c.is_vchar() || c == ' ' || c == '\t'
}).as_slice())
}
result
}
/// Consume folding whitespace.
///
/// This is a CRLF followed by one or more whitespace character.
///
/// Returns true if whitespace was consume
#[unstable]
pub fn consume_folding_whitespace(&mut self) -> bool {
// Remember where we were, in case this isn't folding whitespace
let current_position = self.pos;
let is_fws = if !self.eof() && self.consume_linebreak() {
match self.consume_char() {
' ' | '\t' => true,
_ => false,
}
} else {
false
};
if is_fws {
// This was a folding whitespace, so consume all linear whitespace
self.consume_linear_whitespace();
} else {
// Reset back if we didn't see a folding whitespace
self.pos = current_position;
}
is_fws
}
/// Consume a word from the input.
///
/// A word is defined as:
///
/// `word = atom / quoted-string`
///
/// If `allow_dot_atom` is true, then `atom` can be a `dot-atom` in this phrase.
#[unstable]
pub fn consume_word(&mut self, allow_dot_atom: bool) -> Option<String> {
if self.peek() == '"' {
// Word is a quoted string
self.consume_quoted_string()
} else if self.peek().is_atext() {
// Word is an atom.
self.consume_atom(allow_dot_atom)
} else {
// Is not a word!
None
}
}
/// Consume a phrase from the input.
///
/// A phrase is defined as:
///
/// `phrase = 1*word`
///
/// If `allow_dot_atom` is true, then `atom` can be a `dot-atom` in this phrase.
#[unstable]
pub fn consume_phrase(&mut self, allow_dot_atom: bool) -> Option<String> {
let mut phrase = String::new();
while !self.eof() {
self.consume_linear_whitespace();
let word = if self.peek() == '"' {
// Word is a quoted string
self.consume_quoted_string()
} else if self.peek().is_atext() {
self.consume_atom(allow_dot_atom)
} else {
// If it's not a quoted string, or an atom, it's no longer
// in a phrase, so stop.
break
};
if word.is_some() {
// Unwrap word so it lives long enough...
// XXX: word in this scope is `String`, in the parent scope, is `Option<String>`
let word = word.unwrap();
let w_slice = word.as_slice();
// RFC 2047 encoded words start with =?, end with ?=
let decoded_word =
if w_slice.starts_with("=?") && w_slice.ends_with("?=") {
match decode_rfc2047(w_slice) {
Some(w) => w,
None => w_slice.to_string(),
}
} else {
w_slice.to_string()
};
// Make sure we put a leading space on, if this isn't the first insertion
if phrase.len() > 0 {
phrase.push_str(" ");
}
phrase.push_str(decoded_word.as_slice());
} else {
return None
}
}
if phrase.len() > 0 {
Some(phrase)
} else {
None
}
}
/// Consume a quoted string from the input
#[unstable]
pub fn consume_quoted_string(&mut self) -> Option<String> {
if self.peek() != '"' {
// Fail if we were called wrong
None
} else {
let mut quoted_string = String::new();
let mut inside_escape = false;
let mut terminated = false;
// Consume the leading "
self.consume_char();
while !terminated && !self.eof() {
match self.peek() {
'\\' if !inside_escape => {
// If we were not already being escaped, consume the
// escape character and mark that we're being escaped.
self.consume_char();
inside_escape = true;
},
'"' if !inside_escape => {
// If this is a DQUOTE and we haven't seen an escape character,
// consume it and mark that we should break from the loop
self.consume_char();
terminated = true;
},
_ => {
// Any old character gets pushed in
quoted_string.push(self.consume_char());
// Clear any escape character state we have
inside_escape = false;
},
}
}
if inside_escape || !terminated {
// Return an error state if we're still expecting a character
None
} else {
Some(quoted_string)
}
}
}
/// Consume an atom from the input.
///
/// If `allow_dot` is true, then also allow '.' to be considered as an
/// atext character.
#[unstable]
pub fn consume_atom(&mut self, allow_dot: bool) -> Option<String> {
if !self.peek().is_atext() {
None
} else {
Some(self.consume_while(|c| {
c.is_atext() || (allow_dot && c == '.')
}))
}
}
/// Consume LWSP (Linear whitespace)
#[unstable]
pub fn consume_linear_whitespace(&mut self) {
self.consume_while(|c| { c == '\t' || c == ' ' });
}
/// Consume a single character from the input.
#[inline]
#[unstable]
pub fn consume_char(&mut self) -> char {
if self.eof() {
// TODO: Consider making this return an Option<char>
panic!("Consuming beyond end of input");
}
let ch_range = self.s.char_range_at(self.pos);
self.pos = ch_range.next;
ch_range.ch
}
// Consume a linebreak: \r\n, \r or \n
#[unstable]
pub fn consume_linebreak(&mut self) -> bool {
if self.eof() {
return false;
}
let start_pos = self.pos;
match self.consume_char() {
'\r' => {
// Try to consume a single \n following the \r
if !self.eof() && self.peek() == '\n' {
self.consume_char();
}
true
},
'\n' => true,
_ => { self.pos = start_pos; false }
}
}
// Peek at the current character and determine whether it's (part of) a linebreak
#[unstable]
pub fn peek_linebreak(&mut self) -> bool {
match self.peek() {
'\r' | '\n' => true,
_ => false
}
}
/// Consume a set of characters, each passed to `test` until this function
/// returns false.
///
/// The position after calling this function will be pointing to the character
/// which caused a false result from `test`.
///
/// Returns the string of characters that returned true for the test function.
#[inline]
#[unstable]
pub fn consume_while<F: Fn(char) -> bool>(&mut self, test: F) -> String {
let start_pos = self.pos;
while !self.eof() && test(self.peek()) {
self.consume_char();
}
self.s.slice(start_pos, self.pos).to_string()
}
/// Peek at the current character.
///
/// Note that this does not do any bounds checking.
#[inline]
#[unstable]
pub fn peek(&self) -> char {
self.s.char_at(self.pos)
}
/// Returns true if we have reached the end of the input.
#[inline]
#[unstable]
pub fn eof(&self) -> bool {
self.pos >= self.s.len()
}
}
/// Type for constructing RFC 5322 messages
#[experimental]
pub struct Rfc5322Builder {
    // The message text accumulated so far; retrieved via `result()`.
    result: String
}
impl Rfc5322Builder {
    /// Make a new builder, with an empty string
    #[experimental]
    pub fn new() -> Rfc5322Builder {
        Rfc5322Builder {
            result: "".to_string(),
        }
    }
    /// Returns the message text built so far.
    #[experimental]
    pub fn result(&self) -> &String {
        &self.result
    }
    /// Appends `s` to the result verbatim, without any folding.
    #[experimental]
    pub fn emit_raw(&mut self, s: &str) {
        self.result.push_str(s);
    }
    /// Appends `s` to the result, folding it onto continuation lines
    /// (CRLF + TAB) at the last space seen before `MIME_LINE_LENGTH`
    /// characters accumulate on a line.
    ///
    /// NOTE(review): assumes a space occurs on each stretch of
    /// `MIME_LINE_LENGTH` characters; an unbroken run longer than the limit
    /// would fold at a stale `last_space` — confirm inputs make this safe.
    #[experimental]
    pub fn emit_folded(&mut self, s: &str) {
        let mut pos = 0us;         // byte offset of the current character
        let mut cur_len = 0us;     // characters counted since the last reset
        let mut last_space = 0us;  // byte offset of the most recent space
        let mut last_cut = 0us;    // byte offset where the emitted prefix ends
        while pos < s.len() {
            let c_range = s.char_range_at(pos);
            let c = c_range.ch;
            match c {
                ' ' => { last_space = pos; },
                '\r' => { cur_len = 0; },
                '\n' => { cur_len = 0; },
                _ => {},
            }
            cur_len += 1;
            // We've reached our line length, so
            if cur_len >= MIME_LINE_LENGTH {
                // Emit the string from the last place we cut it to the
                // last space that we saw
                self.emit_raw(s.slice(last_cut, last_space));
                // ... and get us ready to put out the continuation
                self.emit_raw("\r\n\t");
                // Reset our counters
                cur_len = 0;
                last_cut = s.char_range_at(last_space).next;
            }
            pos = c_range.next;
        }
        // Finally, emit everything left in the string
        self.emit_raw(s.slice_from(last_cut));
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // One input/expected-output pair for `consume_phrase`; `name` labels
    // the case in assertion messages.
    struct PhraseTestCase<'s> {
        input: &'s str,
        output: &'s str,
        name: &'s str,
    }

    // Exercises quoted-strings, atoms, RFC 2047 encoded-words and mixes of
    // all three, plus stopping at a "special" character.
    #[test]
    fn test_consume_phrase() {
        let tests = [
            PhraseTestCase {
                input: "\"test phrase\"", output: "test phrase",
                name: "Simple quoted-string"
            },
            PhraseTestCase {
                input: "\"test \\\"phrase\\\"\"", output: "test \"phrase\"",
                name: "quoted-string with escape character"
            },
            PhraseTestCase {
                input: "\"=?utf-8?q?encoded=20q-string?=\"", output: "encoded q-string",
                name: "Encoded quoted-string"
            },
            PhraseTestCase {
                input: "atom test", output: "atom test",
                name: "Collection of atoms"
            },
            PhraseTestCase {
                input: "=?utf-8?q?encoded=20atom?=", output: "encoded atom",
                name: "Encoded atom"
            },
            PhraseTestCase {
                input: "Mix of atoms \"and quoted strings\"", output: "Mix of atoms and quoted strings",
                name: "Mix of atoms and quoted strings"
            },
            PhraseTestCase {
                input: "=?utf-8?q?encoded=20atoms?= mixed with \"unencoded\" \"=?utf-8?b?YW5kIGVuY29kZWQgcS1zdHJpbmdz?=\"",
                output: "encoded atoms mixed with unencoded and encoded q-strings",
                name: "Mix of atoms, q-strings of differing encodings"
            },
            PhraseTestCase {
                input: "\"John Smith\" <test@example.org>", output: "John Smith",
                name: "Stop consuming phrase at \"special\" character",
            }
        ];
        for t in tests.iter() {
            let mut p = Rfc5322Parser::new(t.input);
            let phrase = p.consume_phrase(false);
            assert!(phrase.is_some(), format!("{} returned Some", t.name));
            let test_name = format!("{} == {} for {}", phrase.clone().unwrap(), t.output, t.name);
            assert!(phrase.unwrap() == t.output.to_string(), test_name);
        }
    }

    // A full message plus the headers and body we expect to parse out of it.
    struct MessageTestCase<'s> {
        input: &'s str,
        headers: Vec<(&'s str, &'s str)>,
        body: &'s str,
    }

    // Covers CRLF and bare-LF messages, multi-line bodies, multiple headers
    // and folded (continuation-line) headers.
    #[test]
    fn test_consume_message() {
        let tests = vec![
            MessageTestCase {
                input: "From: \"Joe Blogs\" <joe@example.org>\r\n\r\nBody",
                headers: vec![
                    ("From", "\"Joe Blogs\" <joe@example.org>"),
                ],
                body: "Body",
            },
            // Support parsing messages with \n instead of \r\n
            MessageTestCase {
                input: "From: \"Joe Blogs\" <joe@example.org>\n\nBody",
                headers: vec![
                    ("From", "\"Joe Blogs\" <joe@example.org>"),
                ],
                body: "Body",
            },
            MessageTestCase {
                input: "From: \"Joe Blogs\" <joe@example.org>\r\n\r\nMultiline\r\nBody",
                headers: vec![
                    ("From", "\"Joe Blogs\" <joe@example.org>"),
                ],
                body: "Multiline\r\nBody",
            },
            MessageTestCase {
                input: "From: \"Joe Blogs\" <joe@example.org>\r\nTo: \"John Doe\" <john@example.org>\r\n\r\nMultiple headers",
                headers: vec![
                    ("From", "\"Joe Blogs\" <joe@example.org>"),
                    ("To", "\"John Doe\" <john@example.org>"),
                ],
                body: "Multiple headers",
            },
            MessageTestCase {
                input: "Folded-Header: Some content that is \r\n\t wrapped with a tab.\r\n\r\nFolding whitespace test",
                headers: vec![
                    ("Folded-Header", "Some content that is wrapped with a tab."),
                ],
                body: "Folding whitespace test",
            },
            MessageTestCase {
                input: "Folded-Header: Some content that is \r\n wrapped with spaces.\r\n\r\nFolding whitespace test",
                headers: vec![
                    ("Folded-Header", "Some content that is wrapped with spaces."),
                ],
                body: "Folding whitespace test",
            },
        ];
        for test in tests.iter() {
            let mut p = Rfc5322Parser::new(test.input);
            let message = p.consume_message();
            match message {
                Some((headers, body)) => {
                    assert_eq!(body, test.body.to_string());
                    // Each expected header must appear with the expected value.
                    for &(header_title, header_value) in test.headers.iter() {
                        let matching_headers = headers.find(&header_title.to_string()).unwrap();
                        assert!(matching_headers.iter().filter(|h| {
                            let val: String = h.get_value().unwrap();
                            val == header_value.to_string()
                        }).count() > 0);
                    }
                },
                None => panic!("Failed to parse message"),
            };
        }
    }

    // Checks that `emit_folded` breaks long lines at spaces and emits
    // "\r\n\t" continuations.
    #[test]
    fn test_builder_folding() {
        struct BuildFoldTest<'s> {
            input: &'s str,
            expected: &'s str,
        }
        let tests = vec![
            BuildFoldTest {
                input: "A long line that should get folded on a space at some point around here, possibly at this point.",
                expected: "A long line that should get folded on a space at some point around here,\r\n\
                           \tpossibly at this point.",
            },
            BuildFoldTest {
                input: "A long line that should get folded on a space at some point around here, possibly at this point. And yet more content that will get folded onto another line.",
                expected: "A long line that should get folded on a space at some point around here,\r\n\
                           \tpossibly at this point. And yet more content that will get folded onto another\r\n\
                           \tline.",
            },
        ];
        for test in tests.into_iter() {
            let mut gen = Rfc5322Builder::new();
            gen.emit_folded(test.input);
            assert_eq!(gen.result(), &test.expected.to_string());
        }
    }
}
|
use channel;
use connection::Connection;
use protocol::{self, MethodFrame};
use table;
use table::TableEntry::{FieldTable, Bool, LongString};
use framing::Frame;
use amqp_error::{AMQPResult, AMQPError};
use std::sync::{Arc, Mutex};
use std::default::Default;
use std::collections::HashMap;
use std::sync::mpsc::{SyncSender, sync_channel};
use std::thread;
use std::cmp;
use url::{UrlParser, SchemeType};
// Capacity of the bounded mpsc channel that buffers frames for each AMQP channel.
const CHANNEL_BUFFER_SIZE :usize = 100;
/// Connection options for establishing an AMQP session.
///
/// All fields have defaults (see the `Default` impl): guest/guest on
/// 127.0.0.1:5672, vhost "/".
#[derive(Debug)]
pub struct Options <'a> {
    /// Broker hostname or IP address.
    pub host: &'a str,
    /// Broker TCP port.
    pub port: u16,
    /// Login (username) sent in the PLAIN SASL response.
    pub login: &'a str,
    /// Password sent in the PLAIN SASL response.
    pub password: &'a str,
    /// Virtual host to open.
    pub vhost: &'a str,
    /// Maximum frame size, negotiated against the server's tune value.
    pub frame_max_limit: u32,
    /// Maximum channel number, negotiated against the server's tune value.
    pub channel_max_limit: u16,
    /// Locale sent in connection.start-ok.
    pub locale: &'a str
}
impl <'a> Default for Options <'a> {
    /// Standard RabbitMQ defaults: guest/guest on 127.0.0.1:5672, vhost "/".
    fn default() -> Options <'a> {
        Options {
            host: "127.0.0.1",
            port: 5672,
            login: "guest",
            password: "guest",
            vhost: "/",
            frame_max_limit: 131072,
            channel_max_limit: 65535,
            locale: "en_US",
        }
    }
}
/// An open AMQP connection together with its multiplexed channels.
pub struct Session {
    // Underlying framing connection shared (via clones) with all channels.
    connection: Connection,
    // Sender half for each open channel keyed by channel id; the reading
    // loop uses this map to route incoming frames.
    channels: Arc<Mutex<HashMap<u16, SyncSender<Frame>> >>,
    // Maximum channel number negotiated during connection.tune.
    channel_max_limit: u16,
    // Channel 0, used for connection-level methods (start/tune/open/close).
    channel_zero: channel::Channel
}
impl Session {
    /// Use `open_url` to create new amqp session from a "amqp url"
    ///
    /// # Arguments
    /// * `url_string`: The format is: `amqp://username:password@host:port/virtual_host`
    ///
    /// Most of the params have their default, so you can just pass this:
    /// `"amqp://localhost/"` and it will connect to rabbitmq server, running on `localhost` on port `5672`,
    /// with login `"guest"`, password: `"guest"` to vhost `"/"`
    pub fn open_url(url_string: &str) -> AMQPResult<Session> {
        let default: Options = Default::default();
        let mut url_parser = UrlParser::new();
        url_parser.scheme_type_mapper(scheme_type_mapper);
        // NOTE(review): an unparsable URL panics here instead of returning Err — confirm intended.
        let url = url_parser.parse(url_string).unwrap();
        let vhost = url.serialize_path().unwrap_or(default.vhost.to_string());
        let host = url.domain().unwrap_or(default.host);
        let port = url.port().unwrap_or(default.port);
        // An empty username in the URL counts as absent, so the default login applies.
        let login = url.username().and_then(|username| match username { "" => None, _ => Some(username)} ).unwrap_or(default.login);
        let password = url.password().unwrap_or(default.password);
        let opts = Options { host: host, port: port,
                             login: login, password: password,
                             vhost: &vhost, ..Default::default()};
        Session::new(opts)
    }

    /// Initialize new rabbitmq session.
    /// You can use default options:
    /// # Example
    /// ```no_run
    /// use std::default::Default;
    /// use amqp::session::{Options, Session};
    /// let session = match Session::new(Options { .. Default::default() }){
    ///     Ok(session) => session,
    ///     Err(error) => panic!("Failed opening an amqp session: {:?}", error)
    /// };
    /// ```
    pub fn new(options: Options) -> AMQPResult<Session> {
        let connection = try!(Connection::open(options.host, options.port));
        let (channel_sender, channel_receiver) = sync_channel(CHANNEL_BUFFER_SIZE); //channel0
        let channels = Arc::new(Mutex::new(HashMap::new()));
        let channel_zero = channel::Channel::new(0, channel_receiver, connection.clone());
        try!(channels.lock().map_err(|_| AMQPError::SyncError)).insert(0, channel_sender);
        // The background reading loop gets its own clones of the connection
        // and the channel map.
        let con1 = connection.clone();
        let channels_clone = channels.clone();
        thread::spawn( || Session::reading_loop(con1, channels_clone ) );
        let mut session = Session {
            connection: connection,
            channels: channels,
            channel_max_limit: 0,
            channel_zero: channel_zero
        };
        try!(session.init(options));
        Ok(session)
    }

    // Perform the AMQP connection handshake on channel 0:
    // start/start-ok, tune/tune-ok, then connection.open for the vhost.
    fn init(&mut self, options: Options) -> AMQPResult<()> {
        debug!("Starting init session");
        let frame = self.channel_zero.read(); //Start
        let method_frame = MethodFrame::decode(frame);
        let start : protocol::connection::Start = match method_frame.method_name(){
            "connection.start" => protocol::Method::decode(method_frame).ok().unwrap(),
            meth => panic!("Unexpected method frame: {:?}", meth) //In reality you would probably skip the frame and try to read another?
        };
        debug!("Received connection.start: {:?}", start);
        // The client selects a security mechanism (Start-Ok).
        // The server starts the authentication process, which uses the SASL challenge-response model. It sends
        // the client a challenge (Secure).
        // The client sends an authentication response (Secure-Ok). For example using the "plain" mechanism,
        // the response consist of a login name and password.
        // The server repeats the challenge (Secure) or moves to negotiation, sending a set of parameters such as
        let mut client_properties = table::new();
        let mut capabilities = table::new();
        capabilities.insert("publisher_confirms".to_string(), Bool(true));
        capabilities.insert("consumer_cancel_notify".to_string(), Bool(true));
        capabilities.insert("exchange_exchange_bindings".to_string(), Bool(true));
        capabilities.insert("basic.nack".to_string(), Bool(true));
        capabilities.insert("connection.blocked".to_string(), Bool(true));
        capabilities.insert("authentication_failure_close".to_string(), Bool(true));
        client_properties.insert("capabilities".to_string(), FieldTable(capabilities));
        client_properties.insert("product".to_string(), LongString("rust-amqp".to_string()));
        client_properties.insert("platform".to_string(), LongString("rust".to_string()));
        client_properties.insert("version".to_string(), LongString("0.1".to_string()));
        client_properties.insert("information".to_string(), LongString("https://github.com/Antti/rust-amqp".to_string()));
        debug!("Sending connection.start-ok");
        let start_ok = protocol::connection::StartOk {
            client_properties: client_properties, mechanism: "PLAIN".to_string(),
            response: format!("\0{}\0{}", options.login, options.password), locale: options.locale.to_string()};
        let tune : protocol::connection::Tune = try!(self.channel_zero.rpc(&start_ok, "connection.tune"));
        // Take the smaller of the server's limits and ours.
        self.channel_max_limit = negotiate(tune.channel_max, self.channel_max_limit);
        self.connection.frame_max_limit = negotiate(tune.frame_max, options.frame_max_limit);
        self.channel_zero.connection.frame_max_limit = self.connection.frame_max_limit;
        let frame_max_limit = self.connection.frame_max_limit;
        let tune_ok = protocol::connection::TuneOk {
            channel_max: self.channel_max_limit,
            frame_max: frame_max_limit, heartbeat: 0};
        self.channel_zero.send_method_frame(&tune_ok);
        let open = protocol::connection::Open{virtual_host: options.vhost.to_string(), capabilities: "".to_string(), insist: false };
        let _ : protocol::connection::OpenOk = try!(self.channel_zero.rpc(&open, "connection.open-ok"));
        Ok(())
    }

    /// `open_channel` will open a new amqp channel:
    /// # Arguments
    ///
    /// * `channel_id` - channel number
    ///
    /// # Example
    /// ```no_run
    /// use std::default::Default;
    /// use amqp::session::{Options, Session};
    /// let mut session = Session::new(Options { .. Default::default() }).ok().unwrap();
    /// let channel = match session.open_channel(1){
    ///     Ok(channel) => channel,
    ///     Err(error) => panic!("Failed opening channel: {:?}", error)
    /// };
    /// ```
    pub fn open_channel(&mut self, channel_id: u16) -> AMQPResult<channel::Channel> {
        debug!("Openning channel: {}", channel_id);
        let (sender, receiver) = sync_channel(CHANNEL_BUFFER_SIZE);
        let mut channel = channel::Channel::new(channel_id, receiver, self.connection.clone());
        // Register the sender first so the reading loop can route frames to
        // this channel before channel.open completes.
        try!(self.channels.lock().map_err(|_| AMQPError::SyncError)).insert(channel_id, sender);
        try!(channel.open());
        Ok(channel)
    }

    /// Send connection.close on channel 0 and wait for close-ok.
    ///
    /// NOTE(review): failure to receive close-ok panics via `unwrap` — confirm intended.
    pub fn close(&mut self, reply_code: u16, reply_text: String) {
        debug!("Closing session: reply_code: {}, reply_text: {}", reply_code, reply_text);
        let close = protocol::connection::Close {reply_code: reply_code, reply_text: reply_text, class_id: 0, method_id: 0};
        let _ : protocol::connection::CloseOk = self.channel_zero.rpc(&close, "connection.close-ok").ok().unwrap();
    }

    /// Background loop: read frames from the connection and dispatch each to
    /// the registered channel's sender. Exits when a read fails.
    pub fn reading_loop(mut connection: Connection, channels: Arc<Mutex<HashMap<u16, SyncSender<Frame>>>>) -> () {
        debug!("Starting reading loop");
        loop {
            let frame = match connection.read() {
                Ok(frame) => frame,
                Err(some_err) => {debug!("Error in reading loop: {:?}", some_err); break} //Notify session somehow. It should stop now.
            };
            let chans = channels.lock().unwrap();
            // Panics if a frame arrives for an unknown channel id.
            let ref target_channel = (*chans)[&frame.channel];
            target_channel.send(frame).ok().expect("Error sending packet");
            // match frame.frame_type {
            //     framing::METHOD => {},
            //     framing::HEADERS => {},
            //     framing::BODY => {},
            //     framing::HEARTBEAT => {}
            // }
            // Handle heartbeats
        }
        debug!("Exiting reading loop");
    }
}
/// Settle a tunable parameter by taking the smaller of the server's value
/// and ours (ties resolve to the server's value, matching `cmp::min`).
fn negotiate<T : cmp::Ord>(their_value: T, our_value: T) -> T {
    if their_value <= our_value {
        their_value
    } else {
        our_value
    }
}
/// Map URL schemes to their type/default port for the URL parser.
///
/// # Panics
/// Panics on any scheme other than `"amqp"`.
fn scheme_type_mapper(scheme: &str) -> SchemeType {
    match scheme {
        "amqp" => SchemeType::Relative(5672),
        // Fixed typo in the panic message ("Uknown" -> "Unknown").
        _ => panic!("Unknown scheme: {}", scheme),
    }
}
Percent-decode login and password in Session::open_url
use channel;
use connection::Connection;
use protocol::{self, MethodFrame};
use table;
use table::TableEntry::{FieldTable, Bool, LongString};
use framing::Frame;
use amqp_error::{AMQPResult, AMQPError};
use std::sync::{Arc, Mutex};
use std::default::Default;
use std::collections::HashMap;
use std::sync::mpsc::{SyncSender, sync_channel};
use std::thread;
use std::cmp;
use url::{UrlParser, SchemeType, percent_encoding};
// Capacity of the bounded mpsc channel that buffers frames for each AMQP channel.
const CHANNEL_BUFFER_SIZE :usize = 100;
/// Connection options for establishing an AMQP session.
///
/// All fields have defaults (see the `Default` impl): guest/guest on
/// 127.0.0.1:5672, vhost "/".
#[derive(Debug)]
pub struct Options <'a> {
    /// Broker hostname or IP address.
    pub host: &'a str,
    /// Broker TCP port.
    pub port: u16,
    /// Login (username) sent in the PLAIN SASL response.
    pub login: &'a str,
    /// Password sent in the PLAIN SASL response.
    pub password: &'a str,
    /// Virtual host to open.
    pub vhost: &'a str,
    /// Maximum frame size, negotiated against the server's tune value.
    pub frame_max_limit: u32,
    /// Maximum channel number, negotiated against the server's tune value.
    pub channel_max_limit: u16,
    /// Locale sent in connection.start-ok.
    pub locale: &'a str
}
impl <'a> Default for Options <'a> {
    /// Standard RabbitMQ defaults: guest/guest on 127.0.0.1:5672, vhost "/".
    fn default() -> Options <'a> {
        Options {
            host: "127.0.0.1",
            port: 5672,
            login: "guest",
            password: "guest",
            vhost: "/",
            frame_max_limit: 131072,
            channel_max_limit: 65535,
            locale: "en_US",
        }
    }
}
/// An open AMQP connection together with its multiplexed channels.
pub struct Session {
    // Underlying framing connection shared (via clones) with all channels.
    connection: Connection,
    // Sender half for each open channel keyed by channel id; the reading
    // loop uses this map to route incoming frames.
    channels: Arc<Mutex<HashMap<u16, SyncSender<Frame>> >>,
    // Maximum channel number negotiated during connection.tune.
    channel_max_limit: u16,
    // Channel 0, used for connection-level methods (start/tune/open/close).
    channel_zero: channel::Channel
}
impl Session {
    /// Use `open_url` to create new amqp session from a "amqp url"
    ///
    /// # Arguments
    /// * `url_string`: The format is: `amqp://username:password@host:port/virtual_host`
    ///
    /// The username, password and vhost components are percent-decoded.
    ///
    /// Most of the params have their default, so you can just pass this:
    /// `"amqp://localhost/"` and it will connect to rabbitmq server, running on `localhost` on port `5672`,
    /// with login `"guest"`, password: `"guest"` to vhost `"/"`
    pub fn open_url(url_string: &str) -> AMQPResult<Session> {
        // Percent-decode a URL component (e.g. "%2F" -> "/").
        // NOTE(review): panics if the decoded bytes are not valid UTF-8 — confirm intended.
        fn decode(string: &str) -> String {
            let input: Vec<u8> = string.bytes().collect();
            let decoded = percent_encoding::percent_decode(&input[..]);
            String::from_utf8(decoded).unwrap()
        }
        let default: Options = Default::default();
        let mut url_parser = UrlParser::new();
        url_parser.scheme_type_mapper(scheme_type_mapper);
        // NOTE(review): an unparsable URL panics here instead of returning Err — confirm intended.
        let url = url_parser.parse(url_string).unwrap();
        let vhost = url.serialize_path().unwrap_or(default.vhost.to_string());
        let host = url.domain().unwrap_or(default.host);
        let port = url.port().unwrap_or(default.port);
        // An empty username in the URL counts as absent, so the default login applies.
        let login = url.username().and_then(|u| match u { "" => None, _ => Some(decode(u))} ).unwrap_or(String::from(default.login));
        let password = url.password().map(|p| decode(p)).unwrap_or(String::from(default.password));
        let opts = Options { host: host, port: port,
                             login: &login, password: &password,
                             vhost: &vhost, ..Default::default()};
        Session::new(opts)
    }

    /// Initialize new rabbitmq session.
    /// You can use default options:
    /// # Example
    /// ```no_run
    /// use std::default::Default;
    /// use amqp::session::{Options, Session};
    /// let session = match Session::new(Options { .. Default::default() }){
    ///     Ok(session) => session,
    ///     Err(error) => panic!("Failed opening an amqp session: {:?}", error)
    /// };
    /// ```
    pub fn new(options: Options) -> AMQPResult<Session> {
        let connection = try!(Connection::open(options.host, options.port));
        let (channel_sender, channel_receiver) = sync_channel(CHANNEL_BUFFER_SIZE); //channel0
        let channels = Arc::new(Mutex::new(HashMap::new()));
        let channel_zero = channel::Channel::new(0, channel_receiver, connection.clone());
        try!(channels.lock().map_err(|_| AMQPError::SyncError)).insert(0, channel_sender);
        // The background reading loop gets its own clones of the connection
        // and the channel map.
        let con1 = connection.clone();
        let channels_clone = channels.clone();
        thread::spawn( || Session::reading_loop(con1, channels_clone ) );
        let mut session = Session {
            connection: connection,
            channels: channels,
            channel_max_limit: 0,
            channel_zero: channel_zero
        };
        try!(session.init(options));
        Ok(session)
    }

    // Perform the AMQP connection handshake on channel 0:
    // start/start-ok, tune/tune-ok, then connection.open for the vhost.
    fn init(&mut self, options: Options) -> AMQPResult<()> {
        debug!("Starting init session");
        let frame = self.channel_zero.read(); //Start
        let method_frame = MethodFrame::decode(frame);
        let start : protocol::connection::Start = match method_frame.method_name(){
            "connection.start" => protocol::Method::decode(method_frame).ok().unwrap(),
            meth => panic!("Unexpected method frame: {:?}", meth) //In reality you would probably skip the frame and try to read another?
        };
        debug!("Received connection.start: {:?}", start);
        // The client selects a security mechanism (Start-Ok).
        // The server starts the authentication process, which uses the SASL challenge-response model. It sends
        // the client a challenge (Secure).
        // The client sends an authentication response (Secure-Ok). For example using the "plain" mechanism,
        // the response consist of a login name and password.
        // The server repeats the challenge (Secure) or moves to negotiation, sending a set of parameters such as
        let mut client_properties = table::new();
        let mut capabilities = table::new();
        capabilities.insert("publisher_confirms".to_string(), Bool(true));
        capabilities.insert("consumer_cancel_notify".to_string(), Bool(true));
        capabilities.insert("exchange_exchange_bindings".to_string(), Bool(true));
        capabilities.insert("basic.nack".to_string(), Bool(true));
        capabilities.insert("connection.blocked".to_string(), Bool(true));
        capabilities.insert("authentication_failure_close".to_string(), Bool(true));
        client_properties.insert("capabilities".to_string(), FieldTable(capabilities));
        client_properties.insert("product".to_string(), LongString("rust-amqp".to_string()));
        client_properties.insert("platform".to_string(), LongString("rust".to_string()));
        client_properties.insert("version".to_string(), LongString("0.1".to_string()));
        client_properties.insert("information".to_string(), LongString("https://github.com/Antti/rust-amqp".to_string()));
        debug!("Sending connection.start-ok");
        let start_ok = protocol::connection::StartOk {
            client_properties: client_properties, mechanism: "PLAIN".to_string(),
            response: format!("\0{}\0{}", options.login, options.password), locale: options.locale.to_string()};
        let tune : protocol::connection::Tune = try!(self.channel_zero.rpc(&start_ok, "connection.tune"));
        // Take the smaller of the server's limits and ours.
        self.channel_max_limit = negotiate(tune.channel_max, self.channel_max_limit);
        self.connection.frame_max_limit = negotiate(tune.frame_max, options.frame_max_limit);
        self.channel_zero.connection.frame_max_limit = self.connection.frame_max_limit;
        let frame_max_limit = self.connection.frame_max_limit;
        let tune_ok = protocol::connection::TuneOk {
            channel_max: self.channel_max_limit,
            frame_max: frame_max_limit, heartbeat: 0};
        self.channel_zero.send_method_frame(&tune_ok);
        let open = protocol::connection::Open{virtual_host: options.vhost.to_string(), capabilities: "".to_string(), insist: false };
        let _ : protocol::connection::OpenOk = try!(self.channel_zero.rpc(&open, "connection.open-ok"));
        Ok(())
    }

    /// `open_channel` will open a new amqp channel:
    /// # Arguments
    ///
    /// * `channel_id` - channel number
    ///
    /// # Example
    /// ```no_run
    /// use std::default::Default;
    /// use amqp::session::{Options, Session};
    /// let mut session = Session::new(Options { .. Default::default() }).ok().unwrap();
    /// let channel = match session.open_channel(1){
    ///     Ok(channel) => channel,
    ///     Err(error) => panic!("Failed opening channel: {:?}", error)
    /// };
    /// ```
    pub fn open_channel(&mut self, channel_id: u16) -> AMQPResult<channel::Channel> {
        debug!("Openning channel: {}", channel_id);
        let (sender, receiver) = sync_channel(CHANNEL_BUFFER_SIZE);
        let mut channel = channel::Channel::new(channel_id, receiver, self.connection.clone());
        // Register the sender first so the reading loop can route frames to
        // this channel before channel.open completes.
        try!(self.channels.lock().map_err(|_| AMQPError::SyncError)).insert(channel_id, sender);
        try!(channel.open());
        Ok(channel)
    }

    /// Send connection.close on channel 0 and wait for close-ok.
    ///
    /// NOTE(review): failure to receive close-ok panics via `unwrap` — confirm intended.
    pub fn close(&mut self, reply_code: u16, reply_text: String) {
        debug!("Closing session: reply_code: {}, reply_text: {}", reply_code, reply_text);
        let close = protocol::connection::Close {reply_code: reply_code, reply_text: reply_text, class_id: 0, method_id: 0};
        let _ : protocol::connection::CloseOk = self.channel_zero.rpc(&close, "connection.close-ok").ok().unwrap();
    }

    /// Background loop: read frames from the connection and dispatch each to
    /// the registered channel's sender. Exits when a read fails.
    pub fn reading_loop(mut connection: Connection, channels: Arc<Mutex<HashMap<u16, SyncSender<Frame>>>>) -> () {
        debug!("Starting reading loop");
        loop {
            let frame = match connection.read() {
                Ok(frame) => frame,
                Err(some_err) => {debug!("Error in reading loop: {:?}", some_err); break} //Notify session somehow. It should stop now.
            };
            let chans = channels.lock().unwrap();
            // Panics if a frame arrives for an unknown channel id.
            let ref target_channel = (*chans)[&frame.channel];
            target_channel.send(frame).ok().expect("Error sending packet");
            // match frame.frame_type {
            //     framing::METHOD => {},
            //     framing::HEADERS => {},
            //     framing::BODY => {},
            //     framing::HEARTBEAT => {}
            // }
            // Handle heartbeats
        }
        debug!("Exiting reading loop");
    }
}
/// Settle a tunable parameter by taking the smaller of the server's value
/// and ours (ties resolve to the server's value, matching `cmp::min`).
fn negotiate<T : cmp::Ord>(their_value: T, our_value: T) -> T {
    if their_value <= our_value {
        their_value
    } else {
        our_value
    }
}
/// Map URL schemes to their type/default port for the URL parser.
///
/// # Panics
/// Panics on any scheme other than `"amqp"`.
fn scheme_type_mapper(scheme: &str) -> SchemeType {
    match scheme {
        "amqp" => SchemeType::Relative(5672),
        // Fixed typo in the panic message ("Uknown" -> "Unknown").
        _ => panic!("Unknown scheme: {}", scheme),
    }
}
|
//
// Sysinfo
//
// Copyright (c) 2015 Guillaume Gomez
//
//! `sysinfo` is a crate used to get a system's information.
//!
//! Before any attempt to read the different structs' information, you need to update them to
//! get up-to-date information.
//!
//! # Examples
//!
//! ```
//! use sysinfo::{ProcessExt, SystemExt};
//!
//! let mut system = sysinfo::System::new_all();
//!
//! // First we update all information of our system struct.
//! system.refresh_all();
//!
//! // Now let's print every process' id and name:
//! for (pid, proc_) in system.get_processes() {
//! println!("{}:{} => status: {:?}", pid, proc_.name(), proc_.status());
//! }
//!
//! // Then let's print the temperature of the different components:
//! for component in system.get_components() {
//! println!("{:?}", component);
//! }
//!
//! // And then all disks' information:
//! for disk in system.get_disks() {
//! println!("{:?}", disk);
//! }
//!
//! // And finally the RAM and SWAP information:
//! println!("total memory: {} KiB", system.get_total_memory());
//! println!("used memory : {} KiB", system.get_used_memory());
//! println!("total swap : {} KiB", system.get_total_swap());
//! println!("used swap : {} KiB", system.get_used_swap());
//! ```
#![crate_name = "sysinfo"]
#![crate_type = "lib"]
#![crate_type = "rlib"]
#![deny(missing_docs)]
#![deny(intra_doc_link_resolution_failure)]
//#![deny(warnings)]
#![allow(unknown_lints)]
#[macro_use]
extern crate cfg_if;
#[cfg(not(any(target_os = "unknown", target_arch = "wasm32")))]
extern crate libc;
extern crate rayon;
#[macro_use]
extern crate doc_comment;
#[cfg(test)]
doctest!("../README.md");
cfg_if! {
if #[cfg(target_os = "macos")] {
mod mac;
use mac as sys;
} else if #[cfg(windows)] {
mod windows;
use windows as sys;
extern crate winapi;
extern crate ntapi;
} else if #[cfg(unix)] {
mod linux;
use linux as sys;
} else {
mod unknown;
use unknown as sys;
}
}
pub use common::{AsU32, DiskType, NetworksIter, Pid, RefreshKind};
pub use sys::{Component, Disk, NetworkData, Networks, Process, ProcessStatus, Processor, System};
pub use traits::{
ComponentExt, DiskExt, NetworkExt, NetworksExt, ProcessExt, ProcessorExt, SystemExt, UserExt,
};
#[cfg(feature = "c-interface")]
pub use c_interface::*;
pub use utils::get_current_pid;
#[cfg(feature = "c-interface")]
mod c_interface;
mod common;
mod debug;
mod system;
mod traits;
mod utils;
/// This function is only used on linux targets, on the other platforms it does nothing.
///
/// On linux, to improve performance, we keep a `/proc` file open for each process we index with
/// a maximum number of files open equivalent to half of the system limit.
///
/// The problem is that some users might need all the available file descriptors so we need to
/// allow them to change this limit. Reducing the limit is always possible; raising it is capped
/// at the system limit.
///
/// Note that if you set a limit bigger than the system limit, the system limit will be set.
///
/// Returns `true` if the new value has been set.
pub fn set_open_files_limit(mut _new_limit: isize) -> bool {
    #[cfg(all(not(target_os = "macos"), unix))]
    {
        // Clamp the requested limit into the valid [0, system max] range.
        if _new_limit < 0 {
            _new_limit = 0;
        }
        let max = sys::system::get_max_nb_fds();
        if _new_limit > max {
            _new_limit = max;
        }
        if let Ok(ref mut x) = unsafe { sys::system::REMAINING_FILES.lock() } {
            // If files are already open, to be sure that the number won't be bigger when those
            // files are closed, we subtract the current number of opened files to the new limit.
            let diff = max - **x;
            **x = _new_limit - diff;
            true
        } else {
            // Lock could not be acquired; leave the limit unchanged.
            false
        }
    }
    #[cfg(any(not(unix), target_os = "macos"))]
    {
        false
    }
}
/// An enum representing signal on UNIX-like systems.
#[repr(C)]
#[derive(Clone, PartialEq, PartialOrd, Debug, Copy)]
pub enum Signal {
    /// Hangup detected on controlling terminal or death of controlling process.
    Hangup = 1,
    /// Interrupt from keyboard.
    Interrupt = 2,
    /// Quit from keyboard.
    Quit = 3,
    /// Illegal instruction.
    Illegal = 4,
    /// Trace/breakpoint trap.
    Trap = 5,
    /// Abort signal from C abort function.
    Abort = 6,
    // IOT trap. A synonym for SIGABRT.
    // IOT = 6,
    /// Bus error (bad memory access).
    Bus = 7,
    /// Floating point exception.
    FloatingPointException = 8,
    /// Kill signal.
    Kill = 9,
    /// User-defined signal 1.
    User1 = 10,
    /// Invalid memory reference.
    Segv = 11,
    /// User-defined signal 2.
    User2 = 12,
    /// Broken pipe: write to pipe with no readers.
    Pipe = 13,
    /// Timer signal from C alarm function.
    Alarm = 14,
    /// Termination signal.
    Term = 15,
    /// Stack fault on coprocessor (unused).
    Stklft = 16,
    /// Child stopped or terminated.
    Child = 17,
    /// Continue if stopped.
    Continue = 18,
    /// Stop process.
    Stop = 19,
    /// Stop typed at terminal.
    TSTP = 20,
    /// Terminal input for background process.
    TTIN = 21,
    /// Terminal output for background process.
    TTOU = 22,
    /// Urgent condition on socket.
    Urgent = 23,
    /// CPU time limit exceeded.
    XCPU = 24,
    /// File size limit exceeded.
    XFSZ = 25,
    /// Virtual alarm clock.
    VirtualAlarm = 26,
    /// Profiling time expired.
    Profiling = 27,
    /// Window resize signal (SIGWINCH).
    Winch = 28,
    /// I/O now possible.
    IO = 29,
    // Pollable event (Sys V). Synonym for IO
    //Poll = 29,
    /// Power failure (System V).
    Power = 30,
    /// Bad argument to routine (SVr4).
    Sys = 31,
}
/// A struct representing the system load average value.
#[repr(C)]
#[derive(Default, Debug, Clone)]
pub struct LoadAvg {
    /// Average load within one minute.
    pub one: f64,
    /// Average load within five minutes.
    pub five: f64,
    /// Average load within fifteen minutes.
    pub fifteen: f64,
}
/// Type containing user information.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct User {
    // Login name, exposed via `UserExt::get_name`.
    name: String,
    // Names of the groups the user belongs to, exposed via `UserExt::get_groups`.
    groups: Vec<String>,
}
impl UserExt for User {
    // Borrow the user's login name.
    fn get_name(&self) -> &str {
        self.name.as_str()
    }

    // Borrow the list of groups the user belongs to.
    fn get_groups(&self) -> &[String] {
        self.groups.as_slice()
    }
}
#[cfg(test)]
mod test {
    use super::*;

    // Sanity check: after a full refresh, not every process should report
    // zero memory usage.
    #[test]
    fn check_memory_usage() {
        let mut s = ::System::new();
        s.refresh_all();
        assert_eq!(
            s.get_processes()
                .iter()
                .all(|(_, proc_)| proc_.memory() == 0),
            false
        );
    }
}
Add check that the users function is working as expected
//
// Sysinfo
//
// Copyright (c) 2015 Guillaume Gomez
//
//! `sysinfo` is a crate used to get a system's information.
//!
//! Before any attempt to read the different structs' information, you need to update them to
//! get up-to-date information.
//!
//! # Examples
//!
//! ```
//! use sysinfo::{ProcessExt, SystemExt};
//!
//! let mut system = sysinfo::System::new_all();
//!
//! // First we update all information of our system struct.
//! system.refresh_all();
//!
//! // Now let's print every process' id and name:
//! for (pid, proc_) in system.get_processes() {
//! println!("{}:{} => status: {:?}", pid, proc_.name(), proc_.status());
//! }
//!
//! // Then let's print the temperature of the different components:
//! for component in system.get_components() {
//! println!("{:?}", component);
//! }
//!
//! // And then all disks' information:
//! for disk in system.get_disks() {
//! println!("{:?}", disk);
//! }
//!
//! // And finally the RAM and SWAP information:
//! println!("total memory: {} KiB", system.get_total_memory());
//! println!("used memory : {} KiB", system.get_used_memory());
//! println!("total swap : {} KiB", system.get_total_swap());
//! println!("used swap : {} KiB", system.get_used_swap());
//! ```
#![crate_name = "sysinfo"]
#![crate_type = "lib"]
#![crate_type = "rlib"]
#![deny(missing_docs)]
#![deny(intra_doc_link_resolution_failure)]
//#![deny(warnings)]
#![allow(unknown_lints)]
#[macro_use]
extern crate cfg_if;
#[cfg(not(any(target_os = "unknown", target_arch = "wasm32")))]
extern crate libc;
extern crate rayon;
#[macro_use]
extern crate doc_comment;
#[cfg(test)]
doctest!("../README.md");
// Select the platform backend module and, for tests, the minimum number of
// users that backend is expected to report.
cfg_if! {
    if #[cfg(target_os = "macos")] {
        mod mac;
        use mac as sys;
        #[cfg(test)]
        const MIN_USERS: usize = 1;
    } else if #[cfg(windows)] {
        mod windows;
        use windows as sys;
        extern crate winapi;
        extern crate ntapi;
        #[cfg(test)]
        const MIN_USERS: usize = 1;
    } else if #[cfg(unix)] {
        mod linux;
        use linux as sys;
        #[cfg(test)]
        const MIN_USERS: usize = 1;
    } else {
        // The "unknown" fallback backend reports no users.
        mod unknown;
        use unknown as sys;
        #[cfg(test)]
        const MIN_USERS: usize = 0;
    }
}
pub use common::{AsU32, DiskType, NetworksIter, Pid, RefreshKind};
pub use sys::{Component, Disk, NetworkData, Networks, Process, ProcessStatus, Processor, System};
pub use traits::{
ComponentExt, DiskExt, NetworkExt, NetworksExt, ProcessExt, ProcessorExt, SystemExt, UserExt,
};
#[cfg(feature = "c-interface")]
pub use c_interface::*;
pub use utils::get_current_pid;
#[cfg(feature = "c-interface")]
mod c_interface;
mod common;
mod debug;
mod system;
mod traits;
mod utils;
/// This function is only used on linux targets, on the other platforms it does nothing.
///
/// On linux, to improve performance, we keep a `/proc` file open for each process we index with
/// a maximum number of files open equivalent to half of the system limit.
///
/// The problem is that some users might need all the available file descriptors so we need to
/// allow them to change this limit. Reducing the limit is always possible; raising it is capped
/// at the system limit.
///
/// Note that if you set a limit bigger than the system limit, the system limit will be set.
///
/// Returns `true` if the new value has been set.
pub fn set_open_files_limit(mut _new_limit: isize) -> bool {
    #[cfg(all(not(target_os = "macos"), unix))]
    {
        // Clamp the requested limit into the valid [0, system max] range.
        if _new_limit < 0 {
            _new_limit = 0;
        }
        let max = sys::system::get_max_nb_fds();
        if _new_limit > max {
            _new_limit = max;
        }
        if let Ok(ref mut x) = unsafe { sys::system::REMAINING_FILES.lock() } {
            // If files are already open, to be sure that the number won't be bigger when those
            // files are closed, we subtract the current number of opened files to the new limit.
            let diff = max - **x;
            **x = _new_limit - diff;
            true
        } else {
            // Lock could not be acquired; leave the limit unchanged.
            false
        }
    }
    #[cfg(any(not(unix), target_os = "macos"))]
    {
        false
    }
}
/// An enum representing signal on UNIX-like systems.
///
/// Discriminant values follow the conventional Linux signal numbers.
#[repr(C)]
#[derive(Clone, PartialEq, PartialOrd, Debug, Copy)]
pub enum Signal {
    /// Hangup detected on controlling terminal or death of controlling process.
    Hangup = 1,
    /// Interrupt from keyboard.
    Interrupt = 2,
    /// Quit from keyboard.
    Quit = 3,
    /// Illegal instruction.
    Illegal = 4,
    /// Trace/breakpoint trap.
    Trap = 5,
    /// Abort signal from C abort function.
    Abort = 6,
    // IOT trap. A synonym for SIGABRT.
    // IOT = 6,
    /// Bus error (bad memory access).
    Bus = 7,
    /// Floating point exception.
    FloatingPointException = 8,
    /// Kill signal.
    Kill = 9,
    /// User-defined signal 1.
    User1 = 10,
    /// Invalid memory reference.
    Segv = 11,
    /// User-defined signal 2.
    User2 = 12,
    /// Broken pipe: write to pipe with no readers.
    Pipe = 13,
    /// Timer signal from C alarm function.
    Alarm = 14,
    /// Termination signal.
    Term = 15,
    /// Stack fault on coprocessor (unused).
    // NOTE(review): `Stklft` looks like a misspelling of SIGSTKFLT
    // ("Stkflt"); renaming it now would break the public API.
    Stklft = 16,
    /// Child stopped or terminated.
    Child = 17,
    /// Continue if stopped.
    Continue = 18,
    /// Stop process.
    Stop = 19,
    /// Stop typed at terminal.
    TSTP = 20,
    /// Terminal input for background process.
    TTIN = 21,
    /// Terminal output for background process.
    TTOU = 22,
    /// Urgent condition on socket.
    Urgent = 23,
    /// CPU time limit exceeded.
    XCPU = 24,
    /// File size limit exceeded.
    XFSZ = 25,
    /// Virtual alarm clock.
    VirtualAlarm = 26,
    /// Profiling time expired.
    Profiling = 27,
    /// Window resize signal.
    Winch = 28,
    /// I/O now possible.
    IO = 29,
    // Pollable event (Sys V). Synonym for IO
    //Poll = 29,
    /// Power failure (System V).
    Power = 30,
    /// Bad argument to routine (SVr4).
    Sys = 31,
}
/// A struct representing the system load average value.
#[repr(C)]
#[derive(Default, Debug, Clone)]
pub struct LoadAvg {
    /// Average load within one minute.
    pub one: f64,
    /// Average load within five minutes.
    pub five: f64,
    /// Average load within fifteen minutes.
    pub fifteen: f64,
}
/// Type containing user information.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct User {
    // Login name of the user.
    name: String,
    // Names of the groups the user belongs to.
    groups: Vec<String>,
}
impl UserExt for User {
    /// Returns the user's login name.
    fn get_name(&self) -> &str {
        &self.name
    }
    /// Returns the names of the groups the user belongs to.
    fn get_groups(&self) -> &[String] {
        &self.groups
    }
}
#[cfg(test)]
mod test {
    use super::*;
    /// Sanity check: after a full refresh, it must not be the case that
    /// every indexed process reports zero memory usage.
    #[test]
    fn check_memory_usage() {
        let mut s = ::System::new();
        s.refresh_all();
        assert_eq!(
            s.get_processes()
                .iter()
                .all(|(_, proc_)| proc_.memory() == 0),
            false
        );
    }
    /// The user list starts empty and, after refreshing, contains at least
    /// the platform-specific minimum (`MIN_USERS`).
    #[test]
    fn check_users() {
        let mut s = ::System::new();
        assert!(s.get_users().is_empty());
        s.refresh_users_list();
        assert!(s.get_users().len() >= MIN_USERS);
    }
}
|
use std::rc::Rc;
use std::collections::HashMap;
use std::collections::HashSet;
use std::borrow::BorrowMut;
use std::fmt;
use std::fmt::Display;
type Id = String;
/// Errors that can occur while evaluating an expression.
#[derive(Debug)]
pub enum EvaluationError {
    /// A variable was referenced that is bound nowhere in scope.
    UnboundVariable(Rc<Id>),
    /// The expression cannot be reduced (e.g. applying a non-lambda).
    IllformedExpression,
}
impl fmt::Display for EvaluationError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::EvaluationError::*;
match *self {
UnboundVariable(ref id) => write!(f, "Encountered unbound variable: {}", id),
IllformedExpression => write!(f, "Illformed expression"),
}
}
}
/// A System F type.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Type {
    /// A type variable.
    Var(Rc<Id>),
    /// A function type `a -> b`.
    Fun(Rc<Type>, Rc<Type>),
    /// A universally quantified type `forall x. t`.
    Forall(Rc<Id>, Rc<Type>),
}
impl Display for Type {
    /// Pretty-prints the type; function domains and quantifiers are
    /// parenthesized explicitly.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            &Type::Var(ref v) => write!(f, "{}", v),
            &Type::Fun(ref dom, ref cod) => write!(f, "({}) -> {}", dom, cod),
            &Type::Forall(ref v, ref body) => write!(f, "(forall {}. {})", v, body),
        }
    }
}
/// A System F expression.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Expr {
    /// A term variable.
    Var(Rc<Id>),
    /// A lambda `\x:t. e`.
    Lam(Rc<Id>, Rc<Type>, Rc<Expr>),
    /// An application `e1 e2`.
    App(Rc<Expr>, Rc<Expr>),
    /// A type lambda `/\X. e`.
    TLam(Rc<Id>, Rc<Expr>),
    /// A type application `e [t]`.
    TApp(Rc<Expr>, Rc<Type>),
    /// `let x : t = e1 in e2`.
    Let(Rc<Id>, Rc<Type>, Rc<Expr>, Rc<Expr>),
    /// `Let X = t in e` (type-level let).
    TLet(Rc<Id>, Rc<Type>, Rc<Expr>),
}
impl Display for Expr {
    /// Pretty-prints the expression with explicit parentheses.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            &Expr::Var(ref v) => write!(f, "{}", v),
            &Expr::Lam(ref v, ref ty, ref body) => write!(f, "(\\{}:{}.{})", v, ty, body),
            &Expr::App(ref fun, ref arg) => write!(f, "({} {})", fun, arg),
            &Expr::TLam(ref tv, ref body) => write!(f, "(/\\{}.{})", tv, body),
            &Expr::TApp(ref fun, ref ty) => write!(f, "({} [{}])", fun, ty),
            &Expr::Let(ref v, ref ty, ref bound, ref body) => {
                write!(f, "(let {}:{} = {} in {})", v, ty, bound, body)
            }
            &Expr::TLet(ref tv, ref ty, ref body) => {
                write!(f, "(Let {} = {} in {})", tv, ty, body)
            }
        }
    }
}
impl Expr {
    /// Evaluates an expression and returns either the normal form or an error
    /// if it is illformed
    pub fn eval(self) -> Result<Rc<Expr>, EvaluationError> {
        Expr::eval_expr(Rc::new(self))
    }
    /// Evaluates an expression and returns either the normal form or an error
    /// if it is illformed
    pub fn eval_expr(e: Rc<Expr>) -> Result<Rc<Expr>, EvaluationError> {
        Expr::eval_helper(e, &mut HashMap::new(), &mut HashMap::new())
    }
    /// Workhorse for `eval` and `eval_expr`
    ///
    /// `emap` maps term variables to their (already evaluated) definitions and
    /// `tmap` maps type variables to the types they were applied to.
    fn eval_helper(e: Rc<Expr>,
                   emap: &mut HashMap<&str, Rc<Expr>>,
                   tmap: &mut HashMap<&str, Rc<Type>>)
                   -> Result<Rc<Expr>, EvaluationError> {
        use self::EvaluationError::*;
        let result = match *e {
            Expr::Var(ref id) => {
                match emap.get::<str>(id) {
                    Some(ref e) => Ok((*e).clone()),
                    None => Err(UnboundVariable(id.clone())),
                }
            }
            // Lambdas and type-lambdas are already values.
            Expr::Lam(_, _, _) |
            Expr::TLam(_, _) => Ok(e.clone()),
            Expr::App(ref e1, ref e2) => {
                let v1 = Expr::eval_helper(e1.clone(), emap, tmap)?;
                let v2 = Expr::eval_helper(e2.clone(), emap, tmap)?;
                match *v1 {
                    Expr::Lam(ref x, _, ref e3) => {
                        // Beta reduction: bind the parameter to the evaluated
                        // argument and evaluate the lambda *body*. (Bug fix:
                        // this previously bound `x` to the body `e3` and then
                        // re-evaluated the argument `e2`, so applications
                        // never actually reduced and `v2` went unused.)
                        let mut new_map = emap.clone();
                        new_map.insert(x, v2.clone());
                        Expr::eval_helper(e3.clone(), &mut new_map, tmap)
                    },
                    _ => Err(IllformedExpression),
                }
            }
            Expr::TApp(ref e, ref t) => {
                let v = Expr::eval_helper(e.clone(), emap, tmap)?;
                match *v {
                    // Bind the type variable and evaluate the body.
                    Expr::TLam(ref x, ref e) => {
                        let mut new_map = tmap.clone();
                        new_map.insert(x, t.clone());
                        Expr::eval_helper(e.clone(), emap, &mut new_map)
                    },
                    _ => Err(IllformedExpression),
                }
            },
            Expr::Let(ref x, ref t, ref e1, ref e2) => {
                // `let x : t = e1 in e2` desugars to `(\x:t. e2) e1`.
                let desugared = Expr::App(Rc::new(Expr::Lam(x.clone(), t.clone(), e2.clone())),
                                          e1.clone());
                Expr::eval_helper(Rc::new(desugared), emap, tmap)
            },
            Expr::TLet(ref X, ref t, ref e) => {
                // `Let X = t in e` desugars to `(/\X. e) [t]`.
                let desugared = Expr::TApp(Rc::new(Expr::TLam(X.clone(), e.clone())), t.clone());
                Expr::eval_helper(Rc::new(desugared), emap, tmap)
            }
        }?;
        // Normalise the types embedded in the resulting expression.
        Expr::expand_types(result, tmap, &mut HashSet::new())
    }
    /// Recursively expand the free variables of the types of the expression
    fn expand_types(e: Rc<Expr>, map: &HashMap<&str,Rc<Type>>, bvs: &mut HashSet<&str>)
                    -> Result<Rc<Expr>,EvaluationError> {
        match *e {
            Expr::Var(_) => {
                Ok(e.clone())
            },
            Expr::App(ref e1, ref e2) => {
                let r1 = Expr::expand_types(e1.clone(), map, bvs)?;
                let r2 = Expr::expand_types(e2.clone(), map, bvs)?;
                Ok(Rc::new(Expr::App(r1,r2)))
            },
            Expr::TApp(ref e, ref t) => {
                let re = Expr::expand_types(e.clone(), map, bvs)?;
                let te = Type::eval(t.clone(), map, bvs)?;
                Ok(Rc::new(Expr::TApp(re, te)))
            },
            Expr::Let(ref x, ref t, ref e1, ref e2) => {
                let r1 = Expr::expand_types(e1.clone(), map, bvs)?;
                let r2 = Expr::expand_types(e2.clone(), map, bvs)?;
                let te = Type::eval(t.clone(), map, bvs)?;
                Ok(Rc::new(Expr::Let(x.clone(), te, r1, r2)))
            },
            Expr::TLet(ref X, ref t, ref e) => {
                let te = Type::eval(t.clone(), map, bvs)?;
                // The type variable bound here must not be expanded below.
                let mut new_bvs = bvs.clone();
                new_bvs.insert(X);
                let re = Expr::expand_types(e.clone(), map, &mut new_bvs)?;
                Ok(Rc::new(Expr::TLet(X.clone(), te, re)))
            },
            Expr::Lam(ref x, ref t, ref e) => {
                let te = Type::eval(t.clone(), map, bvs)?;
                let re = Expr::expand_types(e.clone(), map, bvs)?;
                Ok(Rc::new(Expr::Lam(x.clone(), te, re)))
            },
            Expr::TLam(ref X, ref e) => {
                let mut new_bvs = bvs.clone();
                new_bvs.insert(X);
                let re = Expr::expand_types(e.clone(), map, &mut new_bvs)?;
                Ok(Rc::new(Expr::TLam(X.clone(), re)))
            },
        }
    }
    /// Typechecks the expression in an empty context, returning its type or
    /// `None` if it is ill-typed.
    pub fn type_check(&self) -> Option<Type> {
        self.type_check_helper(&HashMap::new(), &HashMap::new(), &HashSet::new())
    }
    // `emap`: types of term variables; `tmap`: definitions of type variables;
    // `bvs`: type variables bound by an enclosing type lambda.
    fn type_check_helper(&self,
                         emap: &HashMap<&str,&Type>,
                         tmap: &HashMap<&str,&Type>,
                         bvs: &HashSet<&str>) -> Option<Type> {
        match *self {
            Expr::Var(ref id) => {
                emap.get::<str>(&**id).map(|t| { (*t).clone() } )
            },
            Expr::Lam(ref x, ref t, ref e) => {
                let mut new_emap = emap.clone();
                new_emap.insert(x, t);
                let tbod = e.type_check_helper(&new_emap, tmap, bvs);
                tbod.map(|tb| { Type::Fun((*t).clone(), Rc::new(tb)) })
            },
            Expr::App(ref e1, ref e2) => {
                let t1 = e1.type_check_helper(emap, tmap, bvs);
                let t2 = e2.type_check_helper(emap, tmap, bvs);
                match t1 {
                    // The argument type must match the domain up to
                    // alpha-equivalence.
                    Some(Type::Fun(ta,tb)) => match t2 {
                        Some(tc) => {
                            if Type::alpha_equiv(&ta,&tc) {
                                Some((*tb).clone())
                            }
                            else {
                                None
                            }
                        },
                        _ => None,
                    },
                    _ => None,
                }
            },
            Expr::TLam(ref X, ref e) => {
                let mut new_bvs = bvs.clone();
                new_bvs.insert(X);
                e.type_check_helper(emap, tmap, &new_bvs)
                    .map(|t| { Type::Forall((*X).clone(),Rc::new(t)) })
            },
            Expr::TApp(ref e, ref t) => {
                let t1 = e.type_check_helper(emap, tmap, bvs);
                match t1 {
                    Some(Type::Forall(X,t2)) => {
                        // Substitute `t` for `X` in the quantified body via
                        // `Type::eval`, which wants owned `Rc<Type>` values.
                        let mut new_tmap = tmap.clone();
                        new_tmap.insert(&*X, t);
                        let new_tmap = {
                            let mut map: HashMap<&str,Rc<Type>> = HashMap::new();
                            for (k,v) in new_tmap {
                                map.insert(k, Rc::new((*v).clone()));
                            }
                            map
                        };
                        Some((*Type::eval(t2, &new_tmap, bvs).unwrap()).clone())
                    },
                    _ => None,
                }
            },
            Expr::Let(ref x, ref t, ref e1, ref e2) => {
                // Typecheck via the desugaring `(\x:t. e2) e1`.
                // Bug fix: the applied argument is the bound expression `e1`;
                // the code previously applied the lambda to `e2` (the body),
                // so `let` bodies were typechecked against themselves.
                (Expr::App(
                    Rc::new(Expr::Lam(x.clone(),t.clone(),e2.clone())),
                    e1.clone())).type_check_helper(emap, tmap, bvs)
            },
            Expr::TLet(ref X, ref t, ref e) => {
                // Typecheck via the desugaring `(/\X. e) [t]`.
                (Expr::TApp(
                    Rc::new(Expr::TLam(X.clone(),e.clone())),
                    t.clone())).type_check_helper(emap, tmap, bvs)
            },
        }
    }
}
impl Type {
/// Recursively expand the free variables of a type using the given map
fn eval(t: Rc<Type>, map: &HashMap<&str,Rc<Type>>, bvs: &HashSet<&str>)
-> Result<Rc<Type>,EvaluationError> {
match *t {
Type::Var(ref id) => {
if bvs.contains::<str>(id) {
Ok(t.clone())
} else {
match map.get::<str>(id) {
Some(ref t) => Type::eval((*t).clone(), map, bvs),
None => Err(EvaluationError::UnboundVariable(id.clone())),
}
}
},
Type::Fun(ref t1, ref t2) => {
let rt1 = Type::eval(t1.clone(), map, bvs)?;
let rt2 = Type::eval(t2.clone(), map, bvs)?;
Ok(Rc::new(Type::Fun(rt1, rt2)))
},
Type::Forall(ref id, ref t) => {
let mut new_bvs = bvs.clone();
new_bvs.insert(&**id);
let rt = Type::eval(t.clone(), map, &mut new_bvs)?;
Ok(Rc::new(Type::Forall(id.clone(), rt)))
},
}
}
fn alpha_equiv(t1: &Type, t2: &Type) -> bool {
// TODO
unimplemented!()
}
}
Fix serious error in the typechecking of `let` expressions
use std::rc::Rc;
use std::collections::HashMap;
use std::collections::HashSet;
use std::borrow::BorrowMut;
use std::fmt;
use std::fmt::Display;
type Id = String;
/// Errors that can occur while evaluating an expression.
#[derive(Debug)]
pub enum EvaluationError {
    /// A variable was referenced that is bound nowhere in scope.
    UnboundVariable(Rc<Id>),
    /// The expression cannot be reduced (e.g. applying a non-lambda).
    IllformedExpression,
}
impl fmt::Display for EvaluationError {
    /// Renders the error as a human-readable message.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::EvaluationError::*;
        match *self {
            UnboundVariable(ref id) => write!(f, "Encountered unbound variable: {}", id),
            IllformedExpression => write!(f, "Illformed expression"),
        }
    }
}
/// A System F type.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Type {
    /// A type variable.
    Var(Rc<Id>),
    /// A function type `a -> b`.
    Fun(Rc<Type>, Rc<Type>),
    /// A universally quantified type `forall x. t`.
    Forall(Rc<Id>, Rc<Type>),
}
impl Display for Type {
    /// Pretty-prints the type with explicit parentheses.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Type::Var(ref x) => write!(f,"{}",x),
            Type::Fun(ref a, ref b) => write!(f,"({}) -> {}",a,b),
            Type::Forall(ref x, ref t) => write!(f,"(forall {}. {})", x, t),
        }
    }
}
/// A System F expression.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Expr {
    /// A term variable.
    Var(Rc<Id>),
    /// A lambda `\x:t. e`.
    Lam(Rc<Id>, Rc<Type>, Rc<Expr>),
    /// An application `e1 e2`.
    App(Rc<Expr>, Rc<Expr>),
    /// A type lambda `/\X. e`.
    TLam(Rc<Id>, Rc<Expr>),
    /// A type application `e [t]`.
    TApp(Rc<Expr>, Rc<Type>),
    /// `let x : t = e1 in e2`.
    Let(Rc<Id>, Rc<Type>, Rc<Expr>, Rc<Expr>),
    /// `Let X = t in e` (type-level let).
    TLet(Rc<Id>, Rc<Type>, Rc<Expr>),
}
impl Display for Expr {
    /// Pretty-prints the expression with explicit parentheses.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Expr::Var(ref x) => write!(f,"{}",x),
            Expr::Lam(ref x, ref t, ref e) => write!(f,"(\\{}:{}.{})",x,t,e),
            Expr::App(ref e1, ref e2) => write!(f,"({} {})",e1,e2),
            Expr::TLam(ref X, ref e) => write!(f,"(/\\{}.{})",X,e),
            Expr::TApp(ref e, ref t) => write!(f,"({} [{}])",e,t),
            Expr::Let(ref x, ref t, ref e1, ref e2) => write!(f,"(let {}:{} = {} in {})",x,t,e1,e2),
            Expr::TLet(ref X, ref t, ref e) => write!(f,"(Let {} = {} in {})",X,t,e),
        }
    }
}
impl Expr {
    /// Evaluates an expression and returns either the normal form or an error
    /// if it is illformed
    pub fn eval(self) -> Result<Rc<Expr>, EvaluationError> {
        Expr::eval_expr(Rc::new(self))
    }
    /// Evaluates an expression and returns either the normal form or an error
    /// if it is illformed
    pub fn eval_expr(e: Rc<Expr>) -> Result<Rc<Expr>, EvaluationError> {
        Expr::eval_helper(e, &mut HashMap::new(), &mut HashMap::new())
    }
    /// Workhorse for `eval` and `eval_expr`
    ///
    /// `emap` maps term variables to expressions and `tmap` maps type
    /// variables to types.
    fn eval_helper(e: Rc<Expr>,
                   emap: &mut HashMap<&str, Rc<Expr>>,
                   tmap: &mut HashMap<&str, Rc<Type>>)
                   -> Result<Rc<Expr>, EvaluationError> {
        use self::EvaluationError::*;
        let result = match *e {
            Expr::Var(ref id) => {
                // Look the variable up in the term environment.
                match emap.get::<str>(id) {
                    Some(ref e) => Ok((*e).clone()),
                    None => Err(UnboundVariable(id.clone())),
                }
            }
            // Lambdas and type-lambdas are already values.
            Expr::Lam(_, _, _) |
            Expr::TLam(_, _) => Ok(e.clone()),
            Expr::App(ref e1, ref e2) => {
                let v1 = Expr::eval_helper(e1.clone(), emap, tmap)?;
                let v2 = Expr::eval_helper(e2.clone(), emap, tmap)?;
                match *v1 {
                    // NOTE(review): this binds the parameter to the lambda
                    // *body* (`e3`) and then re-evaluates the argument `e2`,
                    // leaving `v2` unused. Beta reduction would be expected
                    // to bind `x` to `v2` and evaluate `e3` — confirm whether
                    // this is intentional.
                    Expr::Lam(ref x, _, ref e3) => {
                        let mut new_map = emap.clone();
                        new_map.insert(x, e3.clone());
                        Expr::eval_helper(e2.clone(), &mut new_map, tmap)
                    },
                    _ => Err(IllformedExpression),
                }
            }
            Expr::TApp(ref e, ref t) => {
                let v = Expr::eval_helper(e.clone(), emap, tmap)?;
                match *v {
                    // Bind the type variable and evaluate the body.
                    Expr::TLam(ref x, ref e) => {
                        let mut new_map = tmap.clone();
                        new_map.insert(x, t.clone());
                        Expr::eval_helper(e.clone(), emap, &mut new_map)
                    },
                    _ => Err(IllformedExpression),
                }
            },
            Expr::Let(ref x, ref t, ref e1, ref e2) => {
                // `let x : t = e1 in e2` desugars to `(\x:t. e2) e1`.
                let desugared = Expr::App(Rc::new(Expr::Lam(x.clone(), t.clone(), e2.clone())),
                                          e1.clone());
                Expr::eval_helper(Rc::new(desugared), emap, tmap)
            },
            Expr::TLet(ref X, ref t, ref e) => {
                // `Let X = t in e` desugars to `(/\X. e) [t]`.
                let desugared = Expr::TApp(Rc::new(Expr::TLam(X.clone(), e.clone())), t.clone());
                Expr::eval_helper(Rc::new(desugared), emap, tmap)
            }
        }?;
        // Normalise the types embedded in the resulting expression.
        Expr::expand_types(result, tmap, &mut HashSet::new())
    }
    /// Recursively expand the free variables of the types of the expression
    fn expand_types(e: Rc<Expr>, map: &HashMap<&str,Rc<Type>>, bvs: &mut HashSet<&str>)
                    -> Result<Rc<Expr>,EvaluationError> {
        match *e {
            Expr::Var(_) => {
                Ok(e.clone())
            },
            Expr::App(ref e1, ref e2) => {
                let r1 = Expr::expand_types(e1.clone(), map, bvs)?;
                let r2 = Expr::expand_types(e2.clone(), map, bvs)?;
                Ok(Rc::new(Expr::App(r1,r2)))
            },
            Expr::TApp(ref e, ref t) => {
                let re = Expr::expand_types(e.clone(), map, bvs)?;
                let te = Type::eval(t.clone(), map, bvs)?;
                Ok(Rc::new(Expr::TApp(re, te)))
            },
            Expr::Let(ref x, ref t, ref e1, ref e2) => {
                let r1 = Expr::expand_types(e1.clone(), map, bvs)?;
                let r2 = Expr::expand_types(e2.clone(), map, bvs)?;
                let te = Type::eval(t.clone(), map, bvs)?;
                Ok(Rc::new(Expr::Let(x.clone(), te, r1, r2)))
            },
            Expr::TLet(ref X, ref t, ref e) => {
                let te = Type::eval(t.clone(), map, bvs)?;
                // The type variable bound here must not be expanded below.
                let mut new_bvs = bvs.clone();
                new_bvs.insert(X);
                let re = Expr::expand_types(e.clone(), map, &mut new_bvs)?;
                Ok(Rc::new(Expr::TLet(X.clone(), te, re)))
            },
            Expr::Lam(ref x, ref t, ref e) => {
                let te = Type::eval(t.clone(), map, bvs)?;
                let re = Expr::expand_types(e.clone(), map, bvs)?;
                Ok(Rc::new(Expr::Lam(x.clone(), te, re)))
            },
            Expr::TLam(ref X, ref e) => {
                let mut new_bvs = bvs.clone();
                new_bvs.insert(X);
                let re = Expr::expand_types(e.clone(), map, &mut new_bvs)?;
                Ok(Rc::new(Expr::TLam(X.clone(), re)))
            },
        }
    }
    /// Typechecks the expression in an empty context, returning its type or
    /// `None` if it is ill-typed.
    pub fn type_check(&self) -> Option<Type> {
        self.type_check_helper(&HashMap::new(), &HashMap::new(), &HashSet::new())
    }
    // `emap`: types of term variables; `tmap`: definitions of type variables;
    // `bvs`: type variables bound by an enclosing type lambda.
    fn type_check_helper(&self,
                         emap: &HashMap<&str,&Type>,
                         tmap: &HashMap<&str,&Type>,
                         bvs: &HashSet<&str>) -> Option<Type> {
        match *self {
            Expr::Var(ref id) => {
                emap.get::<str>(&**id).map(|t| { (*t).clone() } )
            },
            Expr::Lam(ref x, ref t, ref e) => {
                let mut new_emap = emap.clone();
                new_emap.insert(x, t);
                let tbod = e.type_check_helper(&new_emap, tmap, bvs);
                tbod.map(|tb| { Type::Fun((*t).clone(), Rc::new(tb)) })
            },
            Expr::App(ref e1, ref e2) => {
                let t1 = e1.type_check_helper(emap, tmap, bvs);
                let t2 = e2.type_check_helper(emap, tmap, bvs);
                match t1 {
                    // The argument type must match the domain up to
                    // alpha-equivalence.
                    Some(Type::Fun(ta,tb)) => match t2 {
                        Some(tc) => {
                            if Type::alpha_equiv(&ta,&tc) {
                                Some((*tb).clone())
                            }
                            else {
                                None
                            }
                        },
                        _ => None,
                    },
                    _ => None,
                }
            },
            Expr::TLam(ref X, ref e) => {
                let mut new_bvs = bvs.clone();
                new_bvs.insert(X);
                e.type_check_helper(emap, tmap, &new_bvs)
                    .map(|t| { Type::Forall((*X).clone(),Rc::new(t)) })
            },
            Expr::TApp(ref e, ref t) => {
                let t1 = e.type_check_helper(emap, tmap, bvs);
                match t1 {
                    Some(Type::Forall(X,t2)) => {
                        // Substitute `t` for `X` in the quantified body via
                        // `Type::eval`, which wants owned `Rc<Type>` values,
                        // so rebuild the map with cloned entries.
                        let mut new_tmap = tmap.clone();
                        new_tmap.insert(&*X, t);
                        let new_tmap = {
                            let mut map: HashMap<&str,Rc<Type>> = HashMap::new();
                            for (k,v) in new_tmap {
                                map.insert(k, Rc::new((*v).clone()));
                            }
                            map
                        };
                        Some((*Type::eval(t2, &new_tmap, bvs).unwrap()).clone())
                    },
                    _ => None,
                }
            },
            Expr::Let(ref x, ref t, ref e1, ref e2) => {
                // Typechecked via the desugaring `(\x:t. e2) e1`.
                (Expr::App(
                    Rc::new(Expr::Lam(x.clone(),t.clone(),e2.clone())),
                    e1.clone())).type_check_helper(emap, tmap, bvs)
            },
            Expr::TLet(ref X, ref t, ref e) => {
                // Typechecked via the desugaring `(/\X. e) [t]`.
                (Expr::TApp(
                    Rc::new(Expr::TLam(X.clone(),e.clone())),
                    t.clone())).type_check_helper(emap, tmap, bvs)
            },
        }
    }
}
impl Type {
    /// Recursively expand the free variables of a type using the given map
    ///
    /// Variables in `bvs` are bound and left untouched; any other variable
    /// must have an entry in `map` or evaluation fails.
    fn eval(t: Rc<Type>, map: &HashMap<&str,Rc<Type>>, bvs: &HashSet<&str>)
            -> Result<Rc<Type>,EvaluationError> {
        match *t {
            Type::Var(ref id) => {
                if bvs.contains::<str>(id) {
                    Ok(t.clone())
                } else {
                    match map.get::<str>(id) {
                        Some(ref t) => Type::eval((*t).clone(), map, bvs),
                        None => Err(EvaluationError::UnboundVariable(id.clone())),
                    }
                }
            },
            Type::Fun(ref t1, ref t2) => {
                let rt1 = Type::eval(t1.clone(), map, bvs)?;
                let rt2 = Type::eval(t2.clone(), map, bvs)?;
                Ok(Rc::new(Type::Fun(rt1, rt2)))
            },
            Type::Forall(ref id, ref t) => {
                // The quantified variable shadows any mapping for it.
                let mut new_bvs = bvs.clone();
                new_bvs.insert(&**id);
                let rt = Type::eval(t.clone(), map, &mut new_bvs)?;
                Ok(Rc::new(Type::Forall(id.clone(), rt)))
            },
        }
    }
    /// Whether two types are equal up to renaming of bound variables.
    ///
    /// NOTE(review): still unimplemented — any `App` typecheck that reaches
    /// the argument-type comparison will panic here.
    fn alpha_equiv(t1: &Type, t2: &Type) -> bool {
        // TODO
        unimplemented!()
    }
}
//! Representation of CSS types, and the CSS parsing and matching engine.
//!
//! # Terminology
//!
//! Consider a CSS **stylesheet** like this:
//!
//! ```ignore
//! @import url("another.css");
//!
//! foo, .bar {
//! fill: red;
//! stroke: green;
//! }
//!
//! #baz { stroke-width: 42; }
//!
//! ```
//! The example contains three **rules**; the first one is an **at-rule**,
//! the other two are **qualified rules**.
//!
//! Each rule is made of two parts, a **prelude** and an optional **block**
//! The prelude is the part until the first `{` or until `;`, depending on
//! whether a block is present. The block is the part between curly braces.
//!
//! Let's look at each rule:
//!
//! `@import` is an **at-rule**. This rule has a prelude, but no block.
//! There are other at-rules like `@media` and some of them may have a block,
//! but librsvg doesn't support those yet.
//!
//! The prelude of the following rule is `foo, .bar`.
//! It is a **selector list** with two **selectors**, one for
//! `foo` elements and one for elements that have the `bar` class.
//!
//! The content of the block between `{}` for a qualified rule is a
//! **declaration list**. The block of the first qualified rule contains two
//! **declarations**, one for the `fill` **property** and one for the
//! `stroke` property.
//!
//! After the first qualified rule, we have a second qualified rule with
//! a single selector for the `#baz` id, with a single declaration for the
//! `stroke-width` property.
//!
//! # Helper crates we use
//!
//! * `cssparser` crate as a CSS tokenizer, and some utilities to
//! parse CSS rules and declarations.
//!
//! * `selectors` crate for the representation of selectors and
//! selector lists, and for the matching engine.
//!
//! Both crates provide very generic implementations of their concepts,
//! and expect the caller to provide implementations of various traits,
//! and to provide types that represent certain things.
//!
//! For example, `cssparser` expects one to provide representations of
//! the following types:
//!
//! * A parsed CSS rule. For `fill: blue;` we have
//! `ParsedProperty::Fill(...)`.
//!
//! * A parsed selector list; we use `SelectorList` from the
//! `selectors` crate.
//!
//! In turn, the `selectors` crate needs a way to navigate and examine
//! one's implementation of an element tree. We provide `impl
//! selectors::Element for RsvgElement` for this. This implementation
//! has methods like "does this element have the id `#foo`", or "give
//! me the next sibling element".
//!
//! Finally, the matching engine ties all of this together with
//! `matches_selector()`. This takes an opaque representation of an
//! element, plus a selector, and returns a bool. We iterate through
//! the rules in the stylesheets and gather the matches; then sort the
//! matches by specificity and apply the result to each element.
use cssparser::*;
use selectors::attr::{AttrSelectorOperation, CaseSensitivity, NamespaceConstraint};
use selectors::matching::{ElementSelectorFlags, MatchingContext, MatchingMode, QuirksMode};
use selectors::{self, OpaqueElement, SelectorImpl, SelectorList};
use std::fmt;
use std::str;
use markup5ever::{namespace_url, ns, LocalName, Namespace, Prefix, QualName};
use url::Url;
use crate::allowed_url::AllowedUrl;
use crate::error::*;
use crate::io::{self, BinaryData};
use crate::node::{NodeCascade, NodeType, RsvgNode};
use crate::properties::{parse_property, ComputedValues, ParsedProperty};
use crate::text::NodeChars;
/// A parsed CSS declaration
///
/// For example, in the declaration `fill: green !important`, the
/// `prop_name` would be `fill`, the `property` would be
/// `ParsedProperty::Fill(...)` with the green value, and `important`
/// would be `true`.
pub struct Declaration {
    /// Name of the property, e.g. `fill`.
    pub prop_name: QualName,
    /// Parsed value of the property.
    pub property: ParsedProperty,
    /// Whether the declaration had `!important`.
    pub important: bool,
}
/// Dummy struct required to use `cssparser::DeclarationListParser`
///
/// It implements `cssparser::DeclarationParser`, which knows how to parse
/// the property/value pairs from a CSS declaration. It carries no state of
/// its own.
pub struct DeclParser;
impl<'i> DeclarationParser<'i> for DeclParser {
    type Declaration = Declaration;
    type Error = ValueErrorKind;
    /// Parses a CSS declaration like `name: input_value [!important]`
    fn parse_value<'t>(
        &mut self,
        name: CowRcStr<'i>,
        input: &mut Parser<'i, 't>,
    ) -> Result<Declaration, cssparser::ParseError<'i, ValueErrorKind>> {
        // Property names are interpreted in the SVG namespace.
        let prop_name = QualName::new(None, ns!(svg), LocalName::from(name.as_ref()));
        let property =
            parse_property(&prop_name, input, true).map_err(|e| input.new_custom_error(e))?;
        // The trailing `!important` is optional; `try_parse` rewinds the
        // parser when it is absent.
        let important = input.try_parse(parse_important).is_ok();
        Ok(Declaration {
            prop_name,
            property,
            important,
        })
    }
}
// cssparser's DeclarationListParser requires this; we just use the dummy
// implementations from cssparser itself. We may want to provide a real
// implementation in the future, although this may require keeping track of the
// CSS parsing state like Servo does.
//
// The trait's default methods reject every at-rule, which is the behavior we
// want inside declaration blocks.
impl<'i> AtRuleParser<'i> for DeclParser {
    type PreludeBlock = ();
    type PreludeNoBlock = ();
    type AtRule = Declaration;
    type Error = ValueErrorKind;
}
/// Dummy struct to implement cssparser::QualifiedRuleParser and
/// cssparser::AtRuleParser
///
/// It carries no state; parsed rules are yielded by `RuleListParser`.
pub struct RuleParser;
/// Errors from the CSS parsing process
pub enum CssParseErrorKind<'i> {
    /// An error while parsing a selector.
    Selector(selectors::parser::SelectorParseErrorKind<'i>),
    /// An error while parsing a property value.
    Value(ValueErrorKind),
}
impl<'i> From<selectors::parser::SelectorParseErrorKind<'i>> for CssParseErrorKind<'i> {
    // Lets selector-parsing errors be converted with `?` / `From`.
    fn from(e: selectors::parser::SelectorParseErrorKind) -> CssParseErrorKind {
        CssParseErrorKind::Selector(e)
    }
}
/// A CSS qualified rule (or ruleset)
pub struct QualifiedRule {
    // Selector list from the rule's prelude (before the `{}` block).
    selectors: SelectorList<Selector>,
    // Declarations from inside the `{}` block.
    declarations: Vec<Declaration>,
}
/// Prelude of at-rule used in the AtRuleParser.
pub enum AtRulePrelude {
    /// `@import` with the referenced url or string.
    Import(String),
}
/// A parsed CSS at-rule; only `@import` is supported
pub enum AtRule {
    /// `@import` with the referenced url or string.
    Import(String),
}
/// A CSS rule (or ruleset)
pub enum Rule {
    /// An at-rule such as `@import`.
    AtRule(AtRule),
    /// A selector list plus its declaration block.
    QualifiedRule(QualifiedRule),
}
// Required to implement the `Prelude` associated type in `cssparser::QualifiedRuleParser`
impl<'i> selectors::Parser<'i> for RuleParser {
    type Impl = Selector;
    type Error = CssParseErrorKind<'i>;
    /// Selectors with no explicit namespace resolve to the SVG namespace.
    fn default_namespace(&self) -> Option<<Self::Impl as SelectorImpl>::NamespaceUrl> {
        Some(ns!(svg))
    }
    /// Namespace prefixes are not resolved yet; every prefix lookup fails.
    fn namespace_for_prefix(
        &self,
        _prefix: &<Self::Impl as SelectorImpl>::NamespacePrefix,
    ) -> Option<<Self::Impl as SelectorImpl>::NamespaceUrl> {
        // FIXME: Do we need to keep a lookup table extracted from libxml2's
        // XML namespaces?
        //
        // Or are CSS namespaces completely different, declared elsewhere?
        None
    }
}
// `cssparser::RuleListParser` is a struct which requires that we
// provide a type that implements `cssparser::QualifiedRuleParser`.
//
// In turn, `cssparser::QualifiedRuleParser` requires that we
// implement a way to parse the `Prelude` of a ruleset or rule. For
// example, in this ruleset:
//
// ```ignore
// foo, .bar { fill: red; stroke: green; }
// ```
//
// The prelude is the selector list with the `foo` and `.bar` selectors.
//
// The `parse_prelude` method just uses `selectors::SelectorList`. This
// is what requires the `impl selectors::Parser for RuleParser`.
//
// Next, the `parse_block` method takes an already-parsed prelude (a selector list),
// and tries to parse the block between braces. It creates a `Rule` out of
// the selector list and the declaration list.
impl<'i> QualifiedRuleParser<'i> for RuleParser {
    type Prelude = SelectorList<Selector>;
    type QualifiedRule = Rule;
    type Error = CssParseErrorKind<'i>;
    /// Parses the selector list that precedes the `{}` block.
    fn parse_prelude<'t>(
        &mut self,
        input: &mut Parser<'i, 't>,
    ) -> Result<Self::Prelude, cssparser::ParseError<'i, Self::Error>> {
        SelectorList::parse(self, input)
    }
    /// Parses the declaration block and pairs it with the already-parsed
    /// selector list into a `Rule::QualifiedRule`.
    fn parse_block<'t>(
        &mut self,
        prelude: Self::Prelude,
        _location: SourceLocation,
        input: &mut Parser<'i, 't>,
    ) -> Result<Self::QualifiedRule, cssparser::ParseError<'i, Self::Error>> {
        let declarations = DeclarationListParser::new(input, DeclParser)
            .filter_map(Result::ok) // ignore invalid property name or value
            .collect();
        Ok(Rule::QualifiedRule(QualifiedRule {
            selectors: prelude,
            declarations,
        }))
    }
}
// Required by `cssparser::RuleListParser`.
//
// This only handles the `@import` at-rule.
impl<'i> AtRuleParser<'i> for RuleParser {
    type PreludeBlock = ();
    type PreludeNoBlock = AtRulePrelude;
    type AtRule = Rule;
    type Error = CssParseErrorKind<'i>;
    /// Parses an at-rule prelude; only `@import <url-or-string>` is
    /// recognized, everything else is an invalid at-rule.
    fn parse_prelude<'t>(
        &mut self,
        name: CowRcStr<'i>,
        input: &mut Parser<'i, 't>,
    ) -> Result<AtRuleType<Self::PreludeNoBlock, Self::PreludeBlock>, ParseError<'i, Self::Error>>
    {
        match_ignore_ascii_case! { &name,
            "import" => {
                // FIXME: at the moment we ignore media queries
                let url = input.expect_url_or_string()?.as_ref().to_owned();
                Ok(AtRuleType::WithoutBlock(AtRulePrelude::Import(url)))
            },
            _ => Err(input.new_error(BasicParseErrorKind::AtRuleInvalid(name))),
        }
    }
    /// Builds the final `Rule` for a block-less at-rule (i.e. `@import`).
    fn rule_without_block(
        &mut self,
        prelude: Self::PreludeNoBlock,
        _location: SourceLocation,
    ) -> Self::AtRule {
        let AtRulePrelude::Import(url) = prelude;
        Rule::AtRule(AtRule::Import(url))
    }
}
/// Dummy type required by the SelectorImpl trait.
///
/// Non-tree-structural pseudo-classes (e.g. `:hover`) are not supported.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct NonTSPseudoClass;
impl ToCss for NonTSPseudoClass {
    // Serializes to the empty string.
    fn to_css<W>(&self, _dest: &mut W) -> fmt::Result
    where
        W: fmt::Write,
    {
        Ok(())
    }
}
impl selectors::parser::NonTSPseudoClass for NonTSPseudoClass {
    type Impl = Selector;
    fn is_active_or_hover(&self) -> bool {
        false
    }
    fn is_user_action_state(&self) -> bool {
        false
    }
}
/// Dummy type required by the SelectorImpl trait
///
/// Pseudo-elements are not supported; this serializes to nothing.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct PseudoElement;
impl ToCss for PseudoElement {
    // Serializes to the empty string.
    fn to_css<W>(&self, _dest: &mut W) -> fmt::Result
    where
        W: fmt::Write,
    {
        Ok(())
    }
}
impl selectors::parser::PseudoElement for PseudoElement {
    type Impl = Selector;
}
/// Holds all the types for the SelectorImpl trait
#[derive(Debug, Clone)]
pub struct Selector;
impl SelectorImpl for Selector {
    type ExtraMatchingData = ();
    // Attribute values are plain strings; names/identifiers use the
    // markup5ever interned atoms.
    type AttrValue = String;
    type Identifier = LocalName;
    type ClassName = LocalName;
    type PartName = LocalName;
    type LocalName = LocalName;
    type NamespaceUrl = Namespace;
    type NamespacePrefix = Prefix;
    type BorrowedNamespaceUrl = Namespace;
    type BorrowedLocalName = LocalName;
    type NonTSPseudoClass = NonTSPseudoClass;
    type PseudoElement = PseudoElement;
}
/// Wraps an `RsvgNode` with a locally-defined type, so we can implement
/// a foreign trait on it.
///
/// RsvgNode is an alias for rctree::Node, so we can't implement
/// `selectors::Element` directly on it. We implement it on the
/// `RsvgElement` wrapper instead.
#[derive(Clone)]
pub struct RsvgElement(RsvgNode);
impl From<RsvgNode> for RsvgElement {
    fn from(n: RsvgNode) -> RsvgElement {
        RsvgElement(n)
    }
}
impl fmt::Debug for RsvgElement {
    // Debug output delegates to the wrapped node's Display.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0.borrow())
    }
}
// The selectors crate uses this to examine our tree of elements.
impl selectors::Element for RsvgElement {
type Impl = Selector;
/// Converts self into an opaque representation.
fn opaque(&self) -> OpaqueElement {
OpaqueElement::new(&self.0.borrow())
}
fn parent_element(&self) -> Option<Self> {
self.0.parent().map(|n| n.into())
}
/// Whether the parent node of this element is a shadow root.
fn parent_node_is_shadow_root(&self) -> bool {
// unsupported
false
}
/// The host of the containing shadow root, if any.
fn containing_shadow_host(&self) -> Option<Self> {
// unsupported
None
}
/// Whether we're matching on a pseudo-element.
fn is_pseudo_element(&self) -> bool {
// unsupported
false
}
/// Skips non-element nodes
fn prev_sibling_element(&self) -> Option<Self> {
let mut sibling = self.0.previous_sibling();
while let Some(ref sib) = sibling {
if sib.borrow().get_type() != NodeType::Chars {
return sibling.map(|n| n.into());
}
sibling = self.0.previous_sibling();
}
None
}
/// Skips non-element nodes
fn next_sibling_element(&self) -> Option<Self> {
let mut sibling = self.0.next_sibling();
while let Some(ref sib) = sibling {
if sib.borrow().get_type() != NodeType::Chars {
return sibling.map(|n| n.into());
}
sibling = self.0.next_sibling();
}
None
}
fn is_html_element_in_html_document(&self) -> bool {
false
}
fn has_local_name(&self, local_name: &LocalName) -> bool {
self.0.borrow().element_name().local == *local_name
}
/// Empty string for no namespace
fn has_namespace(&self, ns: &Namespace) -> bool {
self.0.borrow().element_name().ns == *ns
}
/// Whether this element and the `other` element have the same local name and namespace.
fn is_same_type(&self, other: &Self) -> bool {
self.0.borrow().element_name() == other.0.borrow().element_name()
}
fn attr_matches(
&self,
_ns: &NamespaceConstraint<&Namespace>,
_local_name: &LocalName,
_operation: &AttrSelectorOperation<&String>,
) -> bool {
// unsupported
false
}
fn match_non_ts_pseudo_class<F>(
&self,
_pc: &<Self::Impl as SelectorImpl>::NonTSPseudoClass,
_context: &mut MatchingContext<Self::Impl>,
_flags_setter: &mut F,
) -> bool
where
F: FnMut(&Self, ElementSelectorFlags),
{
// unsupported
false
}
fn match_pseudo_element(
&self,
_pe: &<Self::Impl as SelectorImpl>::PseudoElement,
_context: &mut MatchingContext<Self::Impl>,
) -> bool {
// unsupported
false
}
/// Whether this element is a `link`.
fn is_link(&self) -> bool {
// FIXME: is this correct for SVG <a>, not HTML <a>?
self.0.borrow().get_type() == NodeType::Link
}
/// Returns whether the element is an HTML <slot> element.
fn is_html_slot_element(&self) -> bool {
false
}
fn has_id(&self, id: &LocalName, case_sensitivity: CaseSensitivity) -> bool {
self.0
.borrow()
.get_id()
.map(|self_id| case_sensitivity.eq(self_id.as_bytes(), id.as_ref().as_bytes()))
.unwrap_or(false)
}
fn has_class(&self, name: &LocalName, case_sensitivity: CaseSensitivity) -> bool {
self.0
.borrow()
.get_class()
.map(|classes| {
classes
.split_whitespace()
.any(|class| case_sensitivity.eq(class.as_bytes(), name.as_bytes()))
})
.unwrap_or(false)
}
fn is_part(&self, _name: &LocalName) -> bool {
// unsupported
false
}
/// Returns whether this element matches `:empty`.
///
/// That is, whether it does not contain any child element or any
/// non-zero-length text node.
/// See http://dev.w3.org/csswg/selectors-3/#empty-pseudo
fn is_empty(&self) -> bool {
    !self.0.has_children()
        || self.0.children().all(|child| {
            // A child is tolerated only if it is a zero-length text node.
            let borrowed = child.borrow();
            borrowed.get_type() == NodeType::Chars
                && borrowed.get_impl::<NodeChars>().is_empty()
        })
}
/// Returns whether this element matches `:root`,
/// i.e. whether it is the root element of a document.
///
/// Note: this can be false even if `.parent_element()` is `None`
/// if the parent node is a `DocumentFragment`.
fn is_root(&self) -> bool {
    self.0.parent().is_none()
}
}
/// Origin for a stylesheet, per https://www.w3.org/TR/CSS22/cascade.html#cascading-order
///
/// This is used when sorting selector matches according to their origin and specificity.
// Fieldless tag enum, so Copy/Clone/Debug/PartialEq/Eq are free; Debug makes
// the public type usable in diagnostics, and Copy lets matches carry it by value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Origin {
    /// Styles built into the user agent (lowest priority for normal declarations).
    UserAgent,
    /// Styles supplied by the user.
    User,
    /// Styles supplied by the document author (highest priority for normal declarations).
    Author,
}
/// A parsed CSS stylesheet
pub struct Stylesheet {
    /// Where the stylesheet comes from; used when cascading.
    origin: Origin,
    /// The parsed rulesets, in document order.
    qualified_rules: Vec<QualifiedRule>,
}
impl Stylesheet {
    /// Creates an empty stylesheet with the given origin.
    pub fn new(origin: Origin) -> Stylesheet {
        Stylesheet {
            origin,
            qualified_rules: Vec::new(),
        }
    }

    /// Parses a stylesheet from an in-memory string.
    ///
    /// `base_url` is used to resolve any `@import` rules found in `buf`.
    pub fn from_data(
        buf: &str,
        base_url: Option<&Url>,
        origin: Origin,
    ) -> Result<Self, LoadingError> {
        let mut stylesheet = Stylesheet::new(origin);
        stylesheet.parse(buf, base_url)?;
        Ok(stylesheet)
    }

    /// Loads and parses a stylesheet referenced by `href`.
    pub fn from_href(
        href: &str,
        base_url: Option<&Url>,
        origin: Origin,
    ) -> Result<Self, LoadingError> {
        let mut stylesheet = Stylesheet::new(origin);
        stylesheet.load(href, base_url)?;
        Ok(stylesheet)
    }

    /// Parses a CSS stylesheet from a string
    ///
    /// The `base_url` is required for `@import` rules, so that librsvg
    /// can determine if the requested path is allowed.
    fn parse(&mut self, buf: &str, base_url: Option<&Url>) -> Result<(), LoadingError> {
        let mut input = ParserInput::new(buf);
        let mut parser = Parser::new(&mut input);
        RuleListParser::new_for_stylesheet(&mut parser, RuleParser)
            .filter_map(Result::ok) // ignore invalid rules
            .for_each(|rule| match rule {
                Rule::AtRule(AtRule::Import(url)) => {
                    // ignore invalid imports
                    let _ = self.load(&url, base_url);
                }
                Rule::QualifiedRule(qr) => self.qualified_rules.push(qr),
            });
        Ok(())
    }

    /// Parses a stylesheet referenced by an URL
    fn load(&mut self, href: &str, base_url: Option<&Url>) -> Result<(), LoadingError> {
        let aurl = AllowedUrl::from_href(href, base_url).map_err(|_| LoadingError::BadUrl)?;
        io::acquire_data(&aurl, None)
            .and_then(|data| {
                let BinaryData {
                    data: bytes,
                    content_type,
                } = data;
                // Only accept data explicitly labeled as CSS.
                if content_type.as_ref().map(String::as_ref) == Some("text/css") {
                    Ok(bytes)
                } else {
                    rsvg_log!("\"{}\" is not of type text/css; ignoring", aurl);
                    Err(LoadingError::BadCss)
                }
            })
            .and_then(|bytes| {
                // The bytes must be valid UTF-8 before they can be tokenized.
                String::from_utf8(bytes).map_err(|_| {
                    rsvg_log!(
                        "\"{}\" does not contain valid UTF-8 CSS data; ignoring",
                        aurl
                    );
                    LoadingError::BadCss
                })
            })
            .and_then(|utf8| self.parse(&utf8, base_url))
    }

    /// Appends the style declarations that match a specified node to a given vector
    fn get_matches<'a>(
        &'a self,
        node: &RsvgNode,
        match_ctx: &mut MatchingContext<Selector>,
        acc: &mut Vec<Match<'a>>,
    ) {
        for rule in &self.qualified_rules {
            // `.0` is the Vec of selectors inside the SelectorList.
            for selector in &rule.selectors.0 {
                // This magic call is stolen from selectors::matching::matches_selector_list()
                if selectors::matching::matches_selector(
                    selector,
                    0,
                    None,
                    &RsvgElement(node.clone()),
                    match_ctx,
                    &mut |_, _| {},
                ) {
                    // Every declaration of a matching rule yields one Match.
                    for decl in rule.declarations.iter() {
                        acc.push(Match {
                            declaration: decl,
                            specificity: selector.specificity(),
                        });
                    }
                }
            }
        }
    }
}
/// A declaration that matched a node, tagged with the specificity of the
/// selector that produced the match (filled in by `Stylesheet::get_matches`).
struct Match<'a> {
    declaration: &'a Declaration,
    specificity: u32,
}
/// Runs the CSS cascade on the specified tree from all the stylesheets
pub fn cascade(root: &mut RsvgNode, stylesheets: &[Stylesheet]) {
    for mut node in root.descendants() {
        let mut match_ctx = MatchingContext::new(
            MatchingMode::Normal,
            // FIXME: we don't know yet how to set up a bloom filter here
            None,
            // n_index_cache,
            None,
            QuirksMode::NoQuirks,
        );

        // Gather the matching declarations from every stylesheet.
        let mut matches = Vec::new();
        for stylesheet in stylesheets {
            stylesheet.get_matches(&node, &mut match_ctx, &mut matches);
        }

        // Apply in increasing specificity, so more specific declarations win
        // (the sort is stable, so document order breaks ties).
        matches.sort_by_key(|m| m.specificity);

        for m in matches {
            node.borrow_mut().apply_style_declaration(m.declaration);
        }

        node.borrow_mut().set_style_attribute();
    }

    let values = ComputedValues::default();
    root.cascade(&values);
}
Copy the stylesheet's origin into the Match struct
//! Representation of CSS types, and the CSS parsing and matching engine.
//!
//! # Terminology
//!
//! Consider a CSS **stylesheet** like this:
//!
//! ```ignore
//! @import url("another.css");
//!
//! foo, .bar {
//! fill: red;
//! stroke: green;
//! }
//!
//! #baz { stroke-width: 42; }
//!
//! ```
//! The example contains three **rules**; the first one is an **at-rule**,
//! the other two are **qualified rules**.
//!
//! Each rule is made of two parts, a **prelude** and an optional **block**.
//! The prelude is the part until the first `{` or until `;`, depending on
//! whether a block is present. The block is the part between curly braces.
//!
//! Let's look at each rule:
//!
//! `@import` is an **at-rule**. This rule has a prelude, but no block.
//! There are other at-rules like `@media` and some of them may have a block,
//! but librsvg doesn't support those yet.
//!
//! The prelude of the following rule is `foo, .bar`.
//! It is a **selector list** with two **selectors**, one for
//! `foo` elements and one for elements that have the `bar` class.
//!
//! The content of the block between `{}` for a qualified rule is a
//! **declaration list**. The block of the first qualified rule contains two
//! **declarations**, one for the `fill` **property** and one for the
//! `stroke` property.
//!
//! After the first qualified rule, we have a second qualified rule with
//! a single selector for the `#baz` id, with a single declaration for the
//! `stroke-width` property.
//!
//! # Helper crates we use
//!
//! * `cssparser` crate as a CSS tokenizer, and some utilities to
//! parse CSS rules and declarations.
//!
//! * `selectors` crate for the representation of selectors and
//! selector lists, and for the matching engine.
//!
//! Both crates provide very generic implementations of their concepts,
//! and expect the caller to provide implementations of various traits,
//! and to provide types that represent certain things.
//!
//! For example, `cssparser` expects one to provide representations of
//! the following types:
//!
//! * A parsed CSS rule. For `fill: blue;` we have
//! `ParsedProperty::Fill(...)`.
//!
//! * A parsed selector list; we use `SelectorList` from the
//! `selectors` crate.
//!
//! In turn, the `selectors` crate needs a way to navigate and examine
//! one's implementation of an element tree. We provide `impl
//! selectors::Element for RsvgElement` for this. This implementation
//! has methods like "does this element have the id `#foo`", or "give
//! me the next sibling element".
//!
//! Finally, the matching engine ties all of this together with
//! `matches_selector()`. This takes an opaque representation of an
//! element, plus a selector, and returns a bool. We iterate through
//! the rules in the stylesheets and gather the matches; then sort the
//! matches by specificity and apply the result to each element.
use cssparser::*;
use selectors::attr::{AttrSelectorOperation, CaseSensitivity, NamespaceConstraint};
use selectors::matching::{ElementSelectorFlags, MatchingContext, MatchingMode, QuirksMode};
use selectors::{self, OpaqueElement, SelectorImpl, SelectorList};
use std::fmt;
use std::str;
use markup5ever::{namespace_url, ns, LocalName, Namespace, Prefix, QualName};
use url::Url;
use crate::allowed_url::AllowedUrl;
use crate::error::*;
use crate::io::{self, BinaryData};
use crate::node::{NodeCascade, NodeType, RsvgNode};
use crate::properties::{parse_property, ComputedValues, ParsedProperty};
use crate::text::NodeChars;
/// A parsed CSS declaration
///
/// For example, in the declaration `fill: green !important`, the
/// `prop_name` would be `fill`, the `property` would be
/// `ParsedProperty::Fill(...)` with the green value, and `important`
/// would be `true`.
pub struct Declaration {
    /// Qualified name of the property, e.g. `fill`.
    pub prop_name: QualName,
    /// The property's parsed value.
    pub property: ParsedProperty,
    /// Whether the declaration carried `!important`.
    pub important: bool,
}
/// Dummy struct required to use `cssparser::DeclarationListParser`
///
/// It implements `cssparser::DeclarationParser`, which knows how to parse
/// the property/value pairs from a CSS declaration.  It carries no state;
/// all parsing context lives in the `Parser` that is passed in.
pub struct DeclParser;
impl<'i> DeclarationParser<'i> for DeclParser {
    type Declaration = Declaration;
    type Error = ValueErrorKind;

    /// Parses a CSS declaration like `name: input_value [!important]`
    fn parse_value<'t>(
        &mut self,
        name: CowRcStr<'i>,
        input: &mut Parser<'i, 't>,
    ) -> Result<Declaration, cssparser::ParseError<'i, ValueErrorKind>> {
        // Property names are placed in the SVG namespace.
        let prop_name = QualName::new(None, ns!(svg), LocalName::from(name.as_ref()));
        let property =
            parse_property(&prop_name, input, true).map_err(|e| input.new_custom_error(e))?;
        // A trailing `!important` is optional; its absence is not an error.
        let important = input.try_parse(parse_important).is_ok();
        Ok(Declaration {
            prop_name,
            property,
            important,
        })
    }
}
// cssparser's DeclarationListParser requires this; we just use the dummy
// implementations from cssparser itself. We may want to provide a real
// implementation in the future, although this may require keeping track of the
// CSS parsing state like Servo does.
impl<'i> AtRuleParser<'i> for DeclParser {
    type PreludeBlock = ();
    type PreludeNoBlock = ();
    // Never actually constructed: with only the default trait methods,
    // cssparser rejects every at-rule inside a declaration list.
    type AtRule = Declaration;
    type Error = ValueErrorKind;
}
/// Dummy struct to implement cssparser::QualifiedRuleParser and
/// cssparser::AtRuleParser
///
/// Stateless; an instance is handed to `RuleListParser` in `Stylesheet::parse`.
pub struct RuleParser;
/// Errors from the CSS parsing process
pub enum CssParseErrorKind<'i> {
    /// A selector list failed to parse.
    Selector(selectors::parser::SelectorParseErrorKind<'i>),
    /// A property value failed to parse.
    Value(ValueErrorKind),
}
impl<'i> From<selectors::parser::SelectorParseErrorKind<'i>> for CssParseErrorKind<'i> {
    /// Wraps a selector parse error, so `?` can convert it automatically.
    fn from(e: selectors::parser::SelectorParseErrorKind) -> CssParseErrorKind {
        CssParseErrorKind::Selector(e)
    }
}
/// A CSS qualified rule (or ruleset)
///
/// E.g. `foo, .bar { fill: red; }` — a selector list plus the
/// declarations from its block.
pub struct QualifiedRule {
    selectors: SelectorList<Selector>,
    declarations: Vec<Declaration>,
}
/// Prelude of at-rule used in the AtRuleParser.
pub enum AtRulePrelude {
    /// `@import`, carrying the referenced URL.
    Import(String),
}
/// A CSS at-rule
///
/// Only `@import` is supported so far.
pub enum AtRule {
    /// `@import`, carrying the referenced URL.
    Import(String),
}
/// A CSS rule: either an at-rule or a qualified rule (ruleset).
pub enum Rule {
    AtRule(AtRule),
    QualifiedRule(QualifiedRule),
}
// Required to implement the `Prelude` associated type in `cssparser::QualifiedRuleParser`
impl<'i> selectors::Parser<'i> for RuleParser {
    type Impl = Selector;
    type Error = CssParseErrorKind<'i>;

    /// Selectors without an explicit namespace resolve to the SVG namespace.
    fn default_namespace(&self) -> Option<<Self::Impl as SelectorImpl>::NamespaceUrl> {
        Some(ns!(svg))
    }

    /// Namespace prefixes in selectors are not resolved yet.
    fn namespace_for_prefix(
        &self,
        _prefix: &<Self::Impl as SelectorImpl>::NamespacePrefix,
    ) -> Option<<Self::Impl as SelectorImpl>::NamespaceUrl> {
        // FIXME: Do we need to keep a lookup table extracted from libxml2's
        // XML namespaces?
        //
        // Or are CSS namespaces completely different, declared elsewhere?
        None
    }
}
// `cssparser::RuleListParser` is a struct which requires that we
// provide a type that implements `cssparser::QualifiedRuleParser`.
//
// In turn, `cssparser::QualifiedRuleParser` requires that we
// implement a way to parse the `Prelude` of a ruleset or rule. For
// example, in this ruleset:
//
// ```ignore
// foo, .bar { fill: red; stroke: green; }
// ```
//
// The prelude is the selector list with the `foo` and `.bar` selectors.
//
// The `parse_prelude` method just uses `selectors::SelectorList`. This
// is what requires the `impl selectors::Parser for RuleParser`.
//
// Next, the `parse_block` method takes an already-parsed prelude (a selector list),
// and tries to parse the block between braces. It creates a `Rule` out of
// the selector list and the declaration list.
impl<'i> QualifiedRuleParser<'i> for RuleParser {
    type Prelude = SelectorList<Selector>;
    type QualifiedRule = Rule;
    type Error = CssParseErrorKind<'i>;

    /// Parses the selector list that precedes a `{}` block.
    fn parse_prelude<'t>(
        &mut self,
        input: &mut Parser<'i, 't>,
    ) -> Result<Self::Prelude, cssparser::ParseError<'i, Self::Error>> {
        SelectorList::parse(self, input)
    }

    /// Parses the declaration block and pairs it with the selector list.
    fn parse_block<'t>(
        &mut self,
        prelude: Self::Prelude,
        _location: SourceLocation,
        input: &mut Parser<'i, 't>,
    ) -> Result<Self::QualifiedRule, cssparser::ParseError<'i, Self::Error>> {
        let declarations = DeclarationListParser::new(input, DeclParser)
            .filter_map(Result::ok) // ignore invalid property name or value
            .collect();
        Ok(Rule::QualifiedRule(QualifiedRule {
            selectors: prelude,
            declarations,
        }))
    }
}
// Required by `cssparser::RuleListParser`.
//
// This only handles the `@import` at-rule.
impl<'i> AtRuleParser<'i> for RuleParser {
    type PreludeBlock = ();
    type PreludeNoBlock = AtRulePrelude;
    type AtRule = Rule;
    type Error = CssParseErrorKind<'i>;

    /// Recognizes `@import` preludes; every other at-rule is an error.
    fn parse_prelude<'t>(
        &mut self,
        name: CowRcStr<'i>,
        input: &mut Parser<'i, 't>,
    ) -> Result<AtRuleType<Self::PreludeNoBlock, Self::PreludeBlock>, ParseError<'i, Self::Error>>
    {
        match_ignore_ascii_case! { &name,
            "import" => {
                // FIXME: at the moment we ignore media queries
                let url = input.expect_url_or_string()?.as_ref().to_owned();
                Ok(AtRuleType::WithoutBlock(AtRulePrelude::Import(url)))
            },
            _ => Err(input.new_error(BasicParseErrorKind::AtRuleInvalid(name))),
        }
    }

    /// Converts an `@import` prelude (which has no block) into a `Rule`.
    fn rule_without_block(
        &mut self,
        prelude: Self::PreludeNoBlock,
        _location: SourceLocation,
    ) -> Self::AtRule {
        let AtRulePrelude::Import(url) = prelude;
        Rule::AtRule(AtRule::Import(url))
    }
}
/// Dummy type required by the SelectorImpl trait.
///
/// Non-tree-structural pseudo-classes are not supported, so this type is
/// never actually constructed.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct NonTSPseudoClass;
impl ToCss for NonTSPseudoClass {
    /// Serializes nothing; no pseudo-class is ever represented.
    fn to_css<W>(&self, _dest: &mut W) -> fmt::Result
    where
        W: fmt::Write,
    {
        Ok(())
    }
}
impl selectors::parser::NonTSPseudoClass for NonTSPseudoClass {
    type Impl = Selector;

    /// `:active` / `:hover` never match.
    fn is_active_or_hover(&self) -> bool {
        false
    }

    /// User-action states never match.
    fn is_user_action_state(&self) -> bool {
        false
    }
}
/// Dummy type required by the SelectorImpl trait
///
/// Pseudo-elements are not supported, so this type is never constructed.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct PseudoElement;
impl ToCss for PseudoElement {
    /// Serializes nothing; no pseudo-element is ever represented.
    fn to_css<W>(&self, _dest: &mut W) -> fmt::Result
    where
        W: fmt::Write,
    {
        Ok(())
    }
}
// Minimal impl: only the associated `Impl` type is required.
impl selectors::parser::PseudoElement for PseudoElement {
    type Impl = Selector;
}
/// Holds all the types for the SelectorImpl trait
///
/// A unit struct used only as a type parameter; it is never instantiated.
#[derive(Debug, Clone)]
pub struct Selector;
impl SelectorImpl for Selector {
    type ExtraMatchingData = ();
    // Attribute values are plain strings; names use markup5ever's
    // interned LocalName / Namespace / Prefix types.
    type AttrValue = String;
    type Identifier = LocalName;
    type ClassName = LocalName;
    type PartName = LocalName;
    type LocalName = LocalName;
    type NamespaceUrl = Namespace;
    type NamespacePrefix = Prefix;
    type BorrowedNamespaceUrl = Namespace;
    type BorrowedLocalName = LocalName;
    type NonTSPseudoClass = NonTSPseudoClass;
    type PseudoElement = PseudoElement;
}
/// Wraps an `RsvgNode` with a locally-defined type, so we can implement
/// a foreign trait on it.
///
/// RsvgNode is an alias for rctree::Node, so we can't implement
/// `selectors::Element` directly on it. We implement it on the
/// `RsvgElement` wrapper instead.
#[derive(Clone)]
pub struct RsvgElement(RsvgNode);
impl From<RsvgNode> for RsvgElement {
    /// Wraps a node; used when walking parents/siblings during matching.
    fn from(n: RsvgNode) -> RsvgElement {
        RsvgElement(n)
    }
}
impl fmt::Debug for RsvgElement {
    /// Delegates to the wrapped node's `Display` implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0.borrow())
    }
}
// The selectors crate uses this to examine our tree of elements.
impl selectors::Element for RsvgElement {
    type Impl = Selector;

    /// Converts self into an opaque representation.
    fn opaque(&self) -> OpaqueElement {
        OpaqueElement::new(&self.0.borrow())
    }

    fn parent_element(&self) -> Option<Self> {
        self.0.parent().map(|n| n.into())
    }

    /// Whether the parent node of this element is a shadow root.
    fn parent_node_is_shadow_root(&self) -> bool {
        // unsupported
        false
    }

    /// The host of the containing shadow root, if any.
    fn containing_shadow_host(&self) -> Option<Self> {
        // unsupported
        None
    }

    /// Whether we're matching on a pseudo-element.
    fn is_pseudo_element(&self) -> bool {
        // unsupported
        false
    }

    /// Returns the closest previous sibling that is an element,
    /// skipping non-element (Chars) nodes.
    fn prev_sibling_element(&self) -> Option<Self> {
        let mut sibling = self.0.previous_sibling();

        while let Some(sib) = sibling {
            if sib.borrow().get_type() != NodeType::Chars {
                return Some(sib.into());
            }

            // Bug fix: advance from the sibling we just examined.  The old
            // code re-read self.0.previous_sibling(), which yields the same
            // node every time and looped forever whenever the immediate
            // previous sibling was a Chars node.
            sibling = sib.previous_sibling();
        }

        None
    }

    /// Returns the closest next sibling that is an element,
    /// skipping non-element (Chars) nodes.
    fn next_sibling_element(&self) -> Option<Self> {
        let mut sibling = self.0.next_sibling();

        while let Some(sib) = sibling {
            if sib.borrow().get_type() != NodeType::Chars {
                return Some(sib.into());
            }

            // Same fix as in prev_sibling_element(): walk from `sib`,
            // not from `self`.
            sibling = sib.next_sibling();
        }

        None
    }

    /// This is an SVG tree, never an HTML document; always false.
    fn is_html_element_in_html_document(&self) -> bool {
        false
    }

    /// Whether this element's local (tag) name equals `local_name`.
    fn has_local_name(&self, local_name: &LocalName) -> bool {
        self.0.borrow().element_name().local == *local_name
    }

    /// Empty string for no namespace
    fn has_namespace(&self, ns: &Namespace) -> bool {
        self.0.borrow().element_name().ns == *ns
    }

    /// Whether this element and the `other` element have the same local name and namespace.
    fn is_same_type(&self, other: &Self) -> bool {
        self.0.borrow().element_name() == other.0.borrow().element_name()
    }

    /// Attribute selectors (`[attr=...]`) are not supported; always false.
    fn attr_matches(
        &self,
        _ns: &NamespaceConstraint<&Namespace>,
        _local_name: &LocalName,
        _operation: &AttrSelectorOperation<&String>,
    ) -> bool {
        // unsupported
        false
    }

    /// Non-tree-structural pseudo-classes are not supported; always false.
    fn match_non_ts_pseudo_class<F>(
        &self,
        _pc: &<Self::Impl as SelectorImpl>::NonTSPseudoClass,
        _context: &mut MatchingContext<Self::Impl>,
        _flags_setter: &mut F,
    ) -> bool
    where
        F: FnMut(&Self, ElementSelectorFlags),
    {
        // unsupported
        false
    }

    /// Pseudo-elements are not supported; always false.
    fn match_pseudo_element(
        &self,
        _pe: &<Self::Impl as SelectorImpl>::PseudoElement,
        _context: &mut MatchingContext<Self::Impl>,
    ) -> bool {
        // unsupported
        false
    }

    /// Whether this element is a `link`.
    fn is_link(&self) -> bool {
        // FIXME: is this correct for SVG <a>, not HTML <a>?
        self.0.borrow().get_type() == NodeType::Link
    }

    /// Returns whether the element is an HTML <slot> element.
    fn is_html_slot_element(&self) -> bool {
        false
    }

    /// Whether this element's `id` attribute equals `id`.
    fn has_id(&self, id: &LocalName, case_sensitivity: CaseSensitivity) -> bool {
        self.0
            .borrow()
            .get_id()
            .map(|self_id| case_sensitivity.eq(self_id.as_bytes(), id.as_ref().as_bytes()))
            .unwrap_or(false)
    }

    /// Whether `name` appears among this element's whitespace-separated classes.
    fn has_class(&self, name: &LocalName, case_sensitivity: CaseSensitivity) -> bool {
        self.0
            .borrow()
            .get_class()
            .map(|classes| {
                classes
                    .split_whitespace()
                    .any(|class| case_sensitivity.eq(class.as_bytes(), name.as_bytes()))
            })
            .unwrap_or(false)
    }

    /// `::part()` matching is not supported; always false.
    fn is_part(&self, _name: &LocalName) -> bool {
        // unsupported
        false
    }

    /// Returns whether this element matches `:empty`.
    ///
    /// That is, whether it does not contain any child element or any non-zero-length text node.
    /// See http://dev.w3.org/csswg/selectors-3/#empty-pseudo
    fn is_empty(&self) -> bool {
        !self.0.has_children()
            || self.0.children().all(|child| {
                child.borrow().get_type() == NodeType::Chars
                    && child.borrow().get_impl::<NodeChars>().is_empty()
            })
    }

    /// Returns whether this element matches `:root`,
    /// i.e. whether it is the root element of a document.
    ///
    /// Note: this can be false even if `.parent_element()` is `None`
    /// if the parent node is a `DocumentFragment`.
    fn is_root(&self) -> bool {
        self.0.parent().is_none()
    }
}
/// Origin for a stylesheet, per https://www.w3.org/TR/CSS22/cascade.html#cascading-order
///
/// This is used when sorting selector matches according to their origin and specificity.
#[derive(Copy, Clone)]
pub enum Origin {
    /// Styles built into the user agent (lowest priority for normal declarations).
    UserAgent,
    /// Styles supplied by the user.
    User,
    /// Styles supplied by the document author (highest priority for normal declarations).
    Author,
}
/// A parsed CSS stylesheet
pub struct Stylesheet {
    /// Where the stylesheet comes from; copied into each `Match` for cascading.
    origin: Origin,
    /// The parsed rulesets, in document order.
    qualified_rules: Vec<QualifiedRule>,
}
/// A match during the selector matching process
///
/// This struct comes from `Stylesheet.get_matches()`, and represents
/// that a certain node matched a CSS rule which has a selector with a
/// certain `specificity`. The stylesheet's `origin` is also given here
/// to aid sorting the results.
struct Match<'a> {
    /// The matched declaration, borrowed from the stylesheet's rules.
    declaration: &'a Declaration,
    /// Specificity of the selector that produced the match.
    specificity: u32,
    /// Origin of the stylesheet the declaration came from.
    origin: Origin,
}
impl Stylesheet {
    /// Creates an empty stylesheet with the given origin.
    pub fn new(origin: Origin) -> Stylesheet {
        Stylesheet {
            origin,
            qualified_rules: Vec::new(),
        }
    }

    /// Parses a stylesheet from an in-memory string.
    ///
    /// `base_url` is used to resolve any `@import` rules found in `buf`.
    pub fn from_data(
        buf: &str,
        base_url: Option<&Url>,
        origin: Origin,
    ) -> Result<Self, LoadingError> {
        let mut stylesheet = Stylesheet::new(origin);
        stylesheet.parse(buf, base_url)?;
        Ok(stylesheet)
    }

    /// Loads and parses a stylesheet referenced by `href`.
    pub fn from_href(
        href: &str,
        base_url: Option<&Url>,
        origin: Origin,
    ) -> Result<Self, LoadingError> {
        let mut stylesheet = Stylesheet::new(origin);
        stylesheet.load(href, base_url)?;
        Ok(stylesheet)
    }

    /// Parses a CSS stylesheet from a string
    ///
    /// The `base_url` is required for `@import` rules, so that librsvg
    /// can determine if the requested path is allowed.
    fn parse(&mut self, buf: &str, base_url: Option<&Url>) -> Result<(), LoadingError> {
        let mut input = ParserInput::new(buf);
        let mut parser = Parser::new(&mut input);
        RuleListParser::new_for_stylesheet(&mut parser, RuleParser)
            .filter_map(Result::ok) // ignore invalid rules
            .for_each(|rule| match rule {
                Rule::AtRule(AtRule::Import(url)) => {
                    // ignore invalid imports
                    let _ = self.load(&url, base_url);
                }
                Rule::QualifiedRule(qr) => self.qualified_rules.push(qr),
            });
        Ok(())
    }

    /// Parses a stylesheet referenced by an URL
    fn load(&mut self, href: &str, base_url: Option<&Url>) -> Result<(), LoadingError> {
        let aurl = AllowedUrl::from_href(href, base_url).map_err(|_| LoadingError::BadUrl)?;
        io::acquire_data(&aurl, None)
            .and_then(|data| {
                let BinaryData {
                    data: bytes,
                    content_type,
                } = data;
                // Only accept data explicitly labeled as CSS.
                if content_type.as_ref().map(String::as_ref) == Some("text/css") {
                    Ok(bytes)
                } else {
                    rsvg_log!("\"{}\" is not of type text/css; ignoring", aurl);
                    Err(LoadingError::BadCss)
                }
            })
            .and_then(|bytes| {
                // The bytes must be valid UTF-8 before they can be tokenized.
                String::from_utf8(bytes).map_err(|_| {
                    rsvg_log!(
                        "\"{}\" does not contain valid UTF-8 CSS data; ignoring",
                        aurl
                    );
                    LoadingError::BadCss
                })
            })
            .and_then(|utf8| self.parse(&utf8, base_url))
    }

    /// Appends the style declarations that match a specified node to a given vector
    fn get_matches<'a>(
        &'a self,
        node: &RsvgNode,
        match_ctx: &mut MatchingContext<Selector>,
        acc: &mut Vec<Match<'a>>,
    ) {
        for rule in &self.qualified_rules {
            // `.0` is the Vec of selectors inside the SelectorList.
            for selector in &rule.selectors.0 {
                // This magic call is stolen from selectors::matching::matches_selector_list()
                if selectors::matching::matches_selector(
                    selector,
                    0,
                    None,
                    &RsvgElement(node.clone()),
                    match_ctx,
                    &mut |_, _| {},
                ) {
                    // Every declaration of a matching rule yields one Match,
                    // tagged with this stylesheet's origin.
                    for decl in rule.declarations.iter() {
                        acc.push(Match {
                            declaration: decl,
                            specificity: selector.specificity(),
                            origin: self.origin,
                        });
                    }
                }
            }
        }
    }
}
/// Runs the CSS cascade on the specified tree from all the stylesheets
pub fn cascade(root: &mut RsvgNode, stylesheets: &[Stylesheet]) {
for mut node in root.descendants() {
let mut matches = Vec::new();
let mut match_ctx = MatchingContext::new(
MatchingMode::Normal,
// FIXME: how the fuck does one set up a bloom filter here?
None,
// n_index_cache,
None,
QuirksMode::NoQuirks,
);
for stylesheet in stylesheets {
stylesheet.get_matches(&node, &mut match_ctx, &mut matches);
}
matches
.as_mut_slice()
.sort_by(|a, b| a.specificity.cmp(&b.specificity));
for m in matches {
node.borrow_mut().apply_style_declaration(m.declaration);
}
node.borrow_mut().set_style_attribute();
}
let values = ComputedValues::default();
root.cascade(&values);
}
|
use std::io::fs::File;
use gl;
use gl::types::GLuint;
use libc::c_void;
use image;
use image::Image;
use graphics::ImageSize;
/// Wraps OpenGL texture data.
/// The texture gets deleted when running out of scope.
pub struct Texture {
    /// OpenGL texture object name, as returned by glGenTextures.
    id: GLuint,
    /// Width in pixels.
    width: u32,
    /// Height in pixels.
    height: u32,
}
impl Texture {
    /// Creates a new texture.
    #[inline(always)]
    pub fn new(id: GLuint, width: u32, height: u32) -> Texture {
        Texture {
            id: id,
            width: width,
            height: height,
        }
    }

    /// Gets the OpenGL id of the texture.
    #[inline(always)]
    pub fn get_id(&self) -> GLuint {
        self.id
    }

    /// Loads image from memory, the format is 8-bit greyscale.
    ///
    /// Each input byte becomes the alpha channel of an opaque-white RGBA texel.
    pub fn from_memory_alpha(buf: &[u8], width: u32, height: u32) -> Result<Texture, String> {
        // Reserve the exact RGBA size up front to avoid repeated reallocation.
        let mut pixels = Vec::with_capacity(buf.len() * 4);
        for alpha in buf.iter() {
            pixels.push(255);
            pixels.push(255);
            pixels.push(255);
            pixels.push(*alpha);
        }

        let mut id: GLuint = 0;
        unsafe {
            gl::GenTextures(1, &mut id);
            gl::BindTexture(gl::TEXTURE_2D, id);
            gl::TexParameteri(
                gl::TEXTURE_2D,
                gl::TEXTURE_MIN_FILTER,
                gl::LINEAR as i32
            );
            gl::TexParameteri(
                gl::TEXTURE_2D,
                gl::TEXTURE_MAG_FILTER,
                gl::LINEAR as i32
            );
            gl::TexImage2D(
                gl::TEXTURE_2D,
                0,
                gl::RGBA as i32,
                width as i32,
                height as i32,
                0,
                gl::RGBA,
                gl::UNSIGNED_BYTE,
                pixels.as_ptr() as *const c_void
            );
        }

        Ok(Texture::new(id, width, height))
    }

    /// Loads image by relative file name to the asset root.
    ///
    /// Only 8-bit RGBA PNGs are supported; other color types are reported
    /// as an `Err` instead of aborting the task.
    pub fn from_path(path: &Path) -> Result<Texture, String> {
        let fin = match File::open(path) {
            Ok(fin) => fin,
            Err(e) => return Err(format!("Could not load '{}': {}",
                path.filename_str().unwrap(), e)),
        };
        let img = match Image::load(fin, image::PNG) {
            Ok(img) => img,
            Err(e) => return Err(format!("Could not load '{}': {}",
                path.filename_str().unwrap(), e)),
        };

        // Bug fix: this function returns a Result, so an unsupported color
        // type must be reported as an error instead of `fail!()`ing.
        match img.colortype() {
            image::RGBA(8) => {},
            c => return Err(format!("Unsupported color type {} in png", c)),
        };

        let (width, height) = img.dimensions();
        let mut id: GLuint = 0;
        unsafe {
            gl::GenTextures(1, &mut id);
            gl::BindTexture(gl::TEXTURE_2D, id);
            gl::TexParameteri(
                gl::TEXTURE_2D,
                gl::TEXTURE_MIN_FILTER,
                gl::LINEAR as i32
            );
            gl::TexParameteri(
                gl::TEXTURE_2D,
                gl::TEXTURE_MAG_FILTER,
                gl::LINEAR as i32
            );
            gl::TexImage2D(
                gl::TEXTURE_2D,
                0,
                gl::RGBA as i32,
                width as i32,
                height as i32,
                0,
                gl::RGBA,
                gl::UNSIGNED_BYTE,
                img.raw_pixels().as_ptr() as *const c_void
            );
        }

        Ok(Texture::new(id, width, height))
    }
}
impl Drop for Texture {
    /// Frees the GL texture object when the wrapper goes out of scope.
    fn drop(&mut self) {
        unsafe {
            // `self.id` was produced by gl::GenTextures in the constructors,
            // so deleting it here is the matching cleanup.
            // NOTE(review): requires a current GL context at drop time —
            // confirm callers drop textures while the context is alive.
            gl::DeleteTextures(1, [self.id].as_ptr());
        }
    }
}
impl ImageSize for Texture {
    /// Returns (width, height) in pixels.
    fn get_size(&self) -> (u32, u32) {
        (self.width, self.height)
    }
}
Upgraded to latest rust-image
use gl;
use gl::types::GLuint;
use libc::c_void;
use image;
use image::GenericImage;
use graphics::ImageSize;
/// Wraps OpenGL texture data.
/// The texture gets deleted when running out of scope.
pub struct Texture {
    /// OpenGL texture object name, as returned by glGenTextures.
    id: GLuint,
    /// Width in pixels.
    width: u32,
    /// Height in pixels.
    height: u32,
}
impl Texture {
    /// Creates a new texture.
    #[inline(always)]
    pub fn new(id: GLuint, width: u32, height: u32) -> Texture {
        Texture {
            id: id,
            width: width,
            height: height,
        }
    }

    /// Gets the OpenGL id of the texture.
    #[inline(always)]
    pub fn get_id(&self) -> GLuint {
        self.id
    }

    /// Loads image from memory, the format is 8-bit greyscale.
    ///
    /// Each input byte becomes the alpha channel of an opaque-white RGBA texel.
    pub fn from_memory_alpha(buf: &[u8], width: u32, height: u32) -> Result<Texture, String> {
        // Reserve the exact RGBA size up front to avoid repeated reallocation.
        let mut pixels = Vec::with_capacity(buf.len() * 4);
        for alpha in buf.iter() {
            pixels.push(255);
            pixels.push(255);
            pixels.push(255);
            pixels.push(*alpha);
        }

        let mut id: GLuint = 0;
        unsafe {
            gl::GenTextures(1, &mut id);
            gl::BindTexture(gl::TEXTURE_2D, id);
            gl::TexParameteri(
                gl::TEXTURE_2D,
                gl::TEXTURE_MIN_FILTER,
                gl::LINEAR as i32
            );
            gl::TexParameteri(
                gl::TEXTURE_2D,
                gl::TEXTURE_MAG_FILTER,
                gl::LINEAR as i32
            );
            gl::TexImage2D(
                gl::TEXTURE_2D,
                0,
                gl::RGBA as i32,
                width as i32,
                height as i32,
                0,
                gl::RGBA,
                gl::UNSIGNED_BYTE,
                pixels.as_ptr() as *const c_void
            );
        }

        Ok(Texture::new(id, width, height))
    }

    /// Loads image by relative file name to the asset root.
    ///
    /// Only 8-bit RGBA images are supported; other color types are reported
    /// as an `Err` instead of aborting the task.
    pub fn from_path(path: &Path) -> Result<Texture, String> {
        let img = match image::open(path) {
            Ok(img) => img,
            Err(e) => return Err(format!("Could not load '{}': {}",
                path.filename_str().unwrap(), e)),
        };

        // Bug fix: this function returns a Result, so an unsupported color
        // type must be reported as an error instead of `fail!()`ing.
        match img.color() {
            image::RGBA(8) => {},
            c => return Err(format!("Unsupported color type {} in png", c)),
        };

        let (width, height) = img.dimensions();
        let mut id: GLuint = 0;
        unsafe {
            gl::GenTextures(1, &mut id);
            gl::BindTexture(gl::TEXTURE_2D, id);
            gl::TexParameteri(
                gl::TEXTURE_2D,
                gl::TEXTURE_MIN_FILTER,
                gl::LINEAR as i32
            );
            gl::TexParameteri(
                gl::TEXTURE_2D,
                gl::TEXTURE_MAG_FILTER,
                gl::LINEAR as i32
            );
            gl::TexImage2D(
                gl::TEXTURE_2D,
                0,
                gl::RGBA as i32,
                width as i32,
                height as i32,
                0,
                gl::RGBA,
                gl::UNSIGNED_BYTE,
                img.raw_pixels().as_ptr() as *const c_void
            );
        }

        Ok(Texture::new(id, width, height))
    }
}
impl Drop for Texture {
    /// Frees the GL texture object when the wrapper goes out of scope.
    fn drop(&mut self) {
        unsafe {
            // `self.id` was produced by gl::GenTextures in the constructors,
            // so deleting it here is the matching cleanup.
            // NOTE(review): requires a current GL context at drop time —
            // confirm callers drop textures while the context is alive.
            gl::DeleteTextures(1, [self.id].as_ptr());
        }
    }
}
impl ImageSize for Texture {
    /// Returns (width, height) in pixels.
    fn get_size(&self) -> (u32, u32) {
        (self.width, self.height)
    }
}
|
use cssparser::Parser;
use parsers::Parse;
use parsers::ParseError;
/// A CSS IRI reference value: either `none` or a `url(...)`.
#[derive(Debug, Clone, PartialEq)]
pub enum IRI {
    /// The `none` keyword.
    None,
    /// A `url(...)` with its contents.
    Resource(String),
}
impl Default for IRI {
    /// The default value is `none`.
    fn default() -> IRI {
        IRI::None
    }
}
impl Parse for IRI {
    type Data = ();
    type Err = ParseError;

    /// Parses `none` into `IRI::None`, or a `url(...)` into `IRI::Resource`.
    ///
    /// The whole input must be consumed; trailing tokens after the url are an error.
    fn parse(parser: &mut Parser, _: Self::Data) -> Result<IRI, ParseError> {
        if parser.try(|i| i.expect_ident_matching("none")).is_ok() {
            Ok(IRI::None)
        } else {
            let url = parser
                .expect_url()
                .map_err(|_| ParseError::new("expected url"))?;
            // Reject any tokens following the url().
            parser
                .expect_exhausted()
                .map_err(|_| ParseError::new("expected url"))?;
            Ok(IRI::Resource(url.as_ref().to_owned()))
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parses_none() {
        assert_eq!(IRI::parse_str("none", ()), Ok(IRI::None));
    }

    #[test]
    fn parses_url() {
        assert_eq!(
            IRI::parse_str("url(foo)", ()),
            Ok(IRI::Resource("foo".to_string()))
        );
        // be permissive if the closing ) is missing
        assert_eq!(
            IRI::parse_str("url(", ()),
            Ok(IRI::Resource("".to_string()))
        );
        assert_eq!(
            IRI::parse_str("url(foo", ()),
            Ok(IRI::Resource("foo".to_string()))
        );
        // neither "none" nor a url() -> parse error
        assert!(IRI::parse_str("", ()).is_err());
        assert!(IRI::parse_str("foo", ()).is_err());
        // trailing tokens after the url() are rejected
        assert!(IRI::parse_str("url(foo)bar", ()).is_err());
    }
}
IRI::get() - new method
use cssparser::Parser;
use parsers::Parse;
use parsers::ParseError;
/// A CSS IRI reference value: either `none` or a `url(...)`.
#[derive(Debug, Clone, PartialEq)]
pub enum IRI {
    /// The `none` keyword.
    None,
    /// A `url(...)` with its contents.
    Resource(String),
}
impl Default for IRI {
    /// The default value is `none`.
    fn default() -> IRI {
        IRI::None
    }
}
impl IRI {
    /// Returns the contents of an `IRI::Resource`, or `None`
    pub fn get(&self) -> Option<&str> {
        if let IRI::Resource(ref resource) = *self {
            Some(resource.as_str())
        } else {
            None
        }
    }
}
impl Parse for IRI {
    type Data = ();
    type Err = ParseError;

    /// Parses `none` into `IRI::None`, or a `url(...)` into `IRI::Resource`.
    ///
    /// The whole input must be consumed; trailing tokens after the url are an error.
    fn parse(parser: &mut Parser, _: Self::Data) -> Result<IRI, ParseError> {
        if parser.try(|i| i.expect_ident_matching("none")).is_ok() {
            Ok(IRI::None)
        } else {
            let url = parser
                .expect_url()
                .map_err(|_| ParseError::new("expected url"))?;
            // Reject any tokens following the url().
            parser
                .expect_exhausted()
                .map_err(|_| ParseError::new("expected url"))?;
            Ok(IRI::Resource(url.as_ref().to_owned()))
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parses_none() {
        assert_eq!(IRI::parse_str("none", ()), Ok(IRI::None));
    }

    #[test]
    fn parses_url() {
        assert_eq!(
            IRI::parse_str("url(foo)", ()),
            Ok(IRI::Resource("foo".to_string()))
        );
        // be permissive if the closing ) is missing
        assert_eq!(
            IRI::parse_str("url(", ()),
            Ok(IRI::Resource("".to_string()))
        );
        assert_eq!(
            IRI::parse_str("url(foo", ()),
            Ok(IRI::Resource("foo".to_string()))
        );
        // neither "none" nor a url() -> parse error
        assert!(IRI::parse_str("", ()).is_err());
        assert!(IRI::parse_str("foo", ()).is_err());
        // trailing tokens after the url() are rejected
        assert!(IRI::parse_str("url(foo)bar", ()).is_err());
    }

    #[test]
    fn get() {
        assert_eq!(IRI::None.get(), None);
        assert_eq!(IRI::Resource(String::from("foo")).get(), Some("foo"));
    }
}
|
use std::cmp::max;
use std::collections::HashMap;
use std::string::ToString;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::render::{BlendMode, Texture, TextureCreator, WindowCanvas};
use sdl2::surface::Surface;
use sdl2::ttf::Font as Sdl2Font;
use sdl2::video::WindowContext;
use crate::ui::types::ScreenText;
const ASCII: &str = " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
/// A bitmap font: one atlas texture plus per-glyph metrics and a text cache.
pub struct Font<'a> {
    /// Atlas texture containing every `ASCII` glyph in a single row.
    texture: Texture<'a>,
    /// Per-character metrics, in the same order as `ASCII`.
    glyphs: Vec<GlyphRegion>,
    /// Height of one rendered line, in pixels.
    line_height: u32,
    /// Horizontal advance of the space character.
    space_advance: i32,
    texture_creator: &'a TextureCreator<WindowContext>,
    /// Cache of already-rendered text: text -> (texture, width, height).
    cached_texts: HashMap<String, (Texture<'a>, u32, u32)>,
}
/// Location and metrics of a single glyph inside the atlas texture.
struct GlyphRegion {
    /// X offset of the glyph within the atlas.
    start: i32,
    /// Horizontal advance to the next glyph, in pixels.
    advance: i32,
    /// Glyph width in the atlas.
    width: u32,
    /// Glyph height in the atlas.
    height: u32,
}
impl<'a> Font<'a> {
    /// Builds a bitmap font by rendering every `ASCII` glyph once into a
    /// single horizontal atlas texture and recording per-glyph metrics.
    ///
    /// Returns an `Err` if any `ASCII` character has no glyph in `font`,
    /// or if SDL fails while measuring/rendering.
    pub fn from_font(
        texture_creator: &'a TextureCreator<sdl2::video::WindowContext>,
        font: Sdl2Font,
    ) -> Result<Self, String> {
        let mut total_width = 0;
        let mut total_height = 0;
        let mut glyphs: Vec<GlyphRegion> = Vec::with_capacity(ASCII.len());
        let mut space_advance = 0;

        // First pass: measure every glyph and compute the atlas dimensions.
        for c in ASCII.chars() {
            if let Some(metric) = font.find_glyph_metrics(c) {
                let (w, h) = font.size_of_char(c).map_err(to_string)?;
                glyphs.push(GlyphRegion {
                    start: total_width as i32,
                    width: w,
                    height: h,
                    advance: metric.advance,
                });
                if c == ' ' {
                    space_advance = metric.advance;
                }
                total_width += w;
                // Bug fix: the atlas must be as tall as the *tallest* glyph.
                // This used to be a plain assignment, which left the height
                // of whichever glyph happened to be measured last.
                total_height = max(total_height, h);
            } else {
                return Err(format!("Unsupported character: {}", c));
            }
        }

        // Second pass: blit each glyph into the atlas surface.
        let mut font_canvas = Surface::new(
            total_width,
            total_height,
            texture_creator.default_pixel_format(),
        )?
        .into_canvas()?;

        let font_texture_creator = font_canvas.texture_creator();
        let mut x = 0;
        for (i, c) in ASCII.char_indices() {
            let GlyphRegion { width, .. } = glyphs[i];
            let char_surface = font
                .render(&c.to_string())
                .blended(Color::RGBA(255, 255, 255, 255))
                .map_err(to_string)?;
            let char_tex = font_texture_creator
                .create_texture_from_surface(&char_surface)
                .map_err(to_string)?;
            let target = Rect::new(x, 0, width, total_height);
            font_canvas.copy(&char_tex, None, Some(target))?;
            x += width as i32;
        }

        let texture = texture_creator
            .create_texture_from_surface(font_canvas.into_surface())
            .map_err(to_string)?;

        Ok(Font {
            texture,
            glyphs,
            line_height: total_height,
            space_advance,
            texture_creator,
            cached_texts: HashMap::new(),
        })
    }

    /// Draws `screen_txt` onto the canvas, rendering and caching a texture
    /// for the text the first time it is seen.
    ///
    /// NOTE(review): the cache key is only the text content; if `ScreenText`
    /// carries styling that affects rendering, identically-worded texts with
    /// different styles would collide — confirm.
    pub fn draw(&mut self, screen_txt: ScreenText, cvs: &mut WindowCanvas) -> Result<(), String> {
        let cache_key = screen_txt.text.to_string();

        // Fast path: blit the previously rendered texture.
        if let Some((tex, w, h)) = self.cached_texts.get(&cache_key) {
            let (x, y) = screen_txt.pos.to_xy();
            return cvs.copy(tex, Rect::new(0, 0, *w, *h), Rect::new(x, y, *w, *h));
        }

        let (x, y) = screen_txt.pos.to_xy();
        let prepared_text = prepare(screen_txt, self);
        let (w, h) = prepared_text.dim;
        let pixel_format = self.texture_creator.default_pixel_format();
        let mut target_tex = self
            .texture_creator
            // ATTENTION: there seems to be a very weird issue if the texture
            // is too small — the background/transparency of small textures is
            // broken, hence the 65x33 lower bounds.
            .create_texture_target(pixel_format, max(w, 65), max(h, 33))
            .map_err(to_string)?;
        target_tex.set_blend_mode(BlendMode::Blend);

        draw_text(
            prepared_text,
            cvs,
            &mut self.texture,
            &mut target_tex,
            (x, y, w, h),
        )?;

        self.cached_texts.insert(cache_key, (target_tex, w, h));
        Ok(())
    }
}
/// A single word laid out against the glyph atlas: per char, the data
/// needed to blit it from the atlas.
struct PreparedWord {
    chars: Vec<(i32, i32, u32, u32)>, // (atlas start x, advance, glyph w, glyph h)
    width: u32,                       // total advance width of the word
}
impl PreparedWord {
    /// Lays out `txt` glyph-by-glyph, recording atlas coordinates and the
    /// accumulated advance width. Characters without a glyph are skipped.
    fn prepare(glyphs: &Vec<GlyphRegion>, txt: &str) -> Self {
        let mut x = 0;
        let mut chars = Vec::new();
        for c in txt.chars() {
            if let Some(r) = find_glyph_region(c, glyphs) {
                chars.push((r.start, r.advance, r.width, r.height));
                x += r.advance;
            }
        }
        PreparedWord {
            chars,
            width: x as u32,
        }
    }
    /// Blits the word's glyphs from the atlas `texture` into `target`,
    /// starting at `pos` (pixel coordinates inside `target`).
    fn draw(
        &self,
        texture: &Texture,
        cvs: &mut WindowCanvas,
        target: &mut Texture,
        pos: (i32, i32),
    ) -> Result<(), String> {
        let (mut x, y) = pos;
        for (start, advance, width, height) in self.chars.iter() {
            let from = Rect::new(*start, 0, *width, *height);
            let to = Rect::new(x, y, *width, *height);
            cvs.with_texture_canvas(target, |texture_canvas| {
                texture_canvas.copy(&texture, Some(from), Some(to)).unwrap();
            })
            .map_err(to_string)?;
            x += advance;
        }
        Ok(())
    }
}
/// A fully laid-out text block ready to be drawn: word positions, final
/// dimensions, and the styling captured from the originating `ScreenText`.
struct PreparedText {
    words: Vec<((i32, i32), PreparedWord)>, // (offset inside the text box, word)
    dim: (u32, u32),                        // total (width, height) incl. padding/border
    color: (u8, u8, u8, u8),                // RGBA text color
    background: Option<Color>,              // fill color; None = no background fill
    padding: u32,
    border: Option<(u32, Color)>,           // (border width, border color)
}
/// Splits `text` into positioned words, wrapping lines at `max_width`, and
/// returns the laid-out block together with its dimensions and styling.
fn prepare(text: ScreenText, font: &Font) -> PreparedText {
    let (mut x, mut y) = (0, 0);
    let mut words = Vec::new();
    let mut width_so_far: u32 = 0;
    let border_width = text.border.map(|(w, _)| w).unwrap_or(0);
    // Space consumed by padding and border on both sides of the box.
    let spacing = 2 * text.padding + 2 * border_width;
    // saturating_sub guards against u32 underflow (panic in debug builds)
    // when the configured max_width is smaller than padding + border.
    let max_width = text.max_width.saturating_sub(spacing);
    for line in text.text.into_string().lines() {
        for t in line.split_whitespace() {
            let word = PreparedWord::prepare(&font.glyphs, t);
            let text_width = word.width;
            let advance = font.space_advance + text_width as i32;
            if x > 0 && (x + advance) as u32 > max_width {
                // text does not fit in current line
                // => wrap text (no wrap if first word in line)
                x = 0;
                y += font.line_height as i32;
                width_so_far = max_width;
            }
            words.push(((x, y), word));
            x += advance;
            if x as u32 > width_so_far {
                width_so_far = x as u32;
            }
        }
        x = 0;
        y += font.line_height as i32;
    }
    let width = max(text.min_width, width_so_far + spacing);
    let height = y as u32 + spacing;
    PreparedText {
        words,
        dim: (width, height),
        color: text.color,
        background: text.background.map(|(r, g, b, a)| Color::RGBA(r, g, b, a)),
        padding: text.padding,
        border: text
            .border
            .map(|(w, (r, g, b, a))| (w, Color::RGBA(r, g, b, a))),
    }
}
/// Maps a printable ASCII char (0x20..=0x7E) to its glyph region in the
/// atlas; returns `None` for anything outside that range.
/// Takes a slice instead of `&Vec` (clippy `ptr_arg`); callers passing
/// `&Vec<GlyphRegion>` still work via deref coercion.
fn find_glyph_region(c: char, metrics: &[GlyphRegion]) -> Option<&GlyphRegion> {
    let ascii_index = c as usize;
    if (32..=126).contains(&ascii_index) {
        // The atlas stores glyphs in ASCII order starting at ' ' (32).
        metrics.get(ascii_index - 32)
    } else {
        None
    }
}
/// Converts any `ToString` value into an owned `String`; used to adapt
/// foreign error types into this module's `Result<_, String>` errors.
fn to_string(s: impl ToString) -> String {
    ToString::to_string(&s)
}
/// Fills the rect (x, y, w, h) of `target_texture` with `color`, using
/// `with_texture_canvas` to temporarily redirect rendering into the texture.
fn draw_background(
    cvs: &mut WindowCanvas,
    target_texture: &mut Texture,
    color: Color,
    x: i32,
    y: i32,
    w: u32,
    h: u32,
) -> Result<(), String> {
    cvs.with_texture_canvas(target_texture, |texture_canvas| {
        texture_canvas.set_blend_mode(BlendMode::Blend); // TODO test performance impact
        texture_canvas.set_draw_color(color);
        // NOTE(review): unwrap panics if SDL reports a draw error; the closure
        // cannot propagate a Result, hence the unwrap.
        texture_canvas.fill_rect(Rect::new(x, y, w, h)).unwrap();
    })
    .map_err(to_string)
}
/// Draws a rectangular border of width `bw` just inside the rect
/// (x, y, w, h) of `target_texture`, as four filled strips.
fn draw_border(
    cvs: &mut WindowCanvas,
    target_texture: &mut Texture,
    color: Color,
    bw: u32,
    x: i32,
    y: i32,
    w: u32,
    h: u32,
) -> Result<(), String> {
    // Edge coordinates: the right/bottom strips are inset by the border width.
    let xl = x;
    let xr = x + w as i32 - bw as i32;
    let yt = y;
    let yb = y + h as i32 - bw as i32;
    cvs.with_texture_canvas(target_texture, |texture_canvas| {
        texture_canvas.set_draw_color(color);
        texture_canvas.fill_rect(Rect::new(xl, yt, w, bw)).unwrap(); // top
        texture_canvas.fill_rect(Rect::new(xl, yt, bw, h)).unwrap(); // left
        texture_canvas.fill_rect(Rect::new(xr, yt, bw, h)).unwrap(); // right
        texture_canvas.fill_rect(Rect::new(xl, yb, w, bw)).unwrap(); // bottom
    })
    .map_err(to_string)
}
fn draw_text(
text: PreparedText,
cvs: &mut WindowCanvas,
texture: &mut Texture,
target: &mut Texture,
(x, y, w, h): (i32, i32, u32, u32),
) -> Result<(), String> {
if let Some(color) = text.background {
draw_background(cvs, target, color, 0, 0, w, h)?;
}
if let Some((bw, border_color)) = text.border {
draw_border(cvs, target, border_color, bw, 0, 0, w, h)?;
}
let shift = text.border.map(|(val, _)| val).unwrap_or(0) as i32 + text.padding as i32;
texture.set_alpha_mod(text.color.3);
texture.set_color_mod(text.color.0, text.color.1, text.color.2);
for ((offset_x, offset_y), word) in text.words.iter() {
word.draw(texture, cvs, target, (shift + offset_x, shift + offset_y))?;
}
texture.set_alpha_mod(255);
texture.set_color_mod(0, 0, 0);
cvs.copy(&target, Rect::new(0, 0, w, h), Rect::new(x, y, w, h))?;
Ok(())
}
[FIX] fixed artefacts when rendering text without a transparent background
use std::cmp::max;
use std::collections::HashMap;
use std::string::ToString;
use sdl2::pixels::Color;
use sdl2::rect::Rect;
use sdl2::render::{BlendMode, Texture, TextureCreator, WindowCanvas};
use sdl2::surface::Surface;
use sdl2::ttf::Font as Sdl2Font;
use sdl2::video::WindowContext;
use crate::ui::types::ScreenText;
const ASCII: &str = " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
/// Bitmap-font renderer: a pre-rendered glyph atlas plus a cache of
/// already-rendered text textures keyed by the text content.
pub struct Font<'a> {
    texture: Texture<'a>,     // glyph atlas: all ASCII glyphs in one row
    glyphs: Vec<GlyphRegion>, // per-glyph location/metrics inside the atlas
    line_height: u32,         // height of one text line in pixels
    space_advance: i32,       // horizontal advance of the space character
    texture_creator: &'a TextureCreator<WindowContext>,
    cached_texts: HashMap<String, (Texture<'a>, u32, u32)>, // text -> (texture, w, h)
}
/// Location and metrics of a single glyph inside the (single-row) atlas.
struct GlyphRegion {
    start: i32,   // x offset of the glyph in the atlas texture
    advance: i32, // horizontal advance when laying out text
    width: u32,   // glyph bitmap width in pixels
    height: u32,  // glyph bitmap height in pixels
}
impl<'a> Font<'a> {
    /// Builds a glyph atlas from `font`: renders every printable ASCII char
    /// into one single-row texture and records per-glyph metrics.
    /// Returns an error if any ASCII char has no glyph in the font.
    pub fn from_font(
        texture_creator: &'a TextureCreator<sdl2::video::WindowContext>,
        font: Sdl2Font,
    ) -> Result<Self, String> {
        let mut total_width = 0;
        let mut total_height = 0;
        let mut glyphs: Vec<GlyphRegion> = Vec::new();
        let mut space_advance = 0;
        for c in ASCII.chars() {
            if let Some(metric) = font.find_glyph_metrics(c) {
                let (w, h) = font.size_of_char(c).map_err(to_string)?;
                glyphs.push(GlyphRegion {
                    start: total_width as i32,
                    width: w,
                    height: h,
                    advance: metric.advance,
                });
                if c == ' ' {
                    space_advance = metric.advance;
                }
                total_width += w;
                // NOTE(review): uses the last glyph's height as the shared
                // line height — assumes all glyphs render equally tall.
                total_height = h;
            } else {
                return Err(format!("Unsupported character: {}", c));
            }
        }
        let mut font_canvas = Surface::new(
            total_width,
            total_height,
            texture_creator.default_pixel_format(),
        )?
        .into_canvas()?;
        let font_texture_creator = font_canvas.texture_creator();
        let mut x = 0;
        for (i, c) in ASCII.char_indices() {
            // Byte index == glyph index because every char here is one-byte ASCII.
            let GlyphRegion { width, .. } = glyphs[i];
            let char_surface = font
                .render(&c.to_string())
                .blended(Color::RGBA(255, 255, 255, 255))
                .map_err(to_string)?;
            let char_tex = font_texture_creator
                .create_texture_from_surface(&char_surface)
                .map_err(to_string)?;
            let target = Rect::new(x, 0, width, total_height);
            font_canvas.copy(&char_tex, None, Some(target))?;
            x += width as i32;
        }
        let texture = texture_creator
            .create_texture_from_surface(font_canvas.into_surface())
            .map_err(to_string)?;
        Ok(Font {
            texture,
            glyphs,
            line_height: total_height,
            space_advance,
            texture_creator,
            cached_texts: HashMap::new(),
        })
    }
    /// Draws `screen_txt` onto `cvs`, rendering it into a cached texture on
    /// first use and blitting the cached texture on subsequent calls.
    /// NOTE(review): the cache key is only the text content, so the same text
    /// drawn with different colors/borders would reuse the first rendering —
    /// confirm callers never vary the style for identical text.
    pub fn draw(&mut self, screen_txt: ScreenText, cvs: &mut WindowCanvas) -> Result<(), String> {
        let cache_key = screen_txt.text.to_string();
        if let Some((tex, w, h)) = self.cached_texts.get(&cache_key) {
            let (x, y) = screen_txt.pos.to_xy();
            return cvs.copy(tex, Rect::new(0, 0, *w, *h), Rect::new(x, y, *w, *h));
        }
        let (x, y) = screen_txt.pos.to_xy();
        let prepared_text = prepare(screen_txt, self);
        let (w, h) = prepared_text.dim;
        let pixel_format = self.texture_creator.default_pixel_format();
        let mut target_tex = self
            .texture_creator
            // !!! ATTENTION: there seems to be a very weird issue if the texture
            // is too small: the background/transparency of small textures is broken
            .create_texture_target(pixel_format, max(w, 65), max(h, 65))
            .map_err(to_string)?;
        target_tex.set_blend_mode(BlendMode::Blend);
        draw_text(
            prepared_text,
            cvs,
            &mut self.texture,
            &mut target_tex,
            (x, y, w, h),
        )?;
        self.cached_texts.insert(cache_key, (target_tex, w, h));
        Ok(())
    }
}
/// A single word laid out against the glyph atlas: per char, the data
/// needed to blit it from the atlas.
struct PreparedWord {
    chars: Vec<(i32, i32, u32, u32)>, // (atlas start x, advance, glyph w, glyph h)
    width: u32,                       // total advance width of the word
}
impl PreparedWord {
    /// Lays out `txt` glyph-by-glyph, recording atlas coordinates and the
    /// accumulated advance width. Characters without a glyph are skipped.
    fn prepare(glyphs: &Vec<GlyphRegion>, txt: &str) -> Self {
        let mut x = 0;
        let mut chars = Vec::new();
        for c in txt.chars() {
            if let Some(r) = find_glyph_region(c, glyphs) {
                chars.push((r.start, r.advance, r.width, r.height));
                x += r.advance;
            }
        }
        PreparedWord {
            chars,
            width: x as u32,
        }
    }
    /// Blits the word's glyphs from the atlas `texture` into `target`,
    /// starting at `pos` (pixel coordinates inside `target`).
    fn draw(
        &self,
        texture: &Texture,
        cvs: &mut WindowCanvas,
        target: &mut Texture,
        pos: (i32, i32),
    ) -> Result<(), String> {
        let (mut x, y) = pos;
        for (start, advance, width, height) in self.chars.iter() {
            let from = Rect::new(*start, 0, *width, *height);
            let to = Rect::new(x, y, *width, *height);
            cvs.with_texture_canvas(target, |texture_canvas| {
                texture_canvas.copy(&texture, Some(from), Some(to)).unwrap();
            })
            .map_err(to_string)?;
            x += advance;
        }
        Ok(())
    }
}
/// A fully laid-out text block ready to be drawn: word positions, final
/// dimensions, and the styling captured from the originating `ScreenText`.
struct PreparedText {
    words: Vec<((i32, i32), PreparedWord)>, // (offset inside the text box, word)
    dim: (u32, u32),                        // total (width, height) incl. padding/border
    color: (u8, u8, u8, u8),                // RGBA text color
    // Always a concrete color: `prepare` substitutes a fully transparent
    // fallback when the ScreenText has no background, so the render target
    // is always cleared (this is the artefact fix).
    background: Color,
    padding: u32,
    border: Option<(u32, Color)>,           // (border width, border color)
}
/// Splits `text` into positioned words, wrapping lines at `max_width`, and
/// returns the laid-out block together with its dimensions and styling.
fn prepare(text: ScreenText, font: &Font) -> PreparedText {
    let (mut x, mut y) = (0, 0);
    let mut words = Vec::new();
    let mut width_so_far: u32 = 0;
    let border_width = text.border.map(|(w, _)| w).unwrap_or(0);
    // Space consumed by padding and border on both sides of the box.
    let spacing = 2 * text.padding + 2 * border_width;
    // saturating_sub guards against u32 underflow (panic in debug builds)
    // when the configured max_width is smaller than padding + border.
    let max_width = text.max_width.saturating_sub(spacing);
    for line in text.text.into_string().lines() {
        for t in line.split_whitespace() {
            let word = PreparedWord::prepare(&font.glyphs, t);
            let text_width = word.width;
            let advance = font.space_advance + text_width as i32;
            if x > 0 && (x + advance) as u32 > max_width {
                // text does not fit in current line
                // => wrap text (no wrap if first word in line)
                x = 0;
                y += font.line_height as i32;
                width_so_far = max_width;
            }
            words.push(((x, y), word));
            x += advance;
            if x as u32 > width_so_far {
                width_so_far = x as u32;
            }
        }
        x = 0;
        y += font.line_height as i32;
    }
    let width = max(text.min_width, width_so_far + spacing);
    let height = y as u32 + spacing;
    PreparedText {
        words,
        dim: (width, height),
        color: text.color,
        // Transparent fallback keeps the render target cleared even when no
        // background color was requested (prevents stale-pixel artefacts).
        background: text
            .background
            .map(|(r, g, b, a)| Color::RGBA(r, g, b, a))
            .unwrap_or(Color::RGBA(0, 0, 0, 0)),
        padding: text.padding,
        border: text
            .border
            .map(|(w, (r, g, b, a))| (w, Color::RGBA(r, g, b, a))),
    }
}
/// Maps a printable ASCII char (0x20..=0x7E) to its glyph region in the
/// atlas; returns `None` for anything outside that range.
/// Takes a slice instead of `&Vec` (clippy `ptr_arg`); callers passing
/// `&Vec<GlyphRegion>` still work via deref coercion.
fn find_glyph_region(c: char, metrics: &[GlyphRegion]) -> Option<&GlyphRegion> {
    let ascii_index = c as usize;
    if (32..=126).contains(&ascii_index) {
        // The atlas stores glyphs in ASCII order starting at ' ' (32).
        metrics.get(ascii_index - 32)
    } else {
        None
    }
}
/// Converts any `ToString` value into an owned `String`; used to adapt
/// foreign error types into this module's `Result<_, String>` errors.
fn to_string(s: impl ToString) -> String {
    ToString::to_string(&s)
}
/// Fills the rect (x, y, w, h) of `target_texture` with `color`, using
/// `with_texture_canvas` to temporarily redirect rendering into the texture.
fn draw_background(
    cvs: &mut WindowCanvas,
    target_texture: &mut Texture,
    color: Color,
    x: i32,
    y: i32,
    w: u32,
    h: u32,
) -> Result<(), String> {
    cvs.with_texture_canvas(target_texture, |texture_canvas| {
        texture_canvas.set_blend_mode(BlendMode::Blend); // TODO test performance impact
        texture_canvas.set_draw_color(color);
        // NOTE(review): unwrap panics if SDL reports a draw error; the closure
        // cannot propagate a Result, hence the unwrap.
        texture_canvas.fill_rect(Rect::new(x, y, w, h)).unwrap();
    })
    .map_err(to_string)
}
/// Draws a rectangular border of width `bw` just inside the rect
/// (x, y, w, h) of `target_texture`, as four filled strips.
fn draw_border(
    cvs: &mut WindowCanvas,
    target_texture: &mut Texture,
    color: Color,
    bw: u32,
    x: i32,
    y: i32,
    w: u32,
    h: u32,
) -> Result<(), String> {
    // Edge coordinates: the right/bottom strips are inset by the border width.
    let xl = x;
    let xr = x + w as i32 - bw as i32;
    let yt = y;
    let yb = y + h as i32 - bw as i32;
    cvs.with_texture_canvas(target_texture, |texture_canvas| {
        texture_canvas.set_draw_color(color);
        texture_canvas.fill_rect(Rect::new(xl, yt, w, bw)).unwrap(); // top
        texture_canvas.fill_rect(Rect::new(xl, yt, bw, h)).unwrap(); // left
        texture_canvas.fill_rect(Rect::new(xr, yt, bw, h)).unwrap(); // right
        texture_canvas.fill_rect(Rect::new(xl, yb, w, bw)).unwrap(); // bottom
    })
    .map_err(to_string)
}
/// Renders `text` into `target` (background, border, then glyphs) and blits
/// the (w, h) region of the result onto `cvs` at (x, y).
fn draw_text(
    text: PreparedText,
    cvs: &mut WindowCanvas,
    texture: &mut Texture,
    target: &mut Texture,
    (x, y, w, h): (i32, i32, u32, u32),
) -> Result<(), String> {
    // Always fill the background (a transparent fallback is substituted in
    // `prepare`): a fresh render target holds undefined pixels, which
    // previously produced artefacts when no background was drawn.
    draw_background(cvs, target, text.background, 0, 0, w, h)?;
    if let Some((bw, border_color)) = text.border {
        draw_border(cvs, target, border_color, bw, 0, 0, w, h)?;
    }
    // Offset of the text area inside the box: border plus padding.
    let shift = text.border.map(|(val, _)| val).unwrap_or(0) as i32 + text.padding as i32;
    // Tint the (white) glyph atlas with the requested text color/alpha.
    texture.set_alpha_mod(text.color.3);
    texture.set_color_mod(text.color.0, text.color.1, text.color.2);
    for ((offset_x, offset_y), word) in text.words.iter() {
        word.draw(texture, cvs, target, (shift + offset_x, shift + offset_y))?;
    }
    // Reset the atlas modulation so later draws are unaffected.
    // NOTE(review): (0, 0, 0) zeroes the color mod; (255, 255, 255) would be
    // the neutral value — confirm intent.
    texture.set_alpha_mod(255);
    texture.set_color_mod(0, 0, 0);
    cvs.copy(&target, Rect::new(0, 0, w, h), Rect::new(x, y, w, h))?;
    Ok(())
}
|
use std::cmp;
use std::cell::RefCell;
use std::fs::File;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::iter;
use std::error::Error;
use std::ascii::AsciiExt;
use itertools::Itertools;
use std::borrow::Cow;
use std::rc::Rc;
use rustbox::{RustBox};
use rustbox::keyboard::Key;
use rex_utils;
use rex_utils::split_vec::SplitVec;
use rex_utils::rect::Rect;
use rex_utils::relative_rect::{RelativeRect, RelativePos, RelativeSize};
use super::super::config::Config;
use super::RustBoxEx::{RustBoxEx, Style};
use super::input::Input;
use super::widget::Widget;
use super::inputline::{GotoInputLine, FindInputLine, PathInputLine, ConfigSetLine};
use super::overlay::OverlayText;
use super::config::ConfigScreen;
use super::menu::{OverlayMenu, MenuState, MenuEntry};
/// A reversible buffer edit; offsets are byte positions in the buffer
/// (see `edit_buffer`, which also records the inverse op for undo).
#[derive(Debug)]
enum EditOp {
    Delete(isize, isize),   // remove the half-open byte range [start, end)
    Insert(isize, Vec<u8>), // insert the bytes at the offset
    Write(isize, Vec<u8>),  // overwrite bytes in place starting at the offset
}
/// How offsets are rendered in the left gutter of the hex view.
#[derive(Debug)]
enum LineNumberMode {
    None,  // gutter hidden (config.show_linenum == false)
    Short, // 4 hex digits — buffers whose length fits in 0xFFFF
    Long   // 8 hex digits as XXXX:XXXX — larger buffers
}
// Layout for full-area child widgets (help, log, menu, config screens):
// anchored at the origin and stretched over the whole parent rect.
static OVERLAY_LAYOUT : RelativeRect<isize> = RelativeRect {
    top: RelativePos::FromStart(0),
    left: RelativePos::FromStart(0),
    width: RelativeSize::Relative(0),
    height: RelativeSize::Relative(0),
};
// Layout for single-line input widgets (goto/find/path/config prompts):
// one row high, full width, anchored one line from the bottom.
static INPUTLINE_LAYOUT : RelativeRect<isize> = RelativeRect {
    top: RelativePos::FromEnd(1),
    left: RelativePos::FromStart(0),
    width: RelativeSize::Relative(0),
    height: RelativeSize::Absolute(1),
};
/// Every editor command the input layer can emit; `do_action` dispatches
/// on these.
#[derive(Copy,Clone,Debug)]
pub enum HexEditActions {
    Edit(char), // hex digit in the nibble view / printable char in the byte view
    SwitchView, // toggle focus between the nibble (hex) and ascii views
    MoveLeft,
    MoveRight,
    MoveUp,
    MoveDown,
    MovePageUp,
    MovePageDown,
    MoveToFirstColumn,
    MoveToLastColumn,
    Delete,         // delete at cursor (like Del)
    DeleteWithMove, // delete before cursor (like Backspace)
    CopySelection,
    CutSelection,
    PasteSelection,
    Undo,
    ToggleInsert,
    // NOTE(review): typo for "ToggleSelection"; renaming would break users of
    // this public enum, so it is kept as-is.
    ToggleSelecion,
    HelpView,
    LogView,
    AskGoto,
    AskFind,
    AskOpen,
    AskSave,
    AskConfig,
    AskMarkAdd,
    AskMarkGoto,
    StartMenu,
}
// Top-level entries of the pop-up menu (opened via StartMenu):
// 'c' opens the config screen; 'm' opens the Mark submenu (add/goto).
static ROOT_ENTRIES: MenuState<HexEditActions> = &[
    MenuEntry::CommandEntry('c', "Config", HexEditActions::AskConfig),
    MenuEntry::SubEntries('m', "Mark", &[
        MenuEntry::CommandEntry('a', "Add", HexEditActions::AskMarkAdd),
        MenuEntry::CommandEntry('g', "Goto", HexEditActions::AskMarkGoto),
    ]),
];
signalreceiver_decl!{HexEditSignalReceiver(HexEdit)}
/// The main hex-editor widget: buffer, cursor/view state, undo history and
/// an optional modal child widget (prompt, menu, overlay).
pub struct HexEdit {
    buffer: SplitVec,               // the bytes being edited
    config: Rc<RefCell<Config>>,    // shared, mutable editor configuration
    rect: Rect<isize>,              // screen area assigned to this widget
    cursor_nibble_pos: isize,       // cursor position in nibbles (2 per byte)
    status_log: Vec<String>,        // history of status-line messages
    show_last_status: bool,         // whether the last status line is shown
    data_offset: isize,             // byte offset of the first visible row
    row_offset: isize,              // horizontal scroll offset within a row
    nibble_active: bool,            // true: hex view focused; false: ascii view
    selection_start: Option<isize>, // selection anchor (byte pos), if any
    insert_mode: bool,              // insert vs. overwrite editing
    input: Input,                   // key-to-action mapping
    undo_stack: Vec<EditOp>,        // inverse operations, most recent last
    child_widget: Option<(Box<Widget>, RelativeRect<isize>)>, // active modal child
    cur_path: Option<PathBuf>,      // file backing the buffer, if any
    clipboard: Option<Vec<u8>>,     // internal copy/paste buffer
    signal_receiver: Rc<HexEditSignalReceiver>, // routes child-widget signals back
}
impl HexEdit {
/// Creates a fresh editor over an empty buffer with the given configuration.
pub fn new(config: Config) -> HexEdit {
    HexEdit {
        buffer: SplitVec::new(),
        config: Rc::new(RefCell::new(config)),
        rect: Default::default(),
        cursor_nibble_pos: 0,
        data_offset: 0,
        row_offset: 0,
        status_log: vec!["Press C-/ for help".to_string()],
        show_last_status: true,
        nibble_active: true,
        selection_start: None,
        insert_mode: false,
        child_widget: None,
        undo_stack: Vec::new(),
        cur_path: None,
        clipboard: None,
        input: Input::new(),
        signal_receiver: Rc::new(HexEditSignalReceiver::new()),
    }
}
/// Returns cursor/view/edit state to defaults; used after loading a file.
/// Keeps the status log, clipboard and current buffer contents untouched.
fn reset(&mut self) {
    self.cursor_nibble_pos = 0;
    self.data_offset = 0;
    self.nibble_active = true;
    self.selection_start = None;
    self.insert_mode = false;
    self.child_widget = None;
    self.undo_stack = Vec::new();
}
/// Chooses the gutter format: hidden when disabled in the config, 4 hex
/// digits while every offset fits in 16 bits, 8 digits otherwise.
fn get_linenumber_mode(&self) -> LineNumberMode {
    let show = self.config.borrow().show_linenum;
    if !show {
        return LineNumberMode::None;
    }
    if self.buffer.len() <= 0xFFFF {
        LineNumberMode::Short
    } else {
        LineNumberMode::Long
    }
}
/// Width in screen cells of the line-number gutter, including the
/// separating whitespace column.
fn get_linenumber_width(&self) -> isize {
    match self.get_linenumber_mode() {
        LineNumberMode::None => 1,
        LineNumberMode::Short => 4 + 1, // 4 for the XXXX + 1 for whitespace
        LineNumberMode::Long => 9 + 1, // 9 for XXXX:XXXX + 1 for whitespace
    }
}
/// Bytes shown per line: the configured width if set, otherwise as many
/// as fit on screen.
/// NOTE(review): the isize -> u32 -> isize round-trip would wrap if
/// `get_bytes_per_row()` ever went negative (extremely narrow window) — confirm.
fn get_line_width(&self) -> isize {
    self.config.borrow().line_width.unwrap_or(self.get_bytes_per_row() as u32) as isize
}
/// How many bytes fit in one screen row, given the gutter width and
/// whether the ascii column is shown.
fn get_bytes_per_row(&self) -> isize {
    // This is the number of cells on the screen that are used for each byte.
    // For the nibble view, we need 3 (1 for each nibble and 1 for the spacing). For
    // the ascii view, if it is shown, we need another one.
    let cells_per_byte = if self.config.borrow().show_ascii { 4 } else { 3 };
    (self.rect.width - self.get_linenumber_width()) / cells_per_byte
}
/// Total number of bytes visible on one screen (line width x row count).
fn get_bytes_per_screen(&self) -> isize {
    self.get_line_width() * self.rect.height
}
/// Prints the gutter label for `row` in the format selected by
/// `get_linenumber_mode` (nothing / XXXX / XXXX:XXXX).
fn draw_line_number(&self, rb: &RustBox, row: usize, line_number: usize) {
    let label = match self.get_linenumber_mode() {
        LineNumberMode::None => return,
        LineNumberMode::Short => format!("{:04X}", line_number),
        LineNumberMode::Long => {
            format!("{:04X}:{:04X}", line_number >> 16, line_number & 0xFFFF)
        }
    };
    rb.print_style(0, row, Style::Default, &label);
}
/// Draws one editor row from `iter` (byte position, optional byte): the hex
/// nibble pairs, the optional ascii column, selection highlighting, and the
/// terminal cursor for whichever view is active.
fn draw_line(&self, rb: &RustBox, iter: &mut Iterator<Item=(usize, Option<&u8>)>, row: usize) {
    let nibble_view_start = self.get_linenumber_width() as usize;
    // The value of this is wrong if we are not showing the ascii view
    let byte_view_start = nibble_view_start + self.get_bytes_per_row() as usize * 3;
    // We want the selection draw to not go out of the editor view
    let mut prev_in_selection = false;
    let mut at_current_row = false;
    for (row_offset, (byte_pos, maybe_byte)) in iter.skip(self.row_offset as usize).enumerate().take(self.get_bytes_per_row() as usize) {
        let at_current_byte = byte_pos as isize == (self.cursor_nibble_pos / 2);
        at_current_row = at_current_row || at_current_byte;
        let in_selection = if let Some(selection_pos) = self.selection_start {
            rex_utils::is_between(byte_pos as isize, selection_pos, self.cursor_nibble_pos / 2)
        } else {
            false
        };
        // Now we draw the nibble view
        let hex_chars = if let Some(&byte) = maybe_byte {
            rex_utils::u8_to_hex(byte)
        } else {
            // Past the end of the buffer: draw a blank cell.
            (' ', ' ')
        };
        let nibble_view_column = nibble_view_start + (row_offset * 3);
        let nibble_style = if (!self.nibble_active && at_current_byte) || in_selection {
            Style::Selection
        } else {
            Style::Default
        };
        rb.print_char_style(nibble_view_column, row, nibble_style,
            hex_chars.0);
        rb.print_char_style(nibble_view_column + 1, row, nibble_style,
            hex_chars.1);
        // Highlight the spacing cell between two selected bytes so the
        // selection appears contiguous.
        if prev_in_selection && in_selection {
            rb.print_char_style(nibble_view_column - 1, row, nibble_style,
                ' ');
        }
        if self.nibble_active && self.child_widget.is_none() && at_current_byte {
            rb.set_cursor(nibble_view_column as isize + (self.cursor_nibble_pos & 1),
                row as isize);
        };
        if self.config.borrow().show_ascii {
            // Now let's draw the byte window
            let byte_char = if let Some(&byte) = maybe_byte {
                let bc = byte as char;
                if bc.is_ascii() && bc.is_alphanumeric() {
                    bc
                } else {
                    '.'
                }
            } else {
                ' '
            };
            // If we are at the current byte but the nibble view is active, we want to draw a
            // "fake" cursor by drawing a selection square
            let byte_style = if (self.nibble_active && at_current_byte) || in_selection {
                Style::Selection
            } else {
                Style::Default
            };
            rb.print_char_style(byte_view_start + row_offset, row, byte_style,
                byte_char);
            if !self.nibble_active && self.child_widget.is_none() && at_current_byte {
                rb.set_cursor((byte_view_start + row_offset) as isize, row as isize);
            }
            // Remember if we had a selection, so that we know for next char to "fill in" with
            // selection in the nibble view
            prev_in_selection = in_selection;
        }
    }
    // We just need to consume the iterator and see if there were any remaining bytes
    let bytes_remaining = iter.count();
    // '<' / '>' markers flag that the current row is horizontally scrolled.
    if at_current_row && self.row_offset != 0 {
        rb.print_char_style(nibble_view_start - 1, row, Style::Default, '<');
    }
    if at_current_row && bytes_remaining != 0 {
        rb.print_char_style(byte_view_start - 1, row, Style::Default, '>');
    }
}
/// Draws every visible editor row: pairs byte offsets with buffer contents
/// (plus one trailing `None` so the end-of-buffer cell renders blank),
/// chunks them into rows, and delegates to the gutter/line painters.
pub fn draw_view(&self, rb: &RustBox) {
    let start_iter = self.data_offset as usize;
    let stop_iter = cmp::min(start_iter + self.get_bytes_per_screen() as usize, self.buffer.len());
    let itit = (start_iter..).zip( // We are zipping the byte position
        self.buffer.iter_range(start_iter..stop_iter) // With the data at those bytes
        .map(|x| Some(x)) // And wrapping it in an option
        .chain(iter::once(None))) // So we can have a "fake" last item that will be None
        .chunks_lazy(self.get_line_width() as usize); //And split it into nice row-sized chunks
    for (row, row_iter_) in itit.into_iter().take(self.rect.height as usize).enumerate() {
        // We need to be able to peek in the iterable so we can get the current position
        let mut row_iter = row_iter_.peekable();
        let byte_pos = row_iter.peek().unwrap().0;
        self.draw_line_number(rb, row, byte_pos);
        self.draw_line(rb, &mut row_iter, row);
    }
}
/// Renders the bottom status bar: the last status message on the left and a
/// right-aligned summary (selection info, or position/undo depth) ending
/// with the current mode (SEL/INS/OVR).
fn draw_statusbar(&self, rb: &RustBox) {
    rb.print_style(0, rb.height() - 1, Style::StatusBar, &rex_utils::string_with_repeat(' ', rb.width()));
    if self.show_last_status {
        if let Some(ref status_line) = self.status_log.last() {
            rb.print_style(0, rb.height() - 1, Style::StatusBar, &status_line);
        }
    }
    let mode = if let Some(_) = self.selection_start {
        "SEL"
    } else if self.insert_mode {
        "INS"
    } else {
        "OVR"
    };
    let right_status;
    if let Some(selection_start) = self.selection_start {
        let size = (self.cursor_nibble_pos/2 - selection_start).abs();
        right_status = format!(
            " Start: {} Size: {} Pos: {} {}",
            selection_start, size, self.cursor_nibble_pos/2, mode);
    } else {
        // BUG FIX: the arguments were swapped — "Pos" showed the undo-stack
        // depth and "Undo" showed the cursor position (compare the argument
        // order of the selection branch above).
        right_status = format!(
            " Pos: {} Undo: {} {}",
            self.cursor_nibble_pos/2, self.undo_stack.len(), mode);
    };
    // Right-align; if the bar is wider than the screen, clip its left side.
    let (x_pos, start_index) = if rb.width() >= right_status.len() {
        (rb.width() - right_status.len(), 0)
    } else {
        (0, right_status.len() - rb.width())
    };
    rb.print_style(x_pos, rb.height() - 1, Style::StatusBar, &right_status[start_index..]);
}
/// Draws the whole widget: hex view first, then any modal child widget on
/// top of it, and finally the status bar.
pub fn draw(&mut self, rb: &RustBox) {
    self.draw_view(rb);
    if let Some(&mut (ref mut child_widget, ref layout)) = self.child_widget.as_mut() {
        child_widget.draw(rb, layout.get_absolute_to(self.rect), true);
    }
    self.draw_statusbar(rb);
}
/// Appends a message to the status log and makes it visible in the bar.
/// NOTE(review): the `?Sized` bound is redundant — `S` is taken by value and
/// must be `Sized` anyway.
fn status<S: Into<Cow<'static, str>> + ?Sized>(&mut self, st: S) {
    self.show_last_status = true;
    let cow: Cow<'static, str> = st.into();
    self.status_log.push(format!("{}", &cow));
}
/// Hides the status line (the log itself is kept for the log view).
fn clear_status(&mut self) {
    self.show_last_status = false;
}
/// Loads `path` into the buffer, replacing the current contents and
/// resetting cursor/undo state. Failures are reported on the status bar
/// and leave the current buffer untouched.
pub fn open(&mut self, path: &Path) {
    let mut v = vec![];
    if let Err(e) = File::open(path).and_then(|mut f| f.read_to_end(&mut v)) {
        // NOTE(review): Error::description is deprecated; Display (`{}`, e)
        // would give a fuller message.
        self.status(format!("ERROR: {}", e.description()));
        return;
    }
    self.buffer = SplitVec::from_vec(v);
    self.cur_path = Some(PathBuf::from(path));
    self.reset();
}
/// Writes the buffer to `path`, slice by slice, and remembers the path on
/// success; failures are reported on the status bar.
pub fn save(&mut self, path: &Path) {
    // Fold over the buffer's slices, stopping at the first write error
    // (each step only writes if every previous write succeeded).
    let result = File::create(path)
        .and_then(|mut f| self.buffer.iter_slices()
            .fold(Ok(()), |res, val| res
                .and_then(|_| f.write_all(val))));
    match result {
        Ok(_) => {
            self.cur_path = Some(PathBuf::from(path));
        }
        Err(e) => {
            // NOTE(review): Error::description is deprecated; Display would
            // give a fuller message.
            self.status(format!("ERROR: {}", e.description()));
        }
    }
}
/// Applies `act` to the buffer and, when `add_to_undo` is set, records the
/// inverse operation on the undo stack. Returns the affected byte range
/// as (begin, end).
fn edit_buffer(&mut self, act: EditOp, add_to_undo: bool) -> (isize, isize) {
    // Each arm performs the edit and yields the touched region directly;
    // binding the match result avoids the mutable placeholder variables
    // (which triggered unused-mut warnings in the original).
    match act {
        EditOp::Insert(offset, buf) => {
            let end = offset + buf.len() as isize;
            self.buffer.insert(offset as usize, &buf);
            if add_to_undo {
                self.push_undo(EditOp::Delete(offset, end))
            }
            (offset, end)
        }
        EditOp::Delete(offset, end) => {
            // move_out removes the range and hands back the removed bytes,
            // which become the payload of the inverse Insert.
            let res = self.buffer.move_out(offset as usize..end as usize);
            if add_to_undo { self.push_undo(EditOp::Insert(offset, res)) }
            (offset, end)
        }
        EditOp::Write(offset, buf) => {
            let end = offset + buf.len() as isize;
            // Save the bytes being overwritten so undo can restore them.
            let orig_data = self.buffer.copy_out(offset as usize..(offset as usize + buf.len()));
            self.buffer.copy_in(offset as usize, &buf);
            if add_to_undo { self.push_undo(EditOp::Write(offset, orig_data)) }
            (offset, end)
        }
    }
}
/// Records an inverse operation; `undo` pops and applies these in LIFO order.
fn push_undo(&mut self, act: EditOp) {
    self.undo_stack.push(act);
}
/// Pops the most recent undo entry, applies it without re-recording it,
/// and moves the cursor to the start of the affected region.
fn undo(&mut self) {
    if let Some(act) = self.undo_stack.pop() {
        let (begin, _) = self.edit_buffer(act, false);
        self.set_cursor(begin * 2);
    }
}
/// True when the cursor sits one past the last byte (the append position).
fn cursor_at_end(&self) -> bool {
    self.cursor_nibble_pos == (self.buffer.len()*2) as isize
}
/// Deletes the selection if one is active, otherwise a single byte at the
/// cursor; `with_bksp` deletes the byte *before* the cursor (Backspace
/// semantics) instead. Clears the selection and moves the cursor to the
/// start of the removed range.
fn delete_at_cursor(&mut self, with_bksp: bool) {
    let mut cursor_nibble_pos = self.cursor_nibble_pos;
    let selection_pos = match self.selection_start {
        Some(selection_pos_tag) => selection_pos_tag,
        None => {
            if with_bksp {
                // Backspace at the very start: nothing to delete.
                if cursor_nibble_pos < 2 {
                    return;
                }
                cursor_nibble_pos -= 2;
            }
            cursor_nibble_pos / 2
        }
    };
    // Normalize the (anchor, cursor) pair into an inclusive byte range.
    let del_start = cmp::min(selection_pos, cursor_nibble_pos / 2);
    let mut del_stop = cmp::max(selection_pos, cursor_nibble_pos / 2) + 1;
    // Clamp a range that reaches past the end of the buffer.
    if del_stop > self.buffer.len() as isize {
        del_stop -= 1;
        if del_stop == del_start {
            return;
        }
    }
    if self.buffer.len() == 0 {
        self.status("Nothing to delete");
        return;
    }
    self.selection_start = None;
    self.edit_buffer(EditOp::Delete(del_start, del_stop), true);
    self.set_cursor(del_start * 2);
}
/// Writes one hex digit (0..=15) at the cursor: replaces the selection
/// first if one is active, inserts in insert mode or at the buffer end,
/// overwrites otherwise.
fn write_nibble_at_cursor(&mut self, c: u8) {
    // Replace the text at the selection before writing the data
    if self.selection_start.is_some() {
        self.delete_at_cursor(false);
    }
    if self.insert_mode || self.cursor_at_end() {
        self.insert_nibble_at_cursor(c);
    } else {
        self.set_nibble_at_cursor(c);
    }
}
/// Overwrites the high or low nibble of the byte under the cursor,
/// depending on which half the cursor is on.
fn set_nibble_at_cursor(&mut self, c: u8) {
    let mut byte = self.buffer[(self.cursor_nibble_pos / 2) as usize];
    byte = match self.cursor_nibble_pos & 1 {
        0 => (byte & 0x0f) + c * 16, // even nibble = high half
        1 => (byte & 0xf0) + c,      // odd nibble = low half
        // Unreachable: `x & 1` is always 0 or 1.
        _ => 0xff,
    };
    let byte_offset = self.cursor_nibble_pos / 2;
    self.edit_buffer(EditOp::Write(byte_offset, vec![byte]), true);
}
/// Inserts a new byte whose high nibble is `c`; if the cursor is mid-byte
/// (odd nibble), falls back to overwriting the low nibble instead.
fn insert_nibble_at_cursor(&mut self, c: u8) {
    // If we are at half byte, we still overwrite
    if self.cursor_nibble_pos & 1 == 1 {
        self.set_nibble_at_cursor(c);
        return
    }
    let pos_div2 = self.cursor_nibble_pos / 2;
    self.edit_buffer(EditOp::Insert(pos_div2, vec![c * 16]), true);
}
/// Flips insert/overwrite mode; the no-op move re-clamps and re-scrolls
/// the cursor via `update_cursor`.
fn toggle_insert_mode(&mut self) {
    self.insert_mode = !self.insert_mode;
    self.move_cursor(0);
}
/// Writes a whole byte at the cursor (ascii-view editing): replaces the
/// selection first if active, inserts in insert mode or at the buffer end,
/// overwrites otherwise.
fn write_byte_at_cursor(&mut self, c: u8) {
    // Replace the text at the selection before writing the data
    if self.selection_start.is_some() {
        self.delete_at_cursor(false);
    }
    let byte_offset = self.cursor_nibble_pos / 2;
    if self.insert_mode || self.cursor_at_end() {
        self.edit_buffer(EditOp::Insert(byte_offset, vec![c]), true);
    } else {
        self.edit_buffer(EditOp::Write(byte_offset, vec![c]), true);
    }
}
/// Moves the cursor by a relative amount of nibbles, then clamps/scrolls.
fn move_cursor(&mut self, pos: isize) {
    self.cursor_nibble_pos += pos;
    self.update_cursor()
}
/// Moves the cursor to an absolute nibble position, then clamps/scrolls.
fn set_cursor(&mut self, pos: isize) {
    self.cursor_nibble_pos = pos;
    self.update_cursor()
}
/// Clamps the cursor into [0, buffer_len*2] and scrolls the view
/// (vertically via data_offset, horizontally via row_offset) so the
/// cursor stays visible.
fn update_cursor(&mut self) {
    self.cursor_nibble_pos = cmp::max(self.cursor_nibble_pos, 0);
    self.cursor_nibble_pos = cmp::min(self.cursor_nibble_pos, (self.buffer.len()*2) as isize);
    let cursor_byte_pos = self.cursor_nibble_pos / 2;
    let cursor_row_offset = cursor_byte_pos % self.get_line_width();
    // If the cursor moves above or below the view, scroll it
    if cursor_byte_pos < self.data_offset {
        self.data_offset = (cursor_byte_pos) - cursor_row_offset;
    }
    if cursor_byte_pos > (self.data_offset + self.get_bytes_per_screen() - 1) {
        self.data_offset = cursor_byte_pos - cursor_row_offset -
            self.get_bytes_per_screen() + self.get_line_width();
    }
    // If the cursor moves to the right or left of the view, scroll it
    if cursor_row_offset < self.row_offset {
        self.row_offset = cursor_row_offset;
    }
    if cursor_row_offset >= self.row_offset + self.get_bytes_per_row() {
        self.row_offset = cursor_row_offset - self.get_bytes_per_row() + 1;
    }
}
/// Starts a selection anchored at the current byte, or clears an active one.
fn toggle_selection(&mut self) {
    if self.selection_start.is_some() {
        self.selection_start = None;
    } else {
        self.selection_start = Some(self.cursor_nibble_pos / 2);
    }
}
/// Jumps the cursor to an absolute byte position and reports it.
fn goto(&mut self, pos: isize) {
    self.status(format!("Going to {:?}", pos));
    self.set_cursor(pos * 2);
}
/// Searches for `needle` from the cursor position onward, wrapping around
/// to the start of the buffer if nothing is found; moves the cursor and
/// reports the result on the status bar.
fn find_buf(&mut self, needle: &[u8]) {
    let from_cursor = self.buffer.find_slice_from((self.cursor_nibble_pos / 2) as usize, needle);
    let found_pos = match from_cursor {
        None => self.buffer.find_slice_from(0, needle), // wrap around
        hit => hit,
    };
    if let Some(pos) = found_pos {
        self.status(format!("Found at {:?}", pos));
        self.set_cursor((pos * 2) as isize);
    } else {
        self.status("Nothing found!");
    }
}
/// Copies the selected byte range into the internal clipboard and returns
/// how many bytes were copied; `None` when there is no selection.
fn read_cursor_to_clipboard(&mut self) -> Option<usize> {
    let selection_pos = match self.selection_start {
        Some(p) => p,
        None => return None,
    };
    // Normalize anchor/cursor into an ordered range.
    let cursor_byte = self.cursor_nibble_pos / 2;
    let start = cmp::min(selection_pos, cursor_byte);
    let stop = cmp::max(selection_pos, cursor_byte);
    let data = self.buffer.copy_out(start as usize..stop as usize);
    let data_len = data.len();
    self.clipboard = Some(data);
    Some(data_len)
}
/// Copies the selection to the clipboard and clears the selection.
fn edit_copy(&mut self) {
    if let Some(data_len) = self.read_cursor_to_clipboard() {
        self.status(format!("Copied {}", data_len));
        self.selection_start = None;
    }
}
/// Copies the selection to the clipboard, then deletes it from the buffer.
fn edit_cut(&mut self) {
    if let Some(data_len) = self.read_cursor_to_clipboard() {
        self.delete_at_cursor(false);
        self.status(format!("Cut {}", data_len));
    }
}
/// Inserts the clipboard contents at the cursor's byte position; no-op
/// when the clipboard is empty.
fn edit_paste(&mut self) {
    let data = if let Some(ref d) = self.clipboard {
        d.clone()
    } else {
        return;
    };
    let data_len = data.len() as isize;
    // This is needed to satisfy the borrow checker
    let cur_pos_in_bytes = self.cursor_nibble_pos / 2;
    self.edit_buffer(EditOp::Insert(cur_pos_in_bytes, data), true);
    // NOTE(review): move_cursor takes nibbles; skipping past data_len bytes
    // would be `data_len * 2` — confirm whether `data_len + 1` is intended.
    self.move_cursor(data_len + 1);
}
/// Translates a raw key through the input map and dispatches the action.
fn view_input(&mut self, key: Key) {
    if let Some(action) = self.input.editor_input(key) {
        self.do_action(action)
    }
}
/// Central command dispatcher: executes one `HexEditActions` command
/// (movement, editing, clipboard, view toggles, or opening a prompt).
/// Clears the status line first so stale messages don't linger.
fn do_action(&mut self, action: HexEditActions) {
    self.clear_status();
    match action {
        // Movement: one nibble per step in the hex view, one byte (two
        // nibbles) per step in the ascii view.
        HexEditActions::MoveLeft if self.nibble_active => self.move_cursor(-1),
        HexEditActions::MoveRight if self.nibble_active => self.move_cursor(1),
        HexEditActions::MoveLeft if !self.nibble_active => self.move_cursor(-2),
        HexEditActions::MoveRight if !self.nibble_active => self.move_cursor(2),
        // Unreachable: the guards above cover both values of nibble_active.
        HexEditActions::MoveLeft => panic!("Make the case handler happy!"),
        HexEditActions::MoveRight => panic!("Make the case handler happy!"),
        HexEditActions::MoveUp => {
            let t = -self.get_line_width() * 2;
            self.move_cursor(t)
        }
        HexEditActions::MoveDown => {
            let t = self.get_line_width() * 2;
            self.move_cursor(t)
        }
        HexEditActions::MovePageUp => {
            let t = -(self.get_bytes_per_screen() * 2);
            self.move_cursor(t)
        }
        HexEditActions::MovePageDown => {
            let t = self.get_bytes_per_screen() * 2;
            self.move_cursor(t)
        }
        HexEditActions::MoveToFirstColumn => {
            let pos_in_line = self.cursor_nibble_pos % (self.get_line_width()*2);
            self.move_cursor(-pos_in_line)
        }
        HexEditActions::MoveToLastColumn => {
            let pos_in_line = self.cursor_nibble_pos % (self.get_line_width()*2);
            let i = self.get_line_width()*2 - 2 - pos_in_line;
            self.move_cursor(i);
        }
        HexEditActions::Delete => self.delete_at_cursor(false),
        HexEditActions::DeleteWithMove => self.delete_at_cursor(true),
        // Ctrl X, C V
        HexEditActions::CutSelection => self.edit_cut(),
        HexEditActions::CopySelection => self.edit_copy(),
        HexEditActions::PasteSelection => self.edit_paste(),
        // Hex input for nibble view
        HexEditActions::Edit(ch) if self.nibble_active => {
            if let Some(val) = ch.to_digit(16) {
                self.write_nibble_at_cursor(val as u8);
                self.move_cursor(1);
            } else {
                // TODO: Show error?
            }
        },
        // Ascii edit for byte view
        HexEditActions::Edit(ch) if !self.nibble_active => {
            if ch.len_utf8() == 1 && ch.is_alphanumeric() {
                // TODO: Make it printable rather than alphanumeric
                self.write_byte_at_cursor(ch as u8);
                self.move_cursor(2);
            } else {
                // TODO: Show error?
            }
        }
        // Unreachable: the two guarded arms above are exhaustive for Edit.
        HexEditActions::Edit(_) => panic!("Make the case handler happy!"),
        HexEditActions::SwitchView => {
            self.nibble_active = !self.nibble_active;
        },
        HexEditActions::HelpView => self.start_help(),
        HexEditActions::LogView => self.start_logview(),
        HexEditActions::ToggleInsert => self.toggle_insert_mode(),
        HexEditActions::ToggleSelecion => self.toggle_selection(),
        HexEditActions::Undo => self.undo(),
        HexEditActions::AskGoto => self.start_goto(),
        HexEditActions::AskFind => self.start_find(),
        HexEditActions::AskOpen => self.start_open(),
        HexEditActions::AskSave => self.start_save(),
        HexEditActions::AskConfig => self.start_config(),
        HexEditActions::StartMenu => self.start_menu(),
        _ => self.status(format!("Operation not implemented yet: {:?}", action))
    }
}
// Open the root overlay menu; a selected entry is re-dispatched through
// do_action after the menu closes itself.
fn start_menu(&mut self) {
    let sr = &self.signal_receiver;
    let mut menu = OverlayMenu::with_menu(ROOT_ENTRIES);
    menu.on_selected.connect(signal!(sr with |obj, action| {
        obj.child_widget = None;
        obj.do_action(action);
    }));
    menu.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(menu), OVERLAY_LAYOUT));
}
// Open the configuration overlay; choosing an entry continues to the
// per-key edit line below.
fn start_config(&mut self) {
    let sr = &self.signal_receiver;
    let mut config_screen = ConfigScreen::with_config(self.config.clone());
    config_screen.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    config_screen.on_selected.connect(signal!(sr with |obj, conf_name| {
        obj.child_widget = None;
        obj.start_config_edit(conf_name);
    }));
    self.child_widget = Some((Box::new(config_screen), OVERLAY_LAYOUT));
}
// Prompt for a new value for one configuration key on the bottom input line.
fn start_config_edit(&mut self, conf_name: &'static str) {
    let sr = &self.signal_receiver;
    let mut config_set = ConfigSetLine::new(format!("{} = ", conf_name));
    config_set.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    config_set.on_done.connect(signal!(sr with |obj, config_value| {
        obj.child_widget = None;
        obj.set_config(conf_name, &config_value);
    }));
    self.child_widget = Some((Box::new(config_set), INPUTLINE_LAYOUT));
}
// Apply a key/value pair to the shared configuration.
//
// set_from_key_value returns a Result; previously it was silently dropped
// (triggering the unused-Result lint), so a bad value looked like a no-op.
// Surface the failure in the status bar instead.
fn set_config(&mut self, key: &str, val: &str) {
    // Bind the Result first so the RefCell borrow ends before self.status
    // needs &mut self.
    let res = self.config.borrow_mut().set_from_key_value(key, &val);
    if let Err(e) = res {
        self.status(format!("Can't set {} to {}: {}", key, val, e));
    }
}
// Show the bundled help text as a full-screen overlay.
fn start_help(&mut self) {
    let help_text = include_str!("Help.txt");
    // YAY Lifetimes! (This will hopefully be fixed once rust gains MIR/HIR)
    {
        let sr = &self.signal_receiver;
        let mut ot = OverlayText::with_text(help_text.to_string(), false);
        ot.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            obj.child_widget = None;
            if let Some(ref msg) = opt_msg {
                obj.status(msg.clone());
            } else {
                obj.clear_status();
            }
        }));
        self.child_widget = Some((Box::new(ot), OVERLAY_LAYOUT));
    }
    {
        self.status("Press Esc to return");
    }
}
// Show the accumulated status-message log as a full-screen overlay.
fn start_logview(&mut self) {
    let logs = self.status_log.clone();
    let sr = &self.signal_receiver;
    let mut ot = OverlayText::with_logs(logs, true);
    ot.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(ot), OVERLAY_LAYOUT));
}
// Prompt for a byte offset; positions are converted to nibbles (x2) for goto.
fn start_goto(&mut self) {
    let mut gt = GotoInputLine::new();
    let sr = &self.signal_receiver;
    gt.on_done.connect(signal!(sr with |obj, pos| {
        obj.child_widget = None;
        obj.goto(pos*2);
    }));
    gt.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(gt) as Box<Widget>, INPUTLINE_LAYOUT));
}
// Prompt for a byte pattern and search the buffer for it.
fn start_find(&mut self) {
    let mut find_line = FindInputLine::new();
    let sr = &self.signal_receiver;
    find_line.on_find.connect(signal!(sr with |obj, needle| {
        obj.child_widget = None;
        obj.find_buf(&needle);
    }));
    find_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(find_line) as Box<Widget>, INPUTLINE_LAYOUT));
}
// Prompt for a path to save the buffer to.
fn start_save(&mut self) {
    let mut path_line = PathInputLine::new("Save: ".into());
    let sr = &self.signal_receiver;
    path_line.on_done.connect(signal!(sr with |obj, path| {
        obj.child_widget = None;
        obj.save(&path);
    }));
    path_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(path_line) as Box<Widget>, INPUTLINE_LAYOUT));
}
// Prompt for a path to open.
fn start_open(&mut self) {
    let mut path_line = PathInputLine::new("Open: ".into());
    let sr = &self.signal_receiver;
    path_line.on_done.connect(signal!(sr with |obj, path| {
        obj.child_widget = None;
        obj.open(&path);
    }));
    path_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(path_line) as Box<Widget>, INPUTLINE_LAYOUT));
}
// Drain queued child-widget callbacks; cloning the Rc lets the receiver
// borrow self mutably while the handlers run.
fn process_msgs(&mut self) {
    let sr = self.signal_receiver.clone();
    sr.run(self);
}
// Route a key press: a modal child widget gets priority; otherwise the
// editor view handles it itself.
pub fn input(&mut self, key: Key) {
    self.process_msgs();
    if let Some((ref mut child_widget, _)) = self.child_widget {
        child_widget.input(&self.input, key);
    } else {
        self.view_input(key);
    }
    self.process_msgs();
}
// Adopt a new terminal size and re-clamp the cursor/scroll state.
pub fn resize(&mut self, width: i32, height: i32) {
    self.rect.height = height as isize - 1; // Subtract 1 for the status line on the bottom
    self.rect.width = width as isize;
    self.update_cursor();
}
}
Display an error on failure when setting configuration
This fixes the error Result being silently ignored, and resolves the
lint warning about the unused Result value.
use std::cmp;
use std::cell::RefCell;
use std::fs::File;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::iter;
use std::error::Error;
use std::ascii::AsciiExt;
use itertools::Itertools;
use std::borrow::Cow;
use std::rc::Rc;
use rustbox::{RustBox};
use rustbox::keyboard::Key;
use rex_utils;
use rex_utils::split_vec::SplitVec;
use rex_utils::rect::Rect;
use rex_utils::relative_rect::{RelativeRect, RelativePos, RelativeSize};
use super::super::config::Config;
use super::RustBoxEx::{RustBoxEx, Style};
use super::input::Input;
use super::widget::Widget;
use super::inputline::{GotoInputLine, FindInputLine, PathInputLine, ConfigSetLine};
use super::overlay::OverlayText;
use super::config::ConfigScreen;
use super::menu::{OverlayMenu, MenuState, MenuEntry};
// A single reversible edit operation; offsets are in bytes.
#[derive(Debug)]
enum EditOp {
    Delete(isize, isize),   // Remove the bytes in [start, end)
    Insert(isize, Vec<u8>), // Insert the given bytes at the offset
    Write(isize, Vec<u8>),  // Overwrite bytes in place starting at the offset
}
// How offsets are rendered in the line-number gutter.
#[derive(Debug)]
enum LineNumberMode {
    None,  // Gutter disabled by configuration
    Short, // XXXX -- used while every offset fits in 16 bits
    Long   // XXXX:XXXX
}
// Full-screen layout for overlay children (help, log, menu, config).
static OVERLAY_LAYOUT : RelativeRect<isize> = RelativeRect {
    top: RelativePos::FromStart(0),
    left: RelativePos::FromStart(0),
    width: RelativeSize::Relative(0),
    height: RelativeSize::Relative(0),
};
// Single-row layout at the bottom of the screen for input-line children.
static INPUTLINE_LAYOUT : RelativeRect<isize> = RelativeRect {
    top: RelativePos::FromEnd(1),
    left: RelativePos::FromStart(0),
    width: RelativeSize::Relative(0),
    height: RelativeSize::Absolute(1),
};
// Every editor operation that can be bound to a key or menu entry.
#[derive(Copy,Clone,Debug)]
pub enum HexEditActions {
    Edit(char),       // Type a hex digit (nibble view) or a character (ascii view)
    SwitchView,       // Toggle focus between the hex and ascii panes
    MoveLeft,
    MoveRight,
    MoveUp,
    MoveDown,
    MovePageUp,
    MovePageDown,
    MoveToFirstColumn,
    MoveToLastColumn,
    Delete,           // Delete at the cursor
    DeleteWithMove,   // Backspace-style: delete the byte before the cursor
    CopySelection,
    CutSelection,
    PasteSelection,
    Undo,
    ToggleInsert,     // Insert vs overwrite editing
    ToggleSelecion,   // NOTE(review): typo preserved -- renaming would break callers
    HelpView,
    LogView,
    AskGoto,          // Ask* actions open an input line for their argument
    AskFind,
    AskOpen,
    AskSave,
    AskConfig,
    AskMarkAdd,       // Not implemented yet; reported via the status bar
    AskMarkGoto,      // Not implemented yet; reported via the status bar
    StartMenu,
}
// Root entries of the overlay menu; selections are dispatched as actions.
static ROOT_ENTRIES: MenuState<HexEditActions> = &[
    MenuEntry::CommandEntry('c', "Config", HexEditActions::AskConfig),
    MenuEntry::SubEntries('m', "Mark", &[
        MenuEntry::CommandEntry('a', "Add", HexEditActions::AskMarkAdd),
        MenuEntry::CommandEntry('g', "Goto", HexEditActions::AskMarkGoto),
    ]),
];
signalreceiver_decl!{HexEditSignalReceiver(HexEdit)}
// The main hex-editor widget: owns the buffer, view state and modal children.
pub struct HexEdit {
    buffer: SplitVec,                // The bytes being edited
    config: Rc<RefCell<Config>>,     // Shared, mutable editor configuration
    rect: Rect<isize>,               // Screen area owned by the editor
    cursor_nibble_pos: isize,        // Cursor position in nibbles (2 per byte)
    status_log: Vec<String>,         // Every status message shown so far
    show_last_status: bool,          // Whether the latest status line is visible
    data_offset: isize,              // First byte shown on screen (vertical scroll)
    row_offset: isize,               // First byte shown in each row (horizontal scroll)
    nibble_active: bool,             // true = hex pane focused, false = ascii pane
    selection_start: Option<isize>,  // Anchor byte of an active selection
    insert_mode: bool,               // Insert vs overwrite editing
    input: Input,                    // Keymap translating keys to actions
    undo_stack: Vec<EditOp>,         // Inverse operations for undo
    child_widget: Option<(Box<Widget>, RelativeRect<isize>)>, // Modal overlay, if any
    cur_path: Option<PathBuf>,       // Path of the currently opened file
    clipboard: Option<Vec<u8>>,      // Internal copy/paste buffer
    signal_receiver: Rc<HexEditSignalReceiver>, // Queue for child-widget callbacks
}
impl HexEdit {
// Construct an editor with an empty buffer and default view state.
pub fn new(config: Config) -> HexEdit {
    HexEdit {
        buffer: SplitVec::new(),
        config: Rc::new(RefCell::new(config)),
        rect: Default::default(),
        cursor_nibble_pos: 0,
        data_offset: 0,
        row_offset: 0,
        status_log: vec!["Press C-/ for help".to_string()],
        show_last_status: true,
        nibble_active: true,
        selection_start: None,
        insert_mode: false,
        child_widget: None,
        undo_stack: Vec::new(),
        cur_path: None,
        clipboard: None,
        input: Input::new(),
        signal_receiver: Rc::new(HexEditSignalReceiver::new()),
    }
}
// Reset per-document state (cursor, scroll, selection, undo, modal child)
// after a new buffer has been loaded.
fn reset(&mut self) {
    self.cursor_nibble_pos = 0;
    self.data_offset = 0;
    self.nibble_active = true;
    self.selection_start = None;
    self.insert_mode = false;
    self.child_widget = None;
    self.undo_stack = Vec::new();
}
// Decide how the line-number gutter is rendered: disabled by config,
// otherwise the narrow format for as long as every offset fits in 16 bits.
fn get_linenumber_mode(&self) -> LineNumberMode {
    if !self.config.borrow().show_linenum {
        return LineNumberMode::None;
    }
    if self.buffer.len() > 0xFFFF {
        LineNumberMode::Long
    } else {
        LineNumberMode::Short
    }
}
// Width in cells of the line-number gutter (including trailing whitespace),
// or the 1-cell left margin when line numbers are disabled.
fn get_linenumber_width(&self) -> isize {
    match self.get_linenumber_mode() {
        LineNumberMode::None => 1,
        LineNumberMode::Short => 4 + 1, // 4 for the XXXX + 1 for whitespace
        LineNumberMode::Long => 9 + 1, // 9 for XXXX:XXXX + 1 for whitespace
    }
}
// Bytes per logical line: the configured width, or as many as fit on screen.
fn get_line_width(&self) -> isize {
    self.config.borrow().line_width.unwrap_or(self.get_bytes_per_row() as u32) as isize
}
fn get_bytes_per_row(&self) -> isize {
    // This is the number of cells on the screen that are used for each byte.
    // For the nibble view, we need 3 (1 for each nibble and 1 for the spacing). For
    // the ascii view, if it is shown, we need another one.
    let cells_per_byte = if self.config.borrow().show_ascii { 4 } else { 3 };
    (self.rect.width - self.get_linenumber_width()) / cells_per_byte
}
// Number of bytes visible on one screen (line width x visible rows).
fn get_bytes_per_screen(&self) -> isize {
    self.get_line_width() * self.rect.height
}
// Render the offset gutter for one row, in the format chosen by
// get_linenumber_mode.
fn draw_line_number(&self, rb: &RustBox, row: usize, line_number: usize) {
    match self.get_linenumber_mode() {
        LineNumberMode::None => (),
        LineNumberMode::Short => {
            rb.print_style(0, row, Style::Default, &format!("{:04X}", line_number));
        }
        LineNumberMode::Long => {
            rb.print_style(0, row, Style::Default, &format!("{:04X}:{:04X}", line_number >> 16, line_number & 0xFFFF));
        }
    };
}
// Render one row of the hex view (and, if enabled, the ascii view) from an
// iterator of (byte position, Option<&byte>); None marks the end of the buffer.
fn draw_line(&self, rb: &RustBox, iter: &mut Iterator<Item=(usize, Option<&u8>)>, row: usize) {
    let nibble_view_start = self.get_linenumber_width() as usize;
    // The value of this is wrong if we are not showing the ascii view
    let byte_view_start = nibble_view_start + self.get_bytes_per_row() as usize * 3;
    // We want the selection draw to not go out of the editor view
    let mut prev_in_selection = false;
    let mut at_current_row = false;
    // Horizontal scrolling: skip row_offset bytes, show one screen row's worth.
    for (row_offset, (byte_pos, maybe_byte)) in iter.skip(self.row_offset as usize).enumerate().take(self.get_bytes_per_row() as usize) {
        let at_current_byte = byte_pos as isize == (self.cursor_nibble_pos / 2);
        at_current_row = at_current_row || at_current_byte;
        let in_selection = if let Some(selection_pos) = self.selection_start {
            rex_utils::is_between(byte_pos as isize, selection_pos, self.cursor_nibble_pos / 2)
        } else {
            false
        };
        // Now we draw the nibble view
        let hex_chars = if let Some(&byte) = maybe_byte {
            rex_utils::u8_to_hex(byte)
        } else {
            (' ', ' ')
        };
        let nibble_view_column = nibble_view_start + (row_offset * 3);
        let nibble_style = if (!self.nibble_active && at_current_byte) || in_selection {
            Style::Selection
        } else {
            Style::Default
        };
        rb.print_char_style(nibble_view_column, row, nibble_style,
                            hex_chars.0);
        rb.print_char_style(nibble_view_column + 1, row, nibble_style,
                            hex_chars.1);
        // Fill the spacer cell between two selected bytes so the selection
        // looks contiguous.
        if prev_in_selection && in_selection {
            rb.print_char_style(nibble_view_column - 1, row, nibble_style,
                                ' ');
        }
        if self.nibble_active && self.child_widget.is_none() && at_current_byte {
            rb.set_cursor(nibble_view_column as isize + (self.cursor_nibble_pos & 1),
                          row as isize);
        };
        if self.config.borrow().show_ascii {
            // Now let's draw the byte window
            let byte_char = if let Some(&byte) = maybe_byte {
                let bc = byte as char;
                if bc.is_ascii() && bc.is_alphanumeric() {
                    bc
                } else {
                    '.'
                }
            } else {
                ' '
            };
            // If we are at the current byte but the nibble view is active, we want to draw a
            // "fake" cursor by dawing a selection square
            let byte_style = if (self.nibble_active && at_current_byte) || in_selection {
                Style::Selection
            } else {
                Style::Default
            };
            rb.print_char_style(byte_view_start + row_offset, row, byte_style,
                                byte_char);
            if !self.nibble_active && self.child_widget.is_none() && at_current_byte {
                rb.set_cursor((byte_view_start + row_offset) as isize, row as isize);
            }
            // Remember if we had a selection, so that we know for next char to "fill in" with
            // selection in the nibble view
            prev_in_selection = in_selection;
        }
    }
    // We just need to consume the iterator and see if there were any remaining bytes
    let bytes_remaining = iter.count();
    // '<'/'>' markers show that the cursor's row is scrolled horizontally.
    if at_current_row && self.row_offset != 0 {
        rb.print_char_style(nibble_view_start - 1, row, Style::Default, '<');
    }
    if at_current_row && bytes_remaining != 0 {
        rb.print_char_style(byte_view_start - 1, row, Style::Default, '>');
    }
}
// Render every visible row of the buffer, one chunk per screen line.
pub fn draw_view(&self, rb: &RustBox) {
    let start_iter = self.data_offset as usize;
    let stop_iter = cmp::min(start_iter + self.get_bytes_per_screen() as usize, self.buffer.len());
    let itit = (start_iter..).zip( // We are zipping the byte position
        self.buffer.iter_range(start_iter..stop_iter) // With the data at those bytes
            .map(|x| Some(x)) // And wrapping it in an option
            .chain(iter::once(None))) // So we can have a "fake" last item that will be None
        .chunks_lazy(self.get_line_width() as usize); //And split it into nice row-sized chunks
    for (row, row_iter_) in itit.into_iter().take(self.rect.height as usize).enumerate() {
        // We need to be able to peek in the iterable so we can get the current position
        let mut row_iter = row_iter_.peekable();
        let byte_pos = row_iter.peek().unwrap().0;
        self.draw_line_number(rb, row, byte_pos);
        self.draw_line(rb, &mut row_iter, row);
    }
}
// Render the bottom status bar: last status message on the left; cursor,
// selection and mode summary right-aligned.
fn draw_statusbar(&self, rb: &RustBox) {
    rb.print_style(0, rb.height() - 1, Style::StatusBar, &rex_utils::string_with_repeat(' ', rb.width()));
    if self.show_last_status {
        if let Some(ref status_line) = self.status_log.last() {
            rb.print_style(0, rb.height() - 1, Style::StatusBar, &status_line);
        }
    }
    let mode = if let Some(_) = self.selection_start {
        "SEL"
    } else if self.insert_mode {
        "INS"
    } else {
        "OVR"
    };
    let right_status;
    if let Some(selection_start) = self.selection_start {
        let size = (self.cursor_nibble_pos/2 - selection_start).abs();
        right_status = format!(
            " Start: {} Size: {} Pos: {} {}",
            selection_start, size, self.cursor_nibble_pos/2, mode);
    } else {
        // Fixed: the arguments were swapped, so "Pos" showed the undo-stack
        // depth and "Undo" showed the cursor position.
        right_status = format!(
            " Pos: {} Undo: {} {}",
            self.cursor_nibble_pos/2, self.undo_stack.len(), mode);
    };
    // Right-align the summary; if it is wider than the screen, clip its left edge.
    let (x_pos, start_index) = if rb.width() >= right_status.len() {
        (rb.width() - right_status.len(), 0)
    } else {
        (0, right_status.len() - rb.width())
    };
    rb.print_style(x_pos, rb.height() - 1, Style::StatusBar, &right_status[start_index..]);
}
// Render the whole editor: buffer view, then any modal child on top,
// then the status bar.
pub fn draw(&mut self, rb: &RustBox) {
    self.draw_view(rb);
    if let Some(&mut (ref mut child_widget, ref layout)) = self.child_widget.as_mut() {
        child_widget.draw(rb, layout.get_absolute_to(self.rect), true);
    }
    self.draw_statusbar(rb);
}
// Record a status message and make it visible in the status bar.
// Accepts anything convertible into a Cow<'static, str> so both string
// literals and owned Strings can be passed without extra allocation.
fn status<S: Into<Cow<'static, str>> + ?Sized>(&mut self, st: S) {
    self.show_last_status = true;
    let cow: Cow<'static, str> = st.into();
    // into_owned() reuses the allocation when the Cow is already Owned,
    // instead of re-formatting the string through Display.
    self.status_log.push(cow.into_owned());
}
// Hide the most recent status message without discarding the log.
fn clear_status(&mut self) {
    self.show_last_status = false;
}
pub fn open(&mut self, path: &Path) {
let mut v = vec![];
if let Err(e) = File::open(path).and_then(|mut f| f.read_to_end(&mut v)) {
self.status(format!("ERROR: {}", e.description()));
return;
}
self.buffer = SplitVec::from_vec(v);
self.cur_path = Some(PathBuf::from(path));
self.reset();
}
pub fn save(&mut self, path: &Path) {
let result = File::create(path)
.and_then(|mut f| self.buffer.iter_slices()
.fold(Ok(()), |res, val| res
.and_then(|_| f.write_all(val))));
match result {
Ok(_) => {
self.cur_path = Some(PathBuf::from(path));
}
Err(e) => {
self.status(format!("ERROR: {}", e.description()));
}
}
}
// Apply one edit operation to the buffer and, when requested, push its
// inverse onto the undo stack. Returns the affected (begin, end) byte range.
fn edit_buffer(&mut self, act: EditOp, add_to_undo: bool) -> (isize, isize) {
    // Deferred initialization: every match arm assigns exactly once, so no
    // `mut` is needed (this also silences the unused-mut lint).
    let begin_region: isize;
    let end_region: isize;
    match act {
        EditOp::Insert(offset, buf) => {
            begin_region = offset;
            end_region = offset + buf.len() as isize;
            self.buffer.insert(offset as usize, &buf);
            if add_to_undo {
                // The inverse of inserting is deleting the inserted range
                self.push_undo(EditOp::Delete(offset, offset + buf.len() as isize))
            }
        }
        EditOp::Delete(offset, end) => {
            begin_region = offset;
            end_region = end;
            let res = self.buffer.move_out(offset as usize..end as usize);
            // The inverse of deleting is re-inserting the removed bytes
            if add_to_undo { self.push_undo(EditOp::Insert(offset, res)) }
        }
        EditOp::Write(offset, buf) => {
            begin_region = offset;
            end_region = offset + buf.len() as isize;
            let orig_data = self.buffer.copy_out(offset as usize..(offset as usize + buf.len()));
            self.buffer.copy_in(offset as usize, &buf);
            // The inverse of overwriting is writing back the original bytes
            if add_to_undo { self.push_undo(EditOp::Write(offset, orig_data)) }
        }
    }
    (begin_region, end_region)
}
// Record an inverse operation for a later undo.
fn push_undo(&mut self, act: EditOp) {
    self.undo_stack.push(act);
}
// Pop and apply the most recent inverse operation, then move the cursor to
// the start of the affected region. Applying with add_to_undo=false keeps
// the undo from being re-recorded.
fn undo(&mut self) {
    if let Some(act) = self.undo_stack.pop() {
        let (begin, _) = self.edit_buffer(act, false);
        self.set_cursor(begin * 2);
    }
}
// True when the cursor sits one nibble past the last byte of the buffer.
fn cursor_at_end(&self) -> bool {
    self.cursor_nibble_pos == (self.buffer.len()*2) as isize
}
// Delete the active selection, or the byte at the cursor; with_bksp=true
// behaves like backspace (deletes the byte before the cursor) when there is
// no selection.
fn delete_at_cursor(&mut self, with_bksp: bool) {
    let mut cursor_nibble_pos = self.cursor_nibble_pos;
    let selection_pos = match self.selection_start {
        Some(selection_pos_tag) => selection_pos_tag,
        None => {
            if with_bksp {
                // Nothing before the cursor to backspace over
                if cursor_nibble_pos < 2 {
                    return;
                }
                cursor_nibble_pos -= 2;
            }
            cursor_nibble_pos / 2
        }
    };
    // The selection may extend either way from its anchor; normalize it.
    let del_start = cmp::min(selection_pos, cursor_nibble_pos / 2);
    let mut del_stop = cmp::max(selection_pos, cursor_nibble_pos / 2) + 1;
    // Clamp a range that reaches past the end of the buffer
    if del_stop > self.buffer.len() as isize {
        del_stop -= 1;
        if del_stop == del_start {
            return;
        }
    }
    if self.buffer.len() == 0 {
        self.status("Nothing to delete");
        return;
    }
    self.selection_start = None;
    self.edit_buffer(EditOp::Delete(del_start, del_stop), true);
    self.set_cursor(del_start * 2);
}
// Write one hex digit at the cursor: replaces the selection if one is
// active, inserts in insert mode or at end-of-buffer, otherwise overwrites.
fn write_nibble_at_cursor(&mut self, c: u8) {
    // Replace the text at the selection before writing the data
    if self.selection_start.is_some() {
        self.delete_at_cursor(false);
    }
    if self.insert_mode || self.cursor_at_end() {
        self.insert_nibble_at_cursor(c);
    } else {
        self.set_nibble_at_cursor(c);
    }
}
// Overwrite a single nibble of the byte under the cursor; c must be 0..=15.
fn set_nibble_at_cursor(&mut self, c: u8) {
    let mut byte = self.buffer[(self.cursor_nibble_pos / 2) as usize];
    // Masked OR cannot carry into the other nibble (the previous additive
    // merge could, given an out-of-range c), and the explicit two-way branch
    // removes the `_ => 0xff` arm that silently corrupted the byte on an
    // impossible case.
    byte = if self.cursor_nibble_pos & 1 == 0 {
        // Even nibble position: high nibble
        (byte & 0x0f) | (c << 4)
    } else {
        // Odd nibble position: low nibble
        (byte & 0xf0) | c
    };
    let byte_offset = self.cursor_nibble_pos / 2;
    self.edit_buffer(EditOp::Write(byte_offset, vec![byte]), true);
}
// Insert a new byte whose high nibble is c; mid-byte positions overwrite
// the low nibble of the existing byte instead.
fn insert_nibble_at_cursor(&mut self, c: u8) {
    // If we are at half byte, we still overwrite
    if self.cursor_nibble_pos & 1 == 1 {
        self.set_nibble_at_cursor(c);
        return
    }
    let pos_div2 = self.cursor_nibble_pos / 2;
    self.edit_buffer(EditOp::Insert(pos_div2, vec![c * 16]), true);
}
// Flip between insert and overwrite mode; move_cursor(0) re-clamps the view.
fn toggle_insert_mode(&mut self) {
    self.insert_mode = !self.insert_mode;
    self.move_cursor(0);
}
// Write one whole byte at the cursor (ascii-view editing); replaces the
// selection first, inserts in insert mode or at end-of-buffer.
fn write_byte_at_cursor(&mut self, c: u8) {
    // Replace the text at the selection before writing the data
    if self.selection_start.is_some() {
        self.delete_at_cursor(false);
    }
    let byte_offset = self.cursor_nibble_pos / 2;
    if self.insert_mode || self.cursor_at_end() {
        self.edit_buffer(EditOp::Insert(byte_offset, vec![c]), true);
    } else {
        self.edit_buffer(EditOp::Write(byte_offset, vec![c]), true);
    }
}
// Move the cursor by a relative number of nibbles and re-clamp/scroll.
fn move_cursor(&mut self, pos: isize) {
    self.cursor_nibble_pos += pos;
    self.update_cursor()
}
// Place the cursor at an absolute nibble position and re-clamp/scroll.
fn set_cursor(&mut self, pos: isize) {
    self.cursor_nibble_pos = pos;
    self.update_cursor()
}
// Clamp the cursor into [0, 2*len] and scroll the view (vertically and
// horizontally) so that the cursor stays visible.
fn update_cursor(&mut self) {
    self.cursor_nibble_pos = cmp::max(self.cursor_nibble_pos, 0);
    self.cursor_nibble_pos = cmp::min(self.cursor_nibble_pos, (self.buffer.len()*2) as isize);
    let cursor_byte_pos = self.cursor_nibble_pos / 2;
    let cursor_row_offset = cursor_byte_pos % self.get_line_width();
    // If the cursor moves above or below the view, scroll it
    if cursor_byte_pos < self.data_offset {
        self.data_offset = (cursor_byte_pos) - cursor_row_offset;
    }
    if cursor_byte_pos > (self.data_offset + self.get_bytes_per_screen() - 1) {
        self.data_offset = cursor_byte_pos - cursor_row_offset -
            self.get_bytes_per_screen() + self.get_line_width();
    }
    // If the cursor moves to the right or left of the view, scroll it
    if cursor_row_offset < self.row_offset {
        self.row_offset = cursor_row_offset;
    }
    if cursor_row_offset >= self.row_offset + self.get_bytes_per_row() {
        self.row_offset = cursor_row_offset - self.get_bytes_per_row() + 1;
    }
}
// Anchor a new selection at the current byte, or clear an active one.
fn toggle_selection(&mut self) {
    self.selection_start = match self.selection_start {
        Some(_) => None,
        None => Some(self.cursor_nibble_pos / 2),
    };
}
// Jump to an absolute byte position (pos is given in bytes here; the caller
// of set_cursor works in nibbles, hence the *2).
fn goto(&mut self, pos: isize) {
    self.status(format!("Going to {:?}", pos));
    self.set_cursor(pos * 2);
}
// Search for a byte pattern starting at the cursor, wrapping around to the
// beginning of the buffer when nothing is found past the cursor.
fn find_buf(&mut self, needle: &[u8]) {
    let start_byte = (self.cursor_nibble_pos / 2) as usize;
    let found_pos = self.buffer.find_slice_from(start_byte, needle)
        .or_else(|| self.buffer.find_slice_from(0, needle));
    match found_pos {
        Some(pos) => {
            self.status(format!("Found at {:?}", pos));
            self.set_cursor((pos * 2) as isize);
        }
        None => self.status("Nothing found!"),
    }
}
// Copy the selected byte range into the internal clipboard; returns the
// number of bytes copied, or None when there is no selection.
fn read_cursor_to_clipboard(&mut self) -> Option<usize> {
    let (start, stop) = match self.selection_start {
        None => { return None; },
        Some(selection_pos) => {
            // The selection may extend either way from its anchor
            (cmp::min(selection_pos, self.cursor_nibble_pos / 2),
             cmp::max(selection_pos, self.cursor_nibble_pos / 2))
        }
    };
    let data = self.buffer.copy_out(start as usize..stop as usize);
    let data_len = data.len();
    self.clipboard = Some(data);
    Some(data_len)
}
// Ctrl-C: copy the selection and clear it.
fn edit_copy(&mut self) {
    if let Some(data_len) = self.read_cursor_to_clipboard() {
        self.status(format!("Copied {}", data_len));
        self.selection_start = None;
    }
}
// Ctrl-X: copy the selection, then delete it from the buffer.
fn edit_cut(&mut self) {
    if let Some(data_len) = self.read_cursor_to_clipboard() {
        self.delete_at_cursor(false);
        self.status(format!("Cut {}", data_len));
    }
}
// Ctrl-V: insert the clipboard contents at the cursor position.
fn edit_paste(&mut self) {
    let data = if let Some(ref d) = self.clipboard {
        d.clone()
    } else {
        return;
    };
    let data_len = data.len() as isize;
    // This is needed to satisfy the borrow checker
    let cur_pos_in_bytes = self.cursor_nibble_pos / 2;
    self.edit_buffer(EditOp::Insert(cur_pos_in_bytes, data), true);
    // NOTE(review): the cursor is in nibbles, so skipping past data_len
    // inserted bytes would be data_len * 2; `data_len + 1` looks suspicious
    // -- confirm intent before changing.
    self.move_cursor(data_len + 1);
}
// Feed a raw key press through the keymap; dispatch the resulting action, if any.
fn view_input(&mut self, key: Key) {
    if let Some(action) = self.input.editor_input(key) {
        self.do_action(action)
    }
}
// Central dispatcher for editor actions. Cursor distances are measured in
// nibbles, so byte-granular movement uses multiples of 2.
fn do_action(&mut self, action: HexEditActions) {
    self.clear_status();
    match action {
        // Movement
        HexEditActions::MoveLeft if self.nibble_active => self.move_cursor(-1),
        HexEditActions::MoveRight if self.nibble_active => self.move_cursor(1),
        HexEditActions::MoveLeft if !self.nibble_active => self.move_cursor(-2),
        HexEditActions::MoveRight if !self.nibble_active => self.move_cursor(2),
        // Unreachable: the guards above cover both values of nibble_active.
        HexEditActions::MoveLeft => panic!("Make the case handler happy!"),
        HexEditActions::MoveRight => panic!("Make the case handler happy!"),
        HexEditActions::MoveUp => {
            let t = -self.get_line_width() * 2;
            self.move_cursor(t)
        }
        HexEditActions::MoveDown => {
            let t = self.get_line_width() * 2;
            self.move_cursor(t)
        }
        HexEditActions::MovePageUp => {
            let t = -(self.get_bytes_per_screen() * 2);
            self.move_cursor(t)
        }
        HexEditActions::MovePageDown => {
            let t = self.get_bytes_per_screen() * 2;
            self.move_cursor(t)
        }
        HexEditActions::MoveToFirstColumn => {
            let pos_in_line = self.cursor_nibble_pos % (self.get_line_width()*2);
            self.move_cursor(-pos_in_line)
        }
        HexEditActions::MoveToLastColumn => {
            let pos_in_line = self.cursor_nibble_pos % (self.get_line_width()*2);
            let i = self.get_line_width()*2 - 2 - pos_in_line;
            self.move_cursor(i);
        }
        HexEditActions::Delete => self.delete_at_cursor(false),
        HexEditActions::DeleteWithMove => self.delete_at_cursor(true),
        // Ctrl X, C V
        HexEditActions::CutSelection => self.edit_cut(),
        HexEditActions::CopySelection => self.edit_copy(),
        HexEditActions::PasteSelection => self.edit_paste(),
        // Hex input for nibble view
        HexEditActions::Edit(ch) if self.nibble_active => {
            if let Some(val) = ch.to_digit(16) {
                self.write_nibble_at_cursor(val as u8);
                self.move_cursor(1);
            } else {
                // TODO: Show error?
            }
        },
        // Ascii edit for byte view
        HexEditActions::Edit(ch) if !self.nibble_active => {
            if ch.len_utf8() == 1 && ch.is_alphanumeric() {
                // TODO: Make it printable rather than alphanumeric
                self.write_byte_at_cursor(ch as u8);
                self.move_cursor(2);
            } else {
                // TODO: Show error?
            }
        }
        // Unreachable: the two guards above cover both views.
        HexEditActions::Edit(_) => panic!("Make the case handler happy!"),
        HexEditActions::SwitchView => {
            self.nibble_active = !self.nibble_active;
        },
        HexEditActions::HelpView => self.start_help(),
        HexEditActions::LogView => self.start_logview(),
        HexEditActions::ToggleInsert => self.toggle_insert_mode(),
        HexEditActions::ToggleSelecion => self.toggle_selection(),
        HexEditActions::Undo => self.undo(),
        HexEditActions::AskGoto => self.start_goto(),
        HexEditActions::AskFind => self.start_find(),
        HexEditActions::AskOpen => self.start_open(),
        HexEditActions::AskSave => self.start_save(),
        HexEditActions::AskConfig => self.start_config(),
        HexEditActions::StartMenu => self.start_menu(),
        // Menu-only actions (e.g. marks) with no implementation yet.
        _ => self.status(format!("Operation not implemented yet: {:?}", action))
    }
}
// Open the root overlay menu; a selected entry is re-dispatched through
// do_action after the menu closes itself.
fn start_menu(&mut self) {
    let sr = &self.signal_receiver;
    let mut menu = OverlayMenu::with_menu(ROOT_ENTRIES);
    menu.on_selected.connect(signal!(sr with |obj, action| {
        obj.child_widget = None;
        obj.do_action(action);
    }));
    menu.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(menu), OVERLAY_LAYOUT));
}
// Open the configuration overlay; choosing an entry continues to the
// per-key edit line below.
fn start_config(&mut self) {
    let sr = &self.signal_receiver;
    let mut config_screen = ConfigScreen::with_config(self.config.clone());
    config_screen.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    config_screen.on_selected.connect(signal!(sr with |obj, conf_name| {
        obj.child_widget = None;
        obj.start_config_edit(conf_name);
    }));
    self.child_widget = Some((Box::new(config_screen), OVERLAY_LAYOUT));
}
// Prompt for a new value for one configuration key on the bottom input line.
fn start_config_edit(&mut self, conf_name: &'static str) {
    let sr = &self.signal_receiver;
    let mut config_set = ConfigSetLine::new(format!("{} = ", conf_name));
    config_set.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    config_set.on_done.connect(signal!(sr with |obj, config_value| {
        obj.child_widget = None;
        obj.set_config(conf_name, &config_value);
    }));
    self.child_widget = Some((Box::new(config_set), INPUTLINE_LAYOUT));
}
// Apply a key/value pair to the shared configuration, reporting any
// failure in the status bar.
fn set_config(&mut self, key: &str, val: &str) {
    // Bind the Result first so the RefCell borrow ends before self.status
    // needs &mut self.
    let result = self.config.borrow_mut().set_from_key_value(key, &val);
    if let Err(e) = result {
        self.status(format!("Can't set {} to {}: {}", key, val, e));
    }
}
// Show the bundled help text as a full-screen overlay.
fn start_help(&mut self) {
    let help_text = include_str!("Help.txt");
    // YAY Lifetimes! (This will hopefully be fixed once rust gains MIR/HIR)
    {
        let sr = &self.signal_receiver;
        let mut ot = OverlayText::with_text(help_text.to_string(), false);
        ot.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            obj.child_widget = None;
            if let Some(ref msg) = opt_msg {
                obj.status(msg.clone());
            } else {
                obj.clear_status();
            }
        }));
        self.child_widget = Some((Box::new(ot), OVERLAY_LAYOUT));
    }
    {
        self.status("Press Esc to return");
    }
}
// Show the accumulated status-message log as a full-screen overlay.
fn start_logview(&mut self) {
    let logs = self.status_log.clone();
    let sr = &self.signal_receiver;
    let mut ot = OverlayText::with_logs(logs, true);
    ot.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(ot), OVERLAY_LAYOUT));
}
// Prompt for a byte offset; positions are converted to nibbles (x2) for goto.
fn start_goto(&mut self) {
    let mut gt = GotoInputLine::new();
    let sr = &self.signal_receiver;
    gt.on_done.connect(signal!(sr with |obj, pos| {
        obj.child_widget = None;
        obj.goto(pos*2);
    }));
    gt.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(gt) as Box<Widget>, INPUTLINE_LAYOUT));
}
// Prompt for a byte pattern and search the buffer for it.
fn start_find(&mut self) {
    let mut find_line = FindInputLine::new();
    let sr = &self.signal_receiver;
    find_line.on_find.connect(signal!(sr with |obj, needle| {
        obj.child_widget = None;
        obj.find_buf(&needle);
    }));
    find_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(find_line) as Box<Widget>, INPUTLINE_LAYOUT));
}
// Prompt for a path to save the buffer to.
fn start_save(&mut self) {
    let mut path_line = PathInputLine::new("Save: ".into());
    let sr = &self.signal_receiver;
    path_line.on_done.connect(signal!(sr with |obj, path| {
        obj.child_widget = None;
        obj.save(&path);
    }));
    path_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(path_line) as Box<Widget>, INPUTLINE_LAYOUT));
}
// Prompt for a path to open.
fn start_open(&mut self) {
    let mut path_line = PathInputLine::new("Open: ".into());
    let sr = &self.signal_receiver;
    path_line.on_done.connect(signal!(sr with |obj, path| {
        obj.child_widget = None;
        obj.open(&path);
    }));
    path_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
        obj.child_widget = None;
        if let Some(ref msg) = opt_msg {
            obj.status(msg.clone());
        } else {
            obj.clear_status();
        }
    }));
    self.child_widget = Some((Box::new(path_line) as Box<Widget>, INPUTLINE_LAYOUT));
}
// Drain queued child-widget callbacks; cloning the Rc lets the receiver
// borrow self mutably while the handlers run.
fn process_msgs(&mut self) {
    let sr = self.signal_receiver.clone();
    sr.run(self);
}
// Route a key press: a modal child widget gets priority; otherwise the
// editor view handles it itself.
pub fn input(&mut self, key: Key) {
    self.process_msgs();
    if let Some((ref mut child_widget, _)) = self.child_widget {
        child_widget.input(&self.input, key);
    } else {
        self.view_input(key);
    }
    self.process_msgs();
}
// Adopt a new terminal size and re-clamp the cursor/scroll state.
pub fn resize(&mut self, width: i32, height: i32) {
    self.rect.height = height as isize - 1; // Subtract 1 for the status line on the bottom
    self.rect.width = width as isize;
    self.update_cursor();
}
}
|
use std::str;
use std::cmp;
use std::path::Path;
use std::path::PathBuf;
use util::{string_with_repeat, is_between};
use std::error::Error;
use std::ascii::AsciiExt;
use rustbox::{RustBox};
use rustbox::keyboard::Key;
use super::super::buffer::Buffer;
use super::super::segment::Segment;
use super::common::{Rect, u8_to_hex};
use super::RustBoxEx::{RustBoxEx, Style};
use super::input::Input;
use super::inputline::{InputLine, GotoInputLine, FindInputLine, PathInputLine};
use super::overlay::OverlayText;
// A reversible edit operation for the undo stack; offsets are in bytes.
#[derive(Debug)]
enum UndoAction {
    Delete(isize, isize),   // Remove the bytes in [start, end)
    Insert(isize, Vec<u8>), // Insert the given bytes at the offset
    Write(isize, Vec<u8>),  // Overwrite bytes in place at the offset
}
// Every editor operation that can be bound to a key.
#[derive(Copy,Clone,Debug)]
pub enum HexEditActions {
    Edit(char),      // Type a hex digit (nibble view) or a character (ascii view)
    SwitchView,      // Toggle focus between the hex and ascii panes
    MoveLeft,
    MoveRight,
    MoveUp,
    MoveDown,
    MovePageUp,
    MovePageDown,
    Delete,
    DeleteWithMove,  // Backspace-style: delete the byte before the cursor
    CopySelection,
    CutSelection,
    PasteSelection,
    Undo,
    ToggleInsert,    // Insert vs overwrite editing
    ToggleSelecion,  // NOTE(review): typo preserved -- renaming would break callers
    HelpView,
    AskGoto,         // Ask* actions open an input line for their argument
    AskFind,
    AskOpen,
    AskSave
}
signalreceiver_decl!{HexEditSignalReceiver(HexEdit)}
// The main hex-editor widget (earlier revision): geometry is tracked in
// explicit width/height fields and modal children are separate options.
pub struct HexEdit {
    buffer: Segment,                 // The bytes being edited
    cursor_pos: isize,               // Cursor position in nibbles (2 per byte)
    cur_height: isize,               // Visible height in rows
    cur_width: isize,                // Visible width in cells
    nibble_width: isize,             // Cells spanned by the nibble view
    nibble_size: isize,              // Total size of the buffer in nibbles
    data_size: isize,                // Total size of the buffer in bytes
    status_log: Vec<String>,         // Every status message shown so far
    data_offset: isize,              // First nibble shown on screen (scroll)
    nibble_start: isize,             // Column where the nibble view begins
    nibble_active: bool,             // true = hex pane focused, false = ascii pane
    selection_start: Option<isize>,  // Anchor (in nibbles) of an active selection
    insert_mode: bool,               // Insert vs overwrite editing
    input: Input,                    // Keymap translating keys to actions
    undo_stack: Vec<UndoAction>,     // Inverse operations for undo
    input_entry: Option<Box<InputLine>>, // Active bottom input line, if any
    overlay: Option<OverlayText>,    // Active full-screen overlay, if any
    cur_path: Option<PathBuf>,       // Path of the currently opened file
    clipboard: Option<Vec<u8>>,      // Internal copy/paste buffer
    signal_receiver: Option<HexEditSignalReceiver>, // Queue for widget callbacks
}
impl HexEdit {
pub fn new() -> HexEdit {
HexEdit {
buffer: Segment::new(),
cursor_pos: 0,
nibble_size: 0,
cur_width: 50,
cur_height: 50,
nibble_width: 1,
data_offset: 0,
nibble_start: 0,
data_size: 0,
status_log: vec!("Press C-/ for help".to_string()),
nibble_active: true,
selection_start: None,
insert_mode: false,
input_entry: None,
undo_stack: Vec::new(),
overlay: None,
cur_path: None,
clipboard: None,
input: Input::new(),
signal_receiver: Some(HexEditSignalReceiver::new()),
}
}
fn reset(&mut self) {
self.cursor_pos = 0;
self.data_offset = 0;
self.nibble_active = true;
self.selection_start = None;
self.insert_mode = false;
self.input_entry = None;
self.undo_stack = Vec::new();
self.recalculate();
}
    /// Draws one editor row: the hex (nibble) pane and the ascii (byte) pane
    /// side by side, applying selection/cursor styling per byte.
    ///
    /// `iter` yields `(view_index, byte)` pairs for this row; `byte` is `None`
    /// only for the synthetic trailing slot used by insert mode at end-of-buffer.
    fn draw_line(&self, rb: &RustBox, iter: &mut Iterator<Item=(usize, Option<&u8>)>, row: usize) {
        let nibble_view_start = self.nibble_start as usize;
        let byte_view_start = nibble_view_start + (self.nibble_width as usize / 2) * 3;
        // We want the selection draw to not go out of the editor view
        let mut prev_in_selection = false;
        for (byte_i, maybe_byte) in iter {
            // let row = byte_i / (self.nibble_width as usize / 2);
            let column = byte_i % (self.nibble_width as usize / 2);
            // byte_i counts from the top of the view, so shift by the scroll
            // offset (data_offset is in nibbles, hence the / 2)
            let byte_pos = byte_i as isize + self.data_offset / 2;
            let at_current_byte = byte_pos == (self.cursor_pos / 2);
            let in_selection = if let Some(selection_pos) = self.selection_start {
                is_between(byte_pos, selection_pos / 2, self.cursor_pos / 2)
            } else {
                false
            };
            // Now we draw the nibble view
            let hex_chars = if let Some(&byte) = maybe_byte {
                u8_to_hex(byte)
            } else {
                (' ', ' ')
            };
            // Each byte occupies three columns in the hex pane ("XX ")
            let nibble_view_column = nibble_view_start + (column * 3);
            let nibble_style = if (!self.nibble_active && at_current_byte) || in_selection {
                Style::Selection
            } else {
                Style::Default
            };
            rb.print_char_style(nibble_view_column, row, nibble_style,
                hex_chars.0);
            rb.print_char_style(nibble_view_column + 1, row, nibble_style,
                hex_chars.1);
            // Also highlight the gap between two consecutive selected bytes
            if prev_in_selection && in_selection {
                rb.print_char_style(nibble_view_column - 1, row, nibble_style,
                    ' ');
            }
            if self.nibble_active && self.input_entry.is_none() && at_current_byte {
                rb.set_cursor(nibble_view_column as isize + (self.cursor_pos & 1),
                    row as isize);
            };
            // Now let's draw the byte window
            let byte_char = if let Some(&byte) = maybe_byte {
                let bc = byte as char;
                if bc.is_ascii() && bc.is_alphanumeric() {
                    bc
                } else {
                    '.'
                }
            } else {
                ' '
            };
            // If we are at the current byte but the nibble view is active, we want to draw a
            // "fake" cursor by drawing a selection square
            let byte_style = if (self.nibble_active && at_current_byte) || in_selection {
                Style::Selection
            } else {
                Style::Default
            };
            rb.print_char_style(byte_view_start + column, row, byte_style,
                byte_char);
            if !self.nibble_active && self.input_entry.is_none() && at_current_byte {
                rb.set_cursor((byte_view_start + column) as isize, row as isize);
            }
            // Remember if we had a selection, so that we know for next char to "fill in" with
            // selection in the nibble view
            prev_in_selection = in_selection;
        }
    }
    /// Renders every visible row: prints the address gutter for the row, then
    /// hands that row's slice of the byte iterator to `draw_line`.
    pub fn draw_view(&self, rb: &RustBox) {
        // Single-element tail so insert mode gets a "virtual" slot one past
        // the end of the buffer.
        let extra_none: &[Option<&u8>] = &[None];
        let start_iter = (self.data_offset / 2) as usize;
        let stop_iter = cmp::min(start_iter + (self.nibble_size / 2) as usize, self.buffer.len());
        let row_count = (stop_iter - start_iter) / (self.nibble_width as usize / 2) + 1;
        let mut itit_ = self.buffer.iter_range(start_iter, stop_iter)
            // This is needed for the "fake" last element for insertion mode
            .map(|x| Some(x))
            .chain(extra_none.iter().map(|n| *n))
            .enumerate().peekable();
        let mut itit = itit_.by_ref();
        for row in 0..row_count {
            // NOTE(review): peek().unwrap() assumes the iterator always spans
            // row_count rows — verify the "+ 1" above cannot overrun it.
            let byte_pos = itit.peek().unwrap().0 as isize + self.data_offset / 2;
            if self.nibble_start == 5 {
                // Narrow gutter: 4 hex digits cover the whole buffer
                rb.print_style(0, row, Style::Default, &format!("{:04X}", byte_pos));
            } else {
                rb.print_style(0, row, Style::Default, &format!("{:04X}:{:04X}", byte_pos >> 16, byte_pos & 0xFFFF));
            }
            self.draw_line(rb, &mut itit.take((self.nibble_width as usize / 2)), row);
        }
    }
    /// Paints the bottom status row: the latest status message on the left and
    /// a debug summary of the editor state on the right.
    fn draw_statusbar(&self, rb: &RustBox) {
        rb.print_style(0, rb.height() - 1, Style::StatusBar, &string_with_repeat(' ', rb.width()));
        if let Some(ref status_line) = self.status_log.last() {
            rb.print_style(0, rb.height() - 1, Style::StatusBar, &status_line);
        }
        let right_status = format!(
            "overlay = {:?}, input = {:?} undo = {:?}, pos = {:?}, selection = {:?}, insert = {:?}",
            self.overlay.is_none(), self.input_entry.is_none(), self.undo_stack.len(),
            self.cursor_pos, self.selection_start, self.insert_mode);
        // NOTE(review): rb.width() - right_status.len() underflows if the text
        // is wider than the terminal — confirm width is always sufficient.
        rb.print_style(rb.width() - right_status.len(), rb.height() - 1, Style::StatusBar, &right_status);
    }
pub fn draw(&mut self, rb: &RustBox) {
self.draw_view(rb);
if let Some(entry) = self.input_entry.as_mut() {
entry.draw(rb, Rect {
top: (rb.height() - 2) as isize,
bottom: (rb.height() - 1) as isize,
left: 0,
right: rb.width() as isize
}, true);
}
if let Some(overlay) = self.overlay.as_mut() {
overlay.draw(rb, Rect {
top: 0,
bottom: self.cur_height,
left: 0,
right: self.cur_width,
}, true);
}
self.draw_statusbar(rb);
}
    /// Appends a message to the status log; the newest entry is what the
    /// status bar displays.
    fn status(&mut self, st: String) {
        self.status_log.push(st);
    }
pub fn open(&mut self, path: &Path) {
match Segment::from_path(path) {
Ok(buf) => {
self.buffer = buf;
self.cur_path = Some(PathBuf::from(path));
self.reset();
}
Err(e) => {
self.status(format!("ERROR: {}", e.description()));
}
}
}
pub fn save(&mut self, path: &Path) {
match self.buffer.save(path) {
Ok(_) => {
self.cur_path = Some(PathBuf::from(path));
}
Err(e) => {
self.status(format!("ERROR: {}", e.description()));
}
}
}
fn do_action(&mut self, act: UndoAction, add_to_undo: bool) -> (isize, isize) {
let stat = format!("doing = {:?}", act);
let mut begin_region: isize;
let mut end_region: isize;
match act {
UndoAction::Insert(offset, buf) => {
begin_region = offset;
end_region = offset + buf.len() as isize;
self.buffer.insert(offset as usize, &buf);
if add_to_undo {
self.push_undo(UndoAction::Delete(offset, offset + buf.len() as isize))
}
self.recalculate();
}
UndoAction::Delete(offset, end) => {
begin_region = offset;
end_region = end;
let res = self.buffer.remove(offset as usize, end as usize);
if add_to_undo { self.push_undo(UndoAction::Insert(offset, res)) }
self.recalculate();
}
UndoAction::Write(offset, buf) => {
begin_region = offset;
end_region = offset + buf.len() as isize;
let orig_data = self.buffer.read(offset as usize, buf.len());
self.buffer.write(offset as usize, &buf);
if add_to_undo { self.push_undo(UndoAction::Write(offset, orig_data)) }
}
}
self.status(stat);
(begin_region, end_region)
}
    /// Records an inverse action; `undo` pops and applies the most recent one.
    fn push_undo(&mut self, act: UndoAction) {
        self.undo_stack.push(act);
    }
fn undo(&mut self) {
match self.undo_stack.pop() {
Some(act) => {
let (begin, _) = self.do_action(act, false);
self.set_cursor(begin * 2);
}
None => ()
}
}
    /// True when the cursor sits one past the last nibble of the buffer.
    fn cursor_at_end(&self) -> bool {
        self.cursor_pos == self.data_size
    }
    /// Deletes the selected byte range, or the byte at the cursor when there
    /// is no selection. `with_bksp` makes it behave like backspace, deleting
    /// the byte before the cursor instead.
    fn delete_at_cursor(&mut self, with_bksp: bool) {
        let mut cursor_pos = self.cursor_pos;
        let selection_pos = match self.selection_start {
            Some(selection_pos_tag) => selection_pos_tag,
            None => {
                if with_bksp {
                    // Nothing before the cursor to backspace over
                    if cursor_pos < 2 {
                        return;
                    }
                    cursor_pos -= 2;
                }
                cursor_pos
            }
        };
        // Convert the nibble span to a byte range; del_stop is one past the
        // last byte to remove.
        let del_start = cmp::min(selection_pos, cursor_pos) / 2;
        let mut del_stop = cmp::max(selection_pos, cursor_pos) / 2 + 1;
        // Clamp when the range reaches the virtual end-of-buffer slot.
        if del_stop > self.data_size / 2 {
            del_stop -= 1;
            if del_stop == del_start {
                return;
            }
        }
        if self.data_size == 0 {
            self.status(format!("Nothing to delete"));
            return;
        }
        self.selection_start = None;
        self.do_action(UndoAction::Delete(del_start, del_stop), true);
        self.set_cursor(del_start * 2);
    }
fn write_nibble_at_cursor(&mut self, c: u8) {
match self.selection_start {
Some(_) => self.delete_at_cursor(false),
None => ()
}
if self.insert_mode || self.cursor_at_end() {
self.insert_nibble_at_cursor(c);
} else {
self.set_nibble_at_cursor(c);
}
}
fn set_nibble_at_cursor(&mut self, c: u8) {
let mut byte = self.buffer[(self.cursor_pos / 2) as usize];
byte = match self.cursor_pos & 1 {
0 => (byte & 0x0f) + c * 16,
1 => (byte & 0xf0) + c,
_ => 0xff,
};
let byte_offset = self.cursor_pos / 2;
self.do_action(UndoAction::Write(byte_offset, vec!(byte)), true);
}
fn insert_nibble_at_cursor(&mut self, c: u8) {
// If we are at half byte, we still overwrite
if self.cursor_pos & 1 == 1 {
self.set_nibble_at_cursor(c);
return
}
let pos_div2 = self.cursor_pos / 2;
self.do_action(UndoAction::Insert(pos_div2, vec!(c * 16)), true);
}
    /// Flips insert/overwrite mode; move_cursor(0) re-clamps the cursor and
    /// refreshes the scroll state for the new mode.
    fn toggle_insert_mode(&mut self) {
        self.insert_mode = !self.insert_mode;
        self.move_cursor(0);
    }
fn write_byte_at_cursor(&mut self, c: u8) {
match self.selection_start {
Some(_) => self.delete_at_cursor(false),
None => ()
}
let byte_offset = self.cursor_pos / 2;
if self.insert_mode || self.cursor_at_end() {
self.do_action(UndoAction::Insert(byte_offset, vec!(c)), true);
} else {
self.do_action(UndoAction::Write(byte_offset, vec!(c)), true);
}
}
    /// Moves the cursor by `pos` nibbles (may be negative) and re-clamps.
    fn move_cursor(&mut self, pos: isize) {
        self.cursor_pos += pos;
        self.update_cursor()
    }
    /// Places the cursor at nibble position `pos` and re-clamps.
    fn set_cursor(&mut self, pos: isize) {
        self.cursor_pos = pos;
        self.update_cursor()
    }
    /// Clamps the cursor to [0, data_size] and scrolls `data_offset` so the
    /// cursor row stays on screen.
    fn update_cursor(&mut self) {
        self.cursor_pos = cmp::max(self.cursor_pos, 0);
        self.cursor_pos = cmp::min(self.cursor_pos, self.data_size);
        // Cursor moved above the view: snap the offset to the cursor's row.
        if self.cursor_pos < self.data_offset {
            self.data_offset = (self.cursor_pos / self.nibble_width) * self.nibble_width;
        }
        // Cursor moved below the view: put its row at the bottom of the screen.
        if self.cursor_pos > (self.data_offset + self.nibble_size - 1) {
            let end_row = self.cursor_pos - (self.cursor_pos % self.nibble_width) -
                self.nibble_size + self.nibble_width;
            self.data_offset = end_row;
        }
    }
fn toggle_selection(&mut self) {
match self.selection_start {
Some(_) => self.selection_start = None,
None => self.selection_start = Some(self.cursor_pos)
}
let st = format!("selection = {:?}", self.selection_start);
self.status(st.clone());
}
    /// Jumps the cursor to byte offset `pos` (converted to nibbles).
    fn goto(&mut self, pos: isize) {
        self.status(format!("Going to {:?}", pos));
        self.set_cursor(pos * 2);
    }
fn find_buf(&mut self, needle: &[u8]) {
let found_pos = match self.buffer.find_from((self.cursor_pos / 2) as usize, needle) {
None => {
self.buffer.find_from(0, needle)
}
a => a
};
match found_pos {
Some(pos) => {
self.status(format!("Found at {:?}", pos));
self.set_cursor((pos * 2) as isize);
}
None => {
self.status(format!("Nothing found!"));
}
};
}
    /// Copies the selected bytes into the clipboard and returns how many were
    /// copied, or None when there is no selection.
    fn read_cursor_to_clipboard(&mut self) -> Option<usize> {
        let (start, stop) = match self.selection_start {
            None => { return None; },
            Some(selection_pos) => {
                (cmp::min(selection_pos, self.cursor_pos) / 2,
                 cmp::max(selection_pos, self.cursor_pos) / 2)
            }
        };
        // NOTE(review): do_action calls Segment::read(offset, length), but here
        // the second argument is `stop`, an absolute end position rather than a
        // length — confirm the intended signature; this looks like it reads the
        // wrong span whenever start > 0.
        let data = self.buffer.read(start as usize, stop as usize);
        let data_len = data.len();
        self.clipboard = Some(data);
        Some(data_len)
    }
fn edit_copy(&mut self) {
match self.read_cursor_to_clipboard() {
Some(data_len) => self.status(format!("Copied {}", data_len)),
None => ()
}
}
fn edit_cut(&mut self) {
match self.read_cursor_to_clipboard() {
Some(data_len) => {
self.delete_at_cursor(false);
self.status(format!("Cut {}", data_len));
}
None => ()
}
}
fn edit_paste(&mut self) {
let data;
match self.clipboard {
Some(ref d) => { data = d.clone(); },
None => { return; }
};
let pos_div2 = self.cursor_pos / 2;
self.do_action(UndoAction::Insert(pos_div2, data), true);
}
    /// Translates a key press into a HexEditActions command and executes it.
    /// Cursor distances are in nibbles, which is why ascii-pane movement
    /// steps by 2 while hex-pane movement steps by 1.
    fn view_input(&mut self, key: Key) {
        let action = self.input.editor_input(key);
        if action.is_none() {
            return;
        }
        match action.unwrap() {
            // Movement
            HexEditActions::MoveLeft if self.nibble_active => self.move_cursor(-1),
            HexEditActions::MoveRight if self.nibble_active => self.move_cursor(1),
            HexEditActions::MoveLeft if !self.nibble_active => self.move_cursor(-2),
            HexEditActions::MoveRight if !self.nibble_active => self.move_cursor(2),
            HexEditActions::MoveUp => {
                let t = -self.nibble_width;
                self.move_cursor(t)
            }
            HexEditActions::MoveDown => {
                let t = self.nibble_width;
                self.move_cursor(t)
            }
            // Page movement covers half a screen at a time
            HexEditActions::MovePageUp => {
                let t = -(self.nibble_size - self.nibble_width) / 2;
                self.move_cursor(t)
            }
            HexEditActions::MovePageDown => {
                let t = (self.nibble_size - self.nibble_width) / 2;
                self.move_cursor(t)
            }
            // UndoAction::Delete
            HexEditActions::Delete => self.delete_at_cursor(false),
            HexEditActions::DeleteWithMove => self.delete_at_cursor(true),
            // Ctrl X, C V
            HexEditActions::CutSelection => self.edit_cut(),
            HexEditActions::CopySelection => self.edit_copy(),
            HexEditActions::PasteSelection => self.edit_paste(),
            // Hex input for nibble view
            HexEditActions::Edit(ch) if self.nibble_active => {
                match ch.to_digit(16) {
                    Some(val) => {
                        self.write_nibble_at_cursor(val as u8);
                        self.move_cursor(1);
                    }
                    None => () // TODO: Show error?
                }
            },
            // Ascii edit for byte view
            HexEditActions::Edit(ch) if !self.nibble_active => {
                if ch.len_utf8() == 1 && ch.is_alphanumeric() {
                    // TODO: Make it printable rather than alphanumeric
                    self.write_byte_at_cursor(ch as u8);
                    self.move_cursor(2);
                } else {
                    // TODO: Show error?
                }
            }
            HexEditActions::SwitchView => {
                self.nibble_active = !self.nibble_active;
                let t = self.nibble_active;
                self.status(format!("nibble_active = {:?}", t));
            },
            HexEditActions::HelpView => self.start_help(),
            HexEditActions::ToggleInsert => self.toggle_insert_mode(),
            HexEditActions::ToggleSelecion => self.toggle_selection(),
            HexEditActions::Undo => self.undo(),
            HexEditActions::AskGoto => self.start_goto(),
            HexEditActions::AskFind => self.start_find(),
            HexEditActions::AskOpen => self.start_open(),
            HexEditActions::AskSave => self.start_save(),
            // Unreachable today (all variants are covered above), kept as a
            // diagnostic fallback.
            _ => self.status(format!("key = {:?}", key)),
        }
    }
    /// Shows the help overlay; its cancel signal (queued, run by
    /// process_msgs) closes the overlay and may carry a status message.
    fn start_help(&mut self) {
        let help_text = include_str!("Help.txt");
        let ref sr = self.signal_receiver.as_mut().unwrap();
        let mut ot = OverlayText::with_text(help_text.to_string());
        ot.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.overlay = None;
        }));
        self.overlay = Some(ot);
    }
    /// Opens the "goto offset" prompt; on_done jumps to the entered position.
    fn start_goto(&mut self) {
        let mut gt = GotoInputLine::new();
        // let mut sender_clone0 = self.sender.clone();
        let ref sr = self.signal_receiver.as_mut().unwrap();
        // NOTE(review): goto() already multiplies by 2 (bytes -> nibbles), so
        // pos*2 here lands the cursor at pos*4 nibbles — confirm which scaling
        // is intended.
        gt.on_done.connect(signal!(sr with |obj, pos| {
            obj.goto(pos*2);
            obj.input_entry = None;
        }));
        gt.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.input_entry = None;
        }));
        self.input_entry = Some(Box::new(gt) as Box<InputLine>)
    }
    /// Opens the "find" prompt; on_find searches the buffer for the entered
    /// bytes (with wraparound; see find_buf).
    fn start_find(&mut self) {
        let mut find_line = FindInputLine::new();
        let ref sr = self.signal_receiver.as_mut().unwrap();
        find_line.on_find.connect(signal!(sr with |obj, needle| {
            obj.find_buf(&needle);
            obj.input_entry = None;
        }));
        find_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.input_entry = None;
        }));
        self.input_entry = Some(Box::new(find_line) as Box<InputLine>)
    }
    /// Opens the "Save: " path prompt; on_done writes the buffer to that path.
    fn start_save(&mut self) {
        let mut path_line = PathInputLine::new("Save: ".into());
        let ref sr = self.signal_receiver.as_mut().unwrap();
        path_line.on_done.connect(signal!(sr with |obj, path| {
            obj.save(&path);
            obj.input_entry = None;
        }));
        path_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.input_entry = None;
        }));
        self.input_entry = Some(Box::new(path_line) as Box<InputLine>)
    }
    /// Opens the "Open: " path prompt; on_done loads that file into the editor.
    fn start_open(&mut self) {
        let mut path_line = PathInputLine::new("Open: ".into());
        let ref sr = self.signal_receiver.as_mut().unwrap();
        path_line.on_done.connect(signal!(sr with |obj, path| {
            obj.open(&path);
            obj.input_entry = None;
        }));
        path_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.input_entry = None;
        }));
        self.input_entry = Some(Box::new(path_line) as Box<InputLine>)
    }
    /// Drains queued signal messages. The receiver is temporarily taken out of
    /// `self` so its callbacks can borrow `self` mutably while they run.
    fn process_msgs(&mut self) {
        let mut sr = self.signal_receiver.take().unwrap();
        sr.run(self);
        self.signal_receiver = Some(sr);
    }
pub fn input(&mut self, key: Key) {
self.process_msgs();
match self.overlay {
Some(ref mut overlay) => {
overlay.input(&self.input, key);
return;
}
None => ()
}
match self.input_entry {
Some(ref mut input_entry) => {
input_entry.input(&self.input, key);
return;
}
None => ()
}
self.view_input(key);
self.process_msgs();
}
    /// Recomputes `data_size` from the buffer and re-derives the view
    /// geometry via `resize`.
    fn recalculate(&mut self) {
        self.data_size = (self.buffer.len() * 2) as isize;
        // resize() subtracts 1 from the height again, so compensate here.
        let (new_width, new_height) = (self.cur_width as i32, (self.cur_height + 1) as i32);
        self.resize(new_width, new_height);
    }
    /// Recomputes the view geometry for a `width` x `height` terminal.
    pub fn resize(&mut self, width: i32, height: i32) {
        self.cur_height = (height as isize) - 1; // reserve the status line row
        self.cur_width = width as isize;
        // Gutter: "XXXX " for small files, "XXXX:XXXX " for larger ones.
        self.nibble_start = if self.data_size / 2 <= 0xFFFF { 1 + 4 } else { 2 + 8 };
        // Each byte needs 3 hex columns + 1 ascii column = 4 columns = 2 nibbles.
        self.nibble_width = 2 * ((self.cur_width - self.nibble_start) / 4);
        self.nibble_size = self.nibble_width * self.cur_height;
    }
}
Move some of the offset calculations into the main iterator in `draw_view`, the main draw function.
use std::str;
use std::cmp;
use std::path::Path;
use std::path::PathBuf;
use util::{string_with_repeat, is_between};
use std::error::Error;
use std::ascii::AsciiExt;
use rustbox::{RustBox};
use rustbox::keyboard::Key;
use super::super::buffer::Buffer;
use super::super::segment::Segment;
use super::common::{Rect, u8_to_hex};
use super::RustBoxEx::{RustBoxEx, Style};
use super::input::Input;
use super::inputline::{InputLine, GotoInputLine, FindInputLine, PathInputLine};
use super::overlay::OverlayText;
/// A reversible edit on the buffer; `do_action` executes one and pushes its
/// inverse onto the undo stack. Offsets are byte offsets.
#[derive(Debug)]
enum UndoAction {
    Delete(isize, isize),   // remove bytes from start to end (end appears exclusive; see delete_at_cursor)
    Insert(isize, Vec<u8>), // insert the bytes at the byte offset
    Write(isize, Vec<u8>),  // overwrite bytes starting at the byte offset
}
/// Editor commands produced by the input translator for the hex view.
#[derive(Copy,Clone,Debug)]
pub enum HexEditActions {
    Edit(char), // type a hex digit (nibble pane) or a character (ascii pane)
    SwitchView,
    MoveLeft,
    MoveRight,
    MoveUp,
    MoveDown,
    MovePageUp,
    MovePageDown,
    Delete,
    DeleteWithMove, // backspace-style delete (moves back first)
    CopySelection,
    CutSelection,
    PasteSelection,
    Undo,
    ToggleInsert,
    // NOTE(review): "Selecion" is a typo, but this variant is public API —
    // renaming it would break callers.
    ToggleSelecion,
    HelpView,
    AskGoto,
    AskFind,
    AskOpen,
    AskSave
}
// Generates HexEditSignalReceiver: a queue of callbacks that run with
// `&mut HexEdit` (drained by process_msgs).
signalreceiver_decl!{HexEditSignalReceiver(HexEdit)}
/// The main hex-editor widget. Quantities named in "nibbles" count half-bytes:
/// `cursor_pos`, `data_offset`, `data_size`, `nibble_width` and `nibble_size`
/// are all nibble counts (one byte = two nibbles).
pub struct HexEdit {
    buffer: Segment,                // the bytes being edited
    cursor_pos: isize,              // cursor position, in nibbles
    cur_height: isize,              // view height in rows (status line excluded)
    cur_width: isize,               // view width in columns
    nibble_width: isize,            // nibbles shown per row
    nibble_size: isize,             // nibbles that fit on one screen
    data_size: isize,               // buffer length, in nibbles
    status_log: Vec<String>,        // history of status-line messages (last one shown)
    data_offset: isize,             // first visible nibble (scroll position)
    nibble_start: isize,            // column where the hex pane begins (after gutter)
    nibble_active: bool,            // true: hex pane focused; false: ascii pane
    selection_start: Option<isize>, // selection anchor (nibble), if selecting
    insert_mode: bool,              // insert vs overwrite typing
    input: Input,                   // key-to-action translator
    undo_stack: Vec<UndoAction>,    // inverse actions, most recent last
    input_entry: Option<Box<InputLine>>, // active prompt (goto/find/open/save)
    overlay: Option<OverlayText>,   // active full-screen overlay (help)
    cur_path: Option<PathBuf>,      // path of the opened/saved file
    clipboard: Option<Vec<u8>>,     // last copied/cut bytes
    signal_receiver: Option<HexEditSignalReceiver>, // queued UI callbacks; taken during process_msgs
}
impl HexEdit {
pub fn new() -> HexEdit {
HexEdit {
buffer: Segment::new(),
cursor_pos: 0,
nibble_size: 0,
cur_width: 50,
cur_height: 50,
nibble_width: 1,
data_offset: 0,
nibble_start: 0,
data_size: 0,
status_log: vec!("Press C-/ for help".to_string()),
nibble_active: true,
selection_start: None,
insert_mode: false,
input_entry: None,
undo_stack: Vec::new(),
overlay: None,
cur_path: None,
clipboard: None,
input: Input::new(),
signal_receiver: Some(HexEditSignalReceiver::new()),
}
}
fn reset(&mut self) {
self.cursor_pos = 0;
self.data_offset = 0;
self.nibble_active = true;
self.selection_start = None;
self.insert_mode = false;
self.input_entry = None;
self.undo_stack = Vec::new();
self.recalculate();
}
    /// Draws one editor row: the hex (nibble) pane and the ascii (byte) pane
    /// side by side, applying selection/cursor styling per byte.
    ///
    /// Unlike the earlier revision, `iter` yields (absolute_byte_pos, byte)
    /// pairs, so the column within the row comes from enumerate() instead of a
    /// modulo on the view index. `byte` is `None` only for the synthetic
    /// trailing slot used by insert mode at end-of-buffer.
    fn draw_line(&self, rb: &RustBox, iter: &mut Iterator<Item=(usize, Option<&u8>)>, row: usize) {
        let nibble_view_start = self.nibble_start as usize;
        let byte_view_start = nibble_view_start + (self.nibble_width as usize / 2) * 3;
        // We want the selection draw to not go out of the editor view
        let mut prev_in_selection = false;
        for (row_offset, (byte_pos, maybe_byte)) in iter.enumerate() {
            let at_current_byte = byte_pos as isize == (self.cursor_pos / 2);
            let in_selection = if let Some(selection_pos) = self.selection_start {
                is_between(byte_pos as isize, selection_pos / 2, self.cursor_pos / 2)
            } else {
                false
            };
            // Now we draw the nibble view
            let hex_chars = if let Some(&byte) = maybe_byte {
                u8_to_hex(byte)
            } else {
                (' ', ' ')
            };
            // Each byte occupies three columns in the hex pane ("XX ")
            let nibble_view_column = nibble_view_start + (row_offset * 3);
            let nibble_style = if (!self.nibble_active && at_current_byte) || in_selection {
                Style::Selection
            } else {
                Style::Default
            };
            rb.print_char_style(nibble_view_column, row, nibble_style,
                hex_chars.0);
            rb.print_char_style(nibble_view_column + 1, row, nibble_style,
                hex_chars.1);
            // Also highlight the gap between two consecutive selected bytes
            if prev_in_selection && in_selection {
                rb.print_char_style(nibble_view_column - 1, row, nibble_style,
                    ' ');
            }
            if self.nibble_active && self.input_entry.is_none() && at_current_byte {
                rb.set_cursor(nibble_view_column as isize + (self.cursor_pos & 1),
                    row as isize);
            };
            // Now let's draw the byte window
            let byte_char = if let Some(&byte) = maybe_byte {
                let bc = byte as char;
                if bc.is_ascii() && bc.is_alphanumeric() {
                    bc
                } else {
                    '.'
                }
            } else {
                ' '
            };
            // If we are at the current byte but the nibble view is active, we want to draw a
            // "fake" cursor by drawing a selection square
            let byte_style = if (self.nibble_active && at_current_byte) || in_selection {
                Style::Selection
            } else {
                Style::Default
            };
            rb.print_char_style(byte_view_start + row_offset, row, byte_style,
                byte_char);
            if !self.nibble_active && self.input_entry.is_none() && at_current_byte {
                rb.set_cursor((byte_view_start + row_offset) as isize, row as isize);
            }
            // Remember if we had a selection, so that we know for next char to "fill in" with
            // selection in the nibble view
            prev_in_selection = in_selection;
        }
    }
    /// Renders every visible row: prints the address gutter, then hands the
    /// row's slice of the (absolute position, byte) iterator to `draw_line`.
    pub fn draw_view(&self, rb: &RustBox) {
        // Single-element tail so insert mode gets a "virtual" slot one past
        // the end of the buffer.
        let extra_none: &[Option<&u8>] = &[None];
        let start_iter = (self.data_offset / 2) as usize;
        let stop_iter = cmp::min(start_iter + (self.nibble_size / 2) as usize, self.buffer.len());
        let row_count = (stop_iter - start_iter) / (self.nibble_width as usize / 2) + 1;
        // We need this binding so the iterator stays alive for the by_ref later
        let mut itit_ = (start_iter..).zip(self.buffer.iter_range(start_iter, stop_iter)
            // This is needed for the "fake" last element for insertion mode
            .map(|x| Some(x))
            .chain(extra_none.iter().map(|n| *n))) // So the last item will be a None
            .peekable();
        // Take the iterator by ref so we can take() from it later without transferring ownership
        let mut itit = itit_.by_ref();
        for row in 0..row_count {
            // NOTE(review): peek().unwrap() assumes the iterator always spans
            // row_count rows — verify the "+ 1" above cannot overrun it.
            let byte_pos = itit.peek().unwrap().0 as isize;
            if self.nibble_start == 5 {
                // Narrow gutter: 4 hex digits cover the whole buffer
                rb.print_style(0, row, Style::Default, &format!("{:04X}", byte_pos));
            } else {
                rb.print_style(0, row, Style::Default, &format!("{:04X}:{:04X}", byte_pos >> 16, byte_pos & 0xFFFF));
            }
            self.draw_line(rb, &mut itit.take((self.nibble_width as usize / 2)), row);
        }
    }
    /// Paints the bottom status row: the latest status message on the left and
    /// a debug summary of the editor state on the right.
    fn draw_statusbar(&self, rb: &RustBox) {
        rb.print_style(0, rb.height() - 1, Style::StatusBar, &string_with_repeat(' ', rb.width()));
        if let Some(ref status_line) = self.status_log.last() {
            rb.print_style(0, rb.height() - 1, Style::StatusBar, &status_line);
        }
        let right_status = format!(
            "overlay = {:?}, input = {:?} undo = {:?}, pos = {:?}, selection = {:?}, insert = {:?}",
            self.overlay.is_none(), self.input_entry.is_none(), self.undo_stack.len(),
            self.cursor_pos, self.selection_start, self.insert_mode);
        // NOTE(review): rb.width() - right_status.len() underflows if the text
        // is wider than the terminal — confirm width is always sufficient.
        rb.print_style(rb.width() - right_status.len(), rb.height() - 1, Style::StatusBar, &right_status);
    }
pub fn draw(&mut self, rb: &RustBox) {
self.draw_view(rb);
if let Some(entry) = self.input_entry.as_mut() {
entry.draw(rb, Rect {
top: (rb.height() - 2) as isize,
bottom: (rb.height() - 1) as isize,
left: 0,
right: rb.width() as isize
}, true);
}
if let Some(overlay) = self.overlay.as_mut() {
overlay.draw(rb, Rect {
top: 0,
bottom: self.cur_height,
left: 0,
right: self.cur_width,
}, true);
}
self.draw_statusbar(rb);
}
    /// Appends a message to the status log; the newest entry is what the
    /// status bar displays.
    fn status(&mut self, st: String) {
        self.status_log.push(st);
    }
pub fn open(&mut self, path: &Path) {
match Segment::from_path(path) {
Ok(buf) => {
self.buffer = buf;
self.cur_path = Some(PathBuf::from(path));
self.reset();
}
Err(e) => {
self.status(format!("ERROR: {}", e.description()));
}
}
}
pub fn save(&mut self, path: &Path) {
match self.buffer.save(path) {
Ok(_) => {
self.cur_path = Some(PathBuf::from(path));
}
Err(e) => {
self.status(format!("ERROR: {}", e.description()));
}
}
}
fn do_action(&mut self, act: UndoAction, add_to_undo: bool) -> (isize, isize) {
let stat = format!("doing = {:?}", act);
let mut begin_region: isize;
let mut end_region: isize;
match act {
UndoAction::Insert(offset, buf) => {
begin_region = offset;
end_region = offset + buf.len() as isize;
self.buffer.insert(offset as usize, &buf);
if add_to_undo {
self.push_undo(UndoAction::Delete(offset, offset + buf.len() as isize))
}
self.recalculate();
}
UndoAction::Delete(offset, end) => {
begin_region = offset;
end_region = end;
let res = self.buffer.remove(offset as usize, end as usize);
if add_to_undo { self.push_undo(UndoAction::Insert(offset, res)) }
self.recalculate();
}
UndoAction::Write(offset, buf) => {
begin_region = offset;
end_region = offset + buf.len() as isize;
let orig_data = self.buffer.read(offset as usize, buf.len());
self.buffer.write(offset as usize, &buf);
if add_to_undo { self.push_undo(UndoAction::Write(offset, orig_data)) }
}
}
self.status(stat);
(begin_region, end_region)
}
    /// Records an inverse action; `undo` pops and applies the most recent one.
    fn push_undo(&mut self, act: UndoAction) {
        self.undo_stack.push(act);
    }
fn undo(&mut self) {
match self.undo_stack.pop() {
Some(act) => {
let (begin, _) = self.do_action(act, false);
self.set_cursor(begin * 2);
}
None => ()
}
}
    /// True when the cursor sits one past the last nibble of the buffer.
    fn cursor_at_end(&self) -> bool {
        self.cursor_pos == self.data_size
    }
    /// Deletes the selected byte range, or the byte at the cursor when there
    /// is no selection. `with_bksp` makes it behave like backspace, deleting
    /// the byte before the cursor instead.
    fn delete_at_cursor(&mut self, with_bksp: bool) {
        let mut cursor_pos = self.cursor_pos;
        let selection_pos = match self.selection_start {
            Some(selection_pos_tag) => selection_pos_tag,
            None => {
                if with_bksp {
                    // Nothing before the cursor to backspace over
                    if cursor_pos < 2 {
                        return;
                    }
                    cursor_pos -= 2;
                }
                cursor_pos
            }
        };
        // Convert the nibble span to a byte range; del_stop is one past the
        // last byte to remove.
        let del_start = cmp::min(selection_pos, cursor_pos) / 2;
        let mut del_stop = cmp::max(selection_pos, cursor_pos) / 2 + 1;
        // Clamp when the range reaches the virtual end-of-buffer slot.
        if del_stop > self.data_size / 2 {
            del_stop -= 1;
            if del_stop == del_start {
                return;
            }
        }
        if self.data_size == 0 {
            self.status(format!("Nothing to delete"));
            return;
        }
        self.selection_start = None;
        self.do_action(UndoAction::Delete(del_start, del_stop), true);
        self.set_cursor(del_start * 2);
    }
fn write_nibble_at_cursor(&mut self, c: u8) {
match self.selection_start {
Some(_) => self.delete_at_cursor(false),
None => ()
}
if self.insert_mode || self.cursor_at_end() {
self.insert_nibble_at_cursor(c);
} else {
self.set_nibble_at_cursor(c);
}
}
fn set_nibble_at_cursor(&mut self, c: u8) {
let mut byte = self.buffer[(self.cursor_pos / 2) as usize];
byte = match self.cursor_pos & 1 {
0 => (byte & 0x0f) + c * 16,
1 => (byte & 0xf0) + c,
_ => 0xff,
};
let byte_offset = self.cursor_pos / 2;
self.do_action(UndoAction::Write(byte_offset, vec!(byte)), true);
}
fn insert_nibble_at_cursor(&mut self, c: u8) {
// If we are at half byte, we still overwrite
if self.cursor_pos & 1 == 1 {
self.set_nibble_at_cursor(c);
return
}
let pos_div2 = self.cursor_pos / 2;
self.do_action(UndoAction::Insert(pos_div2, vec!(c * 16)), true);
}
    /// Flips insert/overwrite mode; move_cursor(0) re-clamps the cursor and
    /// refreshes the scroll state for the new mode.
    fn toggle_insert_mode(&mut self) {
        self.insert_mode = !self.insert_mode;
        self.move_cursor(0);
    }
fn write_byte_at_cursor(&mut self, c: u8) {
match self.selection_start {
Some(_) => self.delete_at_cursor(false),
None => ()
}
let byte_offset = self.cursor_pos / 2;
if self.insert_mode || self.cursor_at_end() {
self.do_action(UndoAction::Insert(byte_offset, vec!(c)), true);
} else {
self.do_action(UndoAction::Write(byte_offset, vec!(c)), true);
}
}
    /// Moves the cursor by `pos` nibbles (may be negative) and re-clamps.
    fn move_cursor(&mut self, pos: isize) {
        self.cursor_pos += pos;
        self.update_cursor()
    }
    /// Places the cursor at nibble position `pos` and re-clamps.
    fn set_cursor(&mut self, pos: isize) {
        self.cursor_pos = pos;
        self.update_cursor()
    }
    /// Clamps the cursor to [0, data_size] and scrolls `data_offset` so the
    /// cursor row stays on screen.
    fn update_cursor(&mut self) {
        self.cursor_pos = cmp::max(self.cursor_pos, 0);
        self.cursor_pos = cmp::min(self.cursor_pos, self.data_size);
        // Cursor moved above the view: snap the offset to the cursor's row.
        if self.cursor_pos < self.data_offset {
            self.data_offset = (self.cursor_pos / self.nibble_width) * self.nibble_width;
        }
        // Cursor moved below the view: put its row at the bottom of the screen.
        if self.cursor_pos > (self.data_offset + self.nibble_size - 1) {
            let end_row = self.cursor_pos - (self.cursor_pos % self.nibble_width) -
                self.nibble_size + self.nibble_width;
            self.data_offset = end_row;
        }
    }
fn toggle_selection(&mut self) {
match self.selection_start {
Some(_) => self.selection_start = None,
None => self.selection_start = Some(self.cursor_pos)
}
let st = format!("selection = {:?}", self.selection_start);
self.status(st.clone());
}
    /// Jumps the cursor to byte offset `pos` (converted to nibbles).
    fn goto(&mut self, pos: isize) {
        self.status(format!("Going to {:?}", pos));
        self.set_cursor(pos * 2);
    }
fn find_buf(&mut self, needle: &[u8]) {
let found_pos = match self.buffer.find_from((self.cursor_pos / 2) as usize, needle) {
None => {
self.buffer.find_from(0, needle)
}
a => a
};
match found_pos {
Some(pos) => {
self.status(format!("Found at {:?}", pos));
self.set_cursor((pos * 2) as isize);
}
None => {
self.status(format!("Nothing found!"));
}
};
}
    /// Copies the selected bytes into the clipboard and returns how many were
    /// copied, or None when there is no selection.
    fn read_cursor_to_clipboard(&mut self) -> Option<usize> {
        let (start, stop) = match self.selection_start {
            None => { return None; },
            Some(selection_pos) => {
                (cmp::min(selection_pos, self.cursor_pos) / 2,
                 cmp::max(selection_pos, self.cursor_pos) / 2)
            }
        };
        // NOTE(review): do_action calls Segment::read(offset, length), but here
        // the second argument is `stop`, an absolute end position rather than a
        // length — confirm the intended signature; this looks like it reads the
        // wrong span whenever start > 0.
        let data = self.buffer.read(start as usize, stop as usize);
        let data_len = data.len();
        self.clipboard = Some(data);
        Some(data_len)
    }
fn edit_copy(&mut self) {
match self.read_cursor_to_clipboard() {
Some(data_len) => self.status(format!("Copied {}", data_len)),
None => ()
}
}
fn edit_cut(&mut self) {
match self.read_cursor_to_clipboard() {
Some(data_len) => {
self.delete_at_cursor(false);
self.status(format!("Cut {}", data_len));
}
None => ()
}
}
fn edit_paste(&mut self) {
let data;
match self.clipboard {
Some(ref d) => { data = d.clone(); },
None => { return; }
};
let pos_div2 = self.cursor_pos / 2;
self.do_action(UndoAction::Insert(pos_div2, data), true);
}
    /// Translates a key press into a HexEditActions command and executes it.
    /// Cursor distances are in nibbles, which is why ascii-pane movement
    /// steps by 2 while hex-pane movement steps by 1.
    fn view_input(&mut self, key: Key) {
        let action = self.input.editor_input(key);
        if action.is_none() {
            return;
        }
        match action.unwrap() {
            // Movement
            HexEditActions::MoveLeft if self.nibble_active => self.move_cursor(-1),
            HexEditActions::MoveRight if self.nibble_active => self.move_cursor(1),
            HexEditActions::MoveLeft if !self.nibble_active => self.move_cursor(-2),
            HexEditActions::MoveRight if !self.nibble_active => self.move_cursor(2),
            HexEditActions::MoveUp => {
                let t = -self.nibble_width;
                self.move_cursor(t)
            }
            HexEditActions::MoveDown => {
                let t = self.nibble_width;
                self.move_cursor(t)
            }
            // Page movement covers half a screen at a time
            HexEditActions::MovePageUp => {
                let t = -(self.nibble_size - self.nibble_width) / 2;
                self.move_cursor(t)
            }
            HexEditActions::MovePageDown => {
                let t = (self.nibble_size - self.nibble_width) / 2;
                self.move_cursor(t)
            }
            // UndoAction::Delete
            HexEditActions::Delete => self.delete_at_cursor(false),
            HexEditActions::DeleteWithMove => self.delete_at_cursor(true),
            // Ctrl X, C V
            HexEditActions::CutSelection => self.edit_cut(),
            HexEditActions::CopySelection => self.edit_copy(),
            HexEditActions::PasteSelection => self.edit_paste(),
            // Hex input for nibble view
            HexEditActions::Edit(ch) if self.nibble_active => {
                match ch.to_digit(16) {
                    Some(val) => {
                        self.write_nibble_at_cursor(val as u8);
                        self.move_cursor(1);
                    }
                    None => () // TODO: Show error?
                }
            },
            // Ascii edit for byte view
            HexEditActions::Edit(ch) if !self.nibble_active => {
                if ch.len_utf8() == 1 && ch.is_alphanumeric() {
                    // TODO: Make it printable rather than alphanumeric
                    self.write_byte_at_cursor(ch as u8);
                    self.move_cursor(2);
                } else {
                    // TODO: Show error?
                }
            }
            HexEditActions::SwitchView => {
                self.nibble_active = !self.nibble_active;
                let t = self.nibble_active;
                self.status(format!("nibble_active = {:?}", t));
            },
            HexEditActions::HelpView => self.start_help(),
            HexEditActions::ToggleInsert => self.toggle_insert_mode(),
            HexEditActions::ToggleSelecion => self.toggle_selection(),
            HexEditActions::Undo => self.undo(),
            HexEditActions::AskGoto => self.start_goto(),
            HexEditActions::AskFind => self.start_find(),
            HexEditActions::AskOpen => self.start_open(),
            HexEditActions::AskSave => self.start_save(),
            // Unreachable today (all variants are covered above), kept as a
            // diagnostic fallback.
            _ => self.status(format!("key = {:?}", key)),
        }
    }
    /// Shows the help overlay; its cancel signal (queued, run by
    /// process_msgs) closes the overlay and may carry a status message.
    fn start_help(&mut self) {
        let help_text = include_str!("Help.txt");
        let ref sr = self.signal_receiver.as_mut().unwrap();
        let mut ot = OverlayText::with_text(help_text.to_string());
        ot.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.overlay = None;
        }));
        self.overlay = Some(ot);
    }
    /// Opens the "goto offset" prompt; on_done jumps to the entered position.
    fn start_goto(&mut self) {
        let mut gt = GotoInputLine::new();
        // let mut sender_clone0 = self.sender.clone();
        let ref sr = self.signal_receiver.as_mut().unwrap();
        // NOTE(review): goto() already multiplies by 2 (bytes -> nibbles), so
        // pos*2 here lands the cursor at pos*4 nibbles — confirm which scaling
        // is intended.
        gt.on_done.connect(signal!(sr with |obj, pos| {
            obj.goto(pos*2);
            obj.input_entry = None;
        }));
        gt.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.input_entry = None;
        }));
        self.input_entry = Some(Box::new(gt) as Box<InputLine>)
    }
    /// Opens the "find" prompt; on_find searches the buffer for the entered
    /// bytes (with wraparound; see find_buf).
    fn start_find(&mut self) {
        let mut find_line = FindInputLine::new();
        let ref sr = self.signal_receiver.as_mut().unwrap();
        find_line.on_find.connect(signal!(sr with |obj, needle| {
            obj.find_buf(&needle);
            obj.input_entry = None;
        }));
        find_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.input_entry = None;
        }));
        self.input_entry = Some(Box::new(find_line) as Box<InputLine>)
    }
    /// Opens a path prompt and saves the buffer to the confirmed path.
    fn start_save(&mut self) {
        let mut path_line = PathInputLine::new("Save: ".into());
        let ref sr = self.signal_receiver.as_mut().unwrap();
        path_line.on_done.connect(signal!(sr with |obj, path| {
            obj.save(&path);
            obj.input_entry = None;
        }));
        path_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.input_entry = None;
        }));
        self.input_entry = Some(Box::new(path_line) as Box<InputLine>)
    }
    /// Opens a path prompt and loads the file at the confirmed path.
    fn start_open(&mut self) {
        let mut path_line = PathInputLine::new("Open: ".into());
        let ref sr = self.signal_receiver.as_mut().unwrap();
        path_line.on_done.connect(signal!(sr with |obj, path| {
            obj.open(&path);
            obj.input_entry = None;
        }));
        path_line.on_cancel.connect(signal!(sr with |obj, opt_msg| {
            match opt_msg {
                Some(ref msg) => obj.status(msg.clone()),
                None => ()
            };
            obj.input_entry = None;
        }));
        self.input_entry = Some(Box::new(path_line) as Box<InputLine>)
    }
fn process_msgs(&mut self) {
let mut sr = self.signal_receiver.take().unwrap();
sr.run(self);
self.signal_receiver = Some(sr);
}
pub fn input(&mut self, key: Key) {
self.process_msgs();
match self.overlay {
Some(ref mut overlay) => {
overlay.input(&self.input, key);
return;
}
None => ()
}
match self.input_entry {
Some(ref mut input_entry) => {
input_entry.input(&self.input, key);
return;
}
None => ()
}
self.view_input(key);
self.process_msgs();
}
fn recalculate(&mut self) {
self.data_size = (self.buffer.len() * 2) as isize;
let (new_width, new_height) = (self.cur_width as i32, (self.cur_height + 1) as i32);
self.resize(new_width, new_height);
}
pub fn resize(&mut self, width: i32, height: i32) {
self.cur_height = (height as isize) - 1;
self.cur_width = width as isize;
self.nibble_start = if self.data_size / 2 <= 0xFFFF { 1 + 4 } else { 2 + 8 };
self.nibble_width = 2 * ((self.cur_width - self.nibble_start) / 4);
self.nibble_size = self.nibble_width * self.cur_height;
}
}
|
// This file is released under the same terms as Rust itself.
use pipeline::{self, PipelineId};
use std;
use std::collections::HashMap;
use std::convert::{From, Into};
use std::fmt::{self, Debug, Display, Formatter};
use std::fs::File;
use std::io::Read;
use std::num::ParseIntError;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str::FromStr;
use std::sync::mpsc::{Sender, Receiver};
use vcs;
/// Configuration for one repository managed by this git worker.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct Repo {
    // Local checkout directory; created on demand by setup_dir().
    pub path: PathBuf,
    // URL registered as the `origin` remote.
    pub origin: String,
    // Branch that a successful staging run is pushed into.
    pub master_branch: String,
    // Branch that pull commits are merged into for testing.
    pub staging_branch: String,
}
/// Worker that executes git commands on behalf of registered pipelines.
pub struct Worker {
    // One repository configuration per pipeline.
    repos: HashMap<PipelineId, Repo>,
    // Path to (or name of) the git binary to invoke.
    executable: String,
}
impl Worker {
    /// Creates a worker with no registered repositories.
    /// `executable` is the git binary used for every command.
    pub fn new(
        executable: String,
    ) -> Worker {
        Worker{
            repos: HashMap::new(),
            executable: executable,
        }
    }
    /// Registers (or replaces) the repository used for `pipeline_id`.
    pub fn add_pipeline(&mut self, pipeline_id: PipelineId, repo: Repo) {
        self.repos.insert(pipeline_id, repo);
    }
}
impl pipeline::Worker<vcs::Event<Commit>, vcs::Message<Commit>> for Worker {
    /// Worker event loop: blocks on the message channel forever, handling
    /// each message as it arrives. Panics only if the pipeline's sending
    /// side is dropped, at which point continuing is pointless.
    fn run(
        &mut self,
        recv_msg: Receiver<vcs::Message<Commit>>,
        mut send_event: Sender<vcs::Event<Commit>>
    ) {
        loop {
            self.handle_message(
                recv_msg.recv().expect("Pipeline went away"),
                &mut send_event,
            );
        }
    }
}
// Runs an expression yielding io::Result<Output>: propagates the I/O error
// with try!, converts a non-zero exit status into GitError::Cli (carrying
// the captured stderr), and otherwise evaluates to the Output.
macro_rules! try_cmd {
    ($e:expr) => ({
        let cmd = try!($e);
        if !cmd.status.success() {
            return Err(GitError::Cli(
                cmd.status,
                String::from_utf8_lossy(&cmd.stderr).into_owned()
            ));
        }
        cmd
    })
}
impl Worker {
    /// Dispatches one pipeline message and reports the outcome back through
    /// `send_event`. Messages for unknown pipeline IDs are logged and dropped.
    fn handle_message(
        &self,
        msg: vcs::Message<Commit>,
        send_event: &mut Sender<vcs::Event<Commit>>
    ) {
        match msg {
            vcs::Message::MergeToStaging(
                pipeline_id, pull_commit, message, remote
            ) => {
                let repo = match self.repos.get(&pipeline_id) {
                    Some(repo) => repo,
                    None => {
                        warn!("Got wrong pipeline ID {:?}", pipeline_id);
                        return;
                    }
                };
                info!("Merging {} ...", pull_commit);
                match self.merge_to_staging(
                    repo, pull_commit, &message, &remote
                ) {
                    Err(e) => {
                        warn!(
                            "Failed to merge {} to staging: {:?}",
                            pull_commit,
                            e
                        );
                        send_event.send(vcs::Event::FailedMergeToStaging(
                            pipeline_id,
                            pull_commit,
                        )).expect("Pipeline gone merge to staging error");
                    }
                    Ok(merge_commit) => {
                        info!("Merged {} to {}", pull_commit, merge_commit);
                        send_event.send(vcs::Event::MergedToStaging(
                            pipeline_id,
                            pull_commit,
                            merge_commit,
                        )).expect("Pipeline gone merge to staging");
                    }
                }
            }
            vcs::Message::MoveStagingToMaster(pipeline_id, merge_commit) => {
                let repo = match self.repos.get(&pipeline_id) {
                    Some(repo) => repo,
                    None => {
                        warn!("Got wrong pipeline ID {:?}", pipeline_id);
                        return;
                    }
                };
                info!("Moving {} ...", merge_commit);
                match self.move_staging_to_master(repo, merge_commit) {
                    Err(e) => {
                        warn!(
                            "Failed to move {} to master: {:?}",
                            merge_commit,
                            e
                        );
                        send_event.send(vcs::Event::FailedMoveToMaster(
                            pipeline_id,
                            merge_commit,
                        )).expect("Pipeline gone move to master error");
                    }
                    Ok(()) => {
                        info!("Moved {} to master", merge_commit);
                        send_event.send(vcs::Event::MovedToMaster(
                            pipeline_id,
                            merge_commit,
                        )).expect("Pipeline gone move to master");
                    }
                }
            }
        }
    }
    /// Resets the staging branch to origin's master, merges `pull_commit`
    /// into it with `message`, force-pushes, and returns the merge commit.
    ///
    /// Uses `Command::current_dir` instead of `git -C <path>` because `-C`
    /// only exists in git >= 1.8.5 and this must keep working on older gits
    /// (e.g. the 1.8.x shipped with CentOS 7).
    fn merge_to_staging(
        &self,
        repo: &Repo,
        pull_commit: Commit,
        message: &str,
        remote: &str,
    ) -> Result<Commit, GitError> {
        try!(self.setup_dir(repo));
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("fetch")
            .arg("origin")
            .arg(&repo.master_branch)
            .arg(remote)
            .output());
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("checkout")
            .arg(format!("origin/{}", repo.master_branch))
            .output());
        // Force the staging branch back onto origin's master before merging.
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("branch")
            .arg("-f")
            .arg(&repo.staging_branch)
            .arg(format!("origin/{}", repo.master_branch))
            .output());
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("checkout")
            .arg(&repo.staging_branch)
            .output());
        // --no-ff guarantees a merge commit even when fast-forward is possible.
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("merge")
            .arg("--no-ff")
            .arg("-m")
            .arg(message)
            .arg(&pull_commit.to_string())
            .output());
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("push")
            .arg("-f")
            .arg("origin")
            .arg(&repo.staging_branch)
            .output());
        // Read the new staging head straight from the loose ref file.
        let mut commit_string = String::new();
        try!(try!(File::open(
            Path::new(&repo.path)
                .join(".git/refs/heads/")
                .join(&repo.staging_branch)
        )).read_to_string(&mut commit_string));
        commit_string = commit_string.replace("\n", "").replace("\r", "");
        Commit::from_str(&commit_string).map_err(|e| e.into())
    }
    /// Force-pushes `merge_commit` to origin's master branch.
    fn move_staging_to_master(
        &self,
        repo: &Repo,
        merge_commit: Commit,
    ) -> Result<(), GitError> {
        try!(self.setup_dir(repo));
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("push")
            .arg("-f")
            .arg("origin")
            .arg(format!("{}:{}", merge_commit, &repo.master_branch))
            .output());
        Ok(())
    }
    /// Ensures the local checkout exists: on first use, initializes it and
    /// adds the `origin` remote; otherwise aborts any merge left over from a
    /// previous failed run.
    fn setup_dir(&self, repo: &Repo) -> Result<(), GitError> {
        if !repo.path.exists() {
            // `git init <path>` cannot use current_dir: the directory does
            // not exist yet.
            try_cmd!(Command::new(&self.executable)
                .arg("init")
                .arg(&repo.path)
                .output());
            try_cmd!(Command::new(&self.executable)
                .current_dir(&repo.path)
                .arg("remote")
                .arg("add")
                .arg("origin")
                .arg(&repo.origin)
                .output());
        } else {
            // Plain try! (not try_cmd!): `merge --abort` fails when no merge
            // is in progress, which is the common, harmless case.
            try!(Command::new(&self.executable)
                .current_dir(&repo.path)
                .arg("merge")
                .arg("--abort")
                .output());
        }
        Ok(())
    }
}
// Errors produced while driving git: parse failures when reading a ref,
// I/O failures spawning git, or git itself exiting non-zero (Cli carries
// the exit status and captured stderr).
quick_error! {
    #[derive(Debug)]
    pub enum GitError {
        Int(err: std::num::ParseIntError) {
            cause(err)
            from()
        }
        Io(err: std::io::Error) {
            cause(err)
            from()
        }
        Cli(status: std::process::ExitStatus, output: String) {}
    }
}
// A git commit is a SHA1 sum. A SHA1 sum is a 160-bit number.
// Stored as 64 + 64 + 32 bits so it is Copy and cheap to pass around.
#[derive(Copy, Clone, Deserialize, Eq, PartialEq, Serialize)]
pub struct Commit(u64, u64, u32);
impl vcs::Commit for Commit {}
impl Display for Commit {
    // Renders the full 40-digit lowercase hex SHA1 (zero-padded).
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{:016x}{:016x}{:08x}", self.0, self.1, self.2)
    }
}
impl Debug for Commit {
    // Same 40 hex digits as Display, wrapped in Commit(...) for diagnostics.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "Commit({:016x}{:016x}{:08x})", self.0, self.1, self.2)
    }
}
// It should be easy to get from hexadecimal.
impl FromStr for Commit {
    type Err = ParseIntError;
    /// Parses a 40-digit hexadecimal SHA1. Anything else (wrong length or
    /// any non-hex character) yields a ParseIntError.
    fn from_str(mut s: &str) -> Result<Commit, ParseIntError> {
        // The byte slices below (&s[0..16] etc.) panic if a slice boundary
        // falls inside a multi-byte UTF-8 character, so reject anything that
        // is not pure ASCII hex up front by swapping in a sentinel that
        // from_str_radix is guaranteed to refuse.
        if s.len() != 40 || !s.chars().all(|c| c.is_digit(16)) {
            s = "THIS_IS_NOT_A_NUMBER_BUT_I_CANT_MAKE_PARSEINTERROR_MYSELF";
        }
        let a = try!(u64::from_str_radix(&s[0..16], 16));
        let b = try!(u64::from_str_radix(&s[16..32], 16));
        let c = try!(u32::from_str_radix(&s[32..40], 16));
        Ok(Commit(a, b, c))
    }
}
// NOTE(review): new code would implement From<Commit> for String (which
// provides Into for free); kept as-is to avoid touching the trait surface.
impl Into<String> for Commit {
    fn into(self) -> String {
        // Reuses the Display impl: 40 lowercase hex digits.
        self.to_string()
    }
}
Just set the working directory ourselves instead of passing `-C`.
This fixes compatibility with git 1.8, the version shipped with CentOS 7.
// This file is released under the same terms as Rust itself.
use pipeline::{self, PipelineId};
use std;
use std::collections::HashMap;
use std::convert::{From, Into};
use std::fmt::{self, Debug, Display, Formatter};
use std::fs::File;
use std::io::Read;
use std::num::ParseIntError;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::str::FromStr;
use std::sync::mpsc::{Sender, Receiver};
use vcs;
/// Configuration for one repository managed by this git worker.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct Repo {
    // Local checkout directory; created on demand by setup_dir().
    pub path: PathBuf,
    // URL registered as the `origin` remote.
    pub origin: String,
    // Branch that a successful staging run is pushed into.
    pub master_branch: String,
    // Branch that pull commits are merged into for testing.
    pub staging_branch: String,
}
/// Worker that executes git commands on behalf of registered pipelines.
pub struct Worker {
    // One repository configuration per pipeline.
    repos: HashMap<PipelineId, Repo>,
    // Path to (or name of) the git binary to invoke.
    executable: String,
}
impl Worker {
    /// Creates a worker with no registered repositories.
    /// `executable` is the git binary used for every command.
    pub fn new(
        executable: String,
    ) -> Worker {
        Worker{
            repos: HashMap::new(),
            executable: executable,
        }
    }
    /// Registers (or replaces) the repository used for `pipeline_id`.
    pub fn add_pipeline(&mut self, pipeline_id: PipelineId, repo: Repo) {
        self.repos.insert(pipeline_id, repo);
    }
}
impl pipeline::Worker<vcs::Event<Commit>, vcs::Message<Commit>> for Worker {
    /// Worker event loop: blocks on the message channel forever, handling
    /// each message as it arrives. Panics only if the pipeline's sending
    /// side is dropped, at which point continuing is pointless.
    fn run(
        &mut self,
        recv_msg: Receiver<vcs::Message<Commit>>,
        mut send_event: Sender<vcs::Event<Commit>>
    ) {
        loop {
            self.handle_message(
                recv_msg.recv().expect("Pipeline went away"),
                &mut send_event,
            );
        }
    }
}
// Runs an expression yielding io::Result<Output>: propagates the I/O error
// with try!, converts a non-zero exit status into GitError::Cli (carrying
// the captured stderr), and otherwise evaluates to the Output.
macro_rules! try_cmd {
    ($e:expr) => ({
        let cmd = try!($e);
        if !cmd.status.success() {
            return Err(GitError::Cli(
                cmd.status,
                String::from_utf8_lossy(&cmd.stderr).into_owned()
            ));
        }
        cmd
    })
}
impl Worker {
    /// Dispatches one pipeline message and reports the outcome back through
    /// `send_event`. Messages for unknown pipeline IDs are logged and dropped.
    fn handle_message(
        &self,
        msg: vcs::Message<Commit>,
        send_event: &mut Sender<vcs::Event<Commit>>
    ) {
        match msg {
            vcs::Message::MergeToStaging(
                pipeline_id, pull_commit, message, remote
            ) => {
                let repo = match self.repos.get(&pipeline_id) {
                    Some(repo) => repo,
                    None => {
                        warn!("Got wrong pipeline ID {:?}", pipeline_id);
                        return;
                    }
                };
                info!("Merging {} ...", pull_commit);
                match self.merge_to_staging(
                    repo, pull_commit, &message, &remote
                ) {
                    Err(e) => {
                        warn!(
                            "Failed to merge {} to staging: {:?}",
                            pull_commit,
                            e
                        );
                        send_event.send(vcs::Event::FailedMergeToStaging(
                            pipeline_id,
                            pull_commit,
                        )).expect("Pipeline gone merge to staging error");
                    }
                    Ok(merge_commit) => {
                        info!("Merged {} to {}", pull_commit, merge_commit);
                        send_event.send(vcs::Event::MergedToStaging(
                            pipeline_id,
                            pull_commit,
                            merge_commit,
                        )).expect("Pipeline gone merge to staging");
                    }
                }
            }
            vcs::Message::MoveStagingToMaster(pipeline_id, merge_commit) => {
                let repo = match self.repos.get(&pipeline_id) {
                    Some(repo) => repo,
                    None => {
                        warn!("Got wrong pipeline ID {:?}", pipeline_id);
                        return;
                    }
                };
                info!("Moving {} ...", merge_commit);
                match self.move_staging_to_master(repo, merge_commit) {
                    Err(e) => {
                        warn!(
                            "Failed to move {} to master: {:?}",
                            merge_commit,
                            e
                        );
                        send_event.send(vcs::Event::FailedMoveToMaster(
                            pipeline_id,
                            merge_commit,
                        )).expect("Pipeline gone move to master error");
                    }
                    Ok(()) => {
                        info!("Moved {} to master", merge_commit);
                        send_event.send(vcs::Event::MovedToMaster(
                            pipeline_id,
                            merge_commit,
                        )).expect("Pipeline gone move to master");
                    }
                }
            }
        }
    }
    /// Resets the staging branch to origin's master, merges `pull_commit`
    /// into it with `message`, force-pushes, and returns the merge commit.
    ///
    /// Uses `Command::current_dir` rather than `git -C` so that gits older
    /// than 1.8.5 (which lack `-C`) keep working.
    fn merge_to_staging(
        &self,
        repo: &Repo,
        pull_commit: Commit,
        message: &str,
        remote: &str,
    ) -> Result<Commit, GitError> {
        try!(self.setup_dir(repo));
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("fetch")
            .arg("origin")
            .arg(&repo.master_branch)
            .arg(remote)
            .output());
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("checkout")
            .arg(format!("origin/{}", repo.master_branch))
            .output());
        // Force the staging branch back onto origin's master before merging.
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("branch")
            .arg("-f")
            .arg(&repo.staging_branch)
            .arg(format!("origin/{}", repo.master_branch))
            .output());
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("checkout")
            .arg(&repo.staging_branch)
            .output());
        // --no-ff guarantees a merge commit even when fast-forward is possible.
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("merge")
            .arg("--no-ff")
            .arg("-m")
            .arg(message)
            .arg(&pull_commit.to_string())
            .output());
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("push")
            .arg("-f")
            .arg("origin")
            .arg(&repo.staging_branch)
            .output());
        // Read the new staging head straight from the loose ref file.
        let mut commit_string = String::new();
        try!(try!(File::open(
            Path::new(&repo.path)
                .join(".git/refs/heads/")
                .join(&repo.staging_branch)
        )).read_to_string(&mut commit_string));
        commit_string = commit_string.replace("\n", "").replace("\r", "");
        Commit::from_str(&commit_string).map_err(|e| e.into())
    }
    /// Force-pushes `merge_commit` to origin's master branch.
    fn move_staging_to_master(
        &self,
        repo: &Repo,
        merge_commit: Commit,
    ) -> Result<(), GitError> {
        try!(self.setup_dir(repo));
        try_cmd!(Command::new(&self.executable)
            .current_dir(&repo.path)
            .arg("push")
            .arg("-f")
            .arg("origin")
            .arg(format!("{}:{}", merge_commit, &repo.master_branch))
            .output());
        Ok(())
    }
    /// Ensures the local checkout exists: on first use, initializes it and
    /// adds the `origin` remote; otherwise aborts any merge left over from a
    /// previous failed run. Note `git init <path>` cannot use current_dir
    /// because the directory does not exist yet.
    fn setup_dir(&self, repo: &Repo) -> Result<(), GitError> {
        if !repo.path.exists() {
            try_cmd!(Command::new(&self.executable)
                .arg("init")
                .arg(&repo.path)
                .output());
            try_cmd!(Command::new(&self.executable)
                .current_dir(&repo.path)
                .arg("remote")
                .arg("add")
                .arg("origin")
                .arg(&repo.origin)
                .output());
        } else {
            // Plain try! (not try_cmd!): `merge --abort` fails when no merge
            // is in progress, which is the common, harmless case.
            try!(Command::new(&self.executable)
                .current_dir(&repo.path)
                .arg("merge")
                .arg("--abort")
                .output());
        }
        Ok(())
    }
}
// Errors produced while driving git: parse failures when reading a ref,
// I/O failures spawning git, or git itself exiting non-zero (Cli carries
// the exit status and captured stderr).
quick_error! {
    #[derive(Debug)]
    pub enum GitError {
        Int(err: std::num::ParseIntError) {
            cause(err)
            from()
        }
        Io(err: std::io::Error) {
            cause(err)
            from()
        }
        Cli(status: std::process::ExitStatus, output: String) {}
    }
}
// A git commit is a SHA1 sum. A SHA1 sum is a 160-bit number.
// Stored as 64 + 64 + 32 bits so it is Copy and cheap to pass around.
#[derive(Copy, Clone, Deserialize, Eq, PartialEq, Serialize)]
pub struct Commit(u64, u64, u32);
impl vcs::Commit for Commit {}
impl Display for Commit {
    // Renders the full 40-digit lowercase hex SHA1 (zero-padded).
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{:016x}{:016x}{:08x}", self.0, self.1, self.2)
    }
}
impl Debug for Commit {
    // Same 40 hex digits as Display, wrapped in Commit(...) for diagnostics.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "Commit({:016x}{:016x}{:08x})", self.0, self.1, self.2)
    }
}
// It should be easy to get from hexadecimal.
impl FromStr for Commit {
    type Err = ParseIntError;
    /// Parses a 40-digit hexadecimal SHA1. Anything else (wrong length or
    /// any non-hex character) yields a ParseIntError.
    fn from_str(mut s: &str) -> Result<Commit, ParseIntError> {
        // The byte slices below (&s[0..16] etc.) panic if a slice boundary
        // falls inside a multi-byte UTF-8 character, so reject anything that
        // is not pure ASCII hex up front by swapping in a sentinel that
        // from_str_radix is guaranteed to refuse.
        if s.len() != 40 || !s.chars().all(|c| c.is_digit(16)) {
            s = "THIS_IS_NOT_A_NUMBER_BUT_I_CANT_MAKE_PARSEINTERROR_MYSELF";
        }
        let a = try!(u64::from_str_radix(&s[0..16], 16));
        let b = try!(u64::from_str_radix(&s[16..32], 16));
        let c = try!(u32::from_str_radix(&s[32..40], 16));
        Ok(Commit(a, b, c))
    }
}
// NOTE(review): new code would implement From<Commit> for String (which
// provides Into for free); kept as-is to avoid touching the trait surface.
impl Into<String> for Commit {
    fn into(self) -> String {
        // Reuses the Display impl: 40 lowercase hex digits.
        self.to_string()
    }
}
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use syntax::{ast, visit};
use syntax::codemap::{self, CodeMap, Span, BytePos};
use syntax::parse::ParseSess;
use strings::string_buffer::StringBuffer;
use Indent;
use utils::{self, CodeMapSpanUtils};
use config::Config;
use rewrite::{Rewrite, RewriteContext};
use comment::rewrite_comment;
use macros::rewrite_macro;
use items::{rewrite_static, rewrite_associated_type, rewrite_type_alias, format_impl, format_trait};
/// AST visitor that drives formatting: walks items/statements, appending
/// formatted text to `buffer` while `last_pos` tracks how far into the
/// original source has been emitted.
pub struct FmtVisitor<'a> {
    pub parse_session: &'a ParseSess,
    pub codemap: &'a CodeMap,
    // Output accumulated so far.
    pub buffer: StringBuffer,
    // Position in the original source up to which output has been produced.
    pub last_pos: BytePos,
    // FIXME: use an RAII util or closure for indenting
    pub block_indent: Indent,
    pub config: &'a Config,
}
impl<'a> FmtVisitor<'a> {
    /// Formats one statement: item declarations dispatch to visit_item;
    /// expression/semi statements are rewritten in the remaining line width.
    fn visit_stmt(&mut self, stmt: &ast::Stmt) {
        match stmt.node {
            ast::StmtKind::Decl(ref decl, _) => {
                if let ast::DeclKind::Item(ref item) = decl.node {
                    self.visit_item(item);
                } else {
                    let rewrite = stmt.rewrite(&self.get_context(),
                                               self.config.max_width - self.block_indent.width(),
                                               self.block_indent);
                    self.push_rewrite(stmt.span, rewrite);
                }
            }
            ast::StmtKind::Expr(..) |
            ast::StmtKind::Semi(..) => {
                let rewrite = stmt.rewrite(&self.get_context(),
                                           self.config.max_width - self.block_indent.width(),
                                           self.block_indent);
                self.push_rewrite(stmt.span, rewrite);
            }
            ast::StmtKind::Mac(ref mac, _macro_style, _) => {
                self.format_missing_with_indent(stmt.span.lo);
                self.visit_mac(mac, None);
            }
        }
    }
    /// Formats a block: emits "{", formats each statement and the trailing
    /// expression (if any), then closes the brace via close_block.
    pub fn visit_block(&mut self, b: &ast::Block) {
        debug!("visit_block: {:?} {:?}",
               self.codemap.lookup_char_pos(b.span.lo),
               self.codemap.lookup_char_pos(b.span.hi));
        // Check if this block has braces.
        let snippet = self.snippet(b.span);
        let has_braces = snippet.starts_with("{") || snippet.starts_with("unsafe");
        let brace_compensation = if has_braces {
            BytePos(1)
        } else {
            BytePos(0)
        };
        self.last_pos = self.last_pos + brace_compensation;
        self.block_indent = self.block_indent.block_indent(self.config);
        self.buffer.push_str("{");
        for stmt in &b.stmts {
            self.visit_stmt(&stmt)
        }
        if let Some(ref e) = b.expr {
            self.format_missing_with_indent(e.span.lo);
            let rewrite = e.rewrite(&self.get_context(),
                                    self.config.max_width - self.block_indent.width(),
                                    self.block_indent)
                           .unwrap_or_else(|| self.snippet(e.span));
            self.buffer.push_str(&rewrite);
            self.last_pos = e.span.hi;
            if utils::semicolon_for_expr(e) {
                self.buffer.push_str(";");
            }
        }
        // FIXME: we should compress any newlines here to just one
        self.format_missing_with_indent(b.span.hi - brace_compensation);
        self.close_block();
        self.last_pos = b.span.hi;
    }
    // FIXME: this is a terrible hack to indent the comments between the last
    // item in the block and the closing brace to the block's level.
    // The closing brace itself, however, should be indented at a shallower
    // level.
    fn close_block(&mut self) {
        // Drop one level of indentation already emitted before the "}".
        let total_len = self.buffer.len;
        let chars_too_many = if self.config.hard_tabs {
            1
        } else {
            self.config.tab_spaces
        };
        self.buffer.truncate(total_len - chars_too_many);
        self.buffer.push_str("}");
        self.block_indent = self.block_indent.block_unindent(self.config);
    }
    // Note that this only gets called for function definitions. Required methods
    // on traits do not get handled here.
    fn visit_fn(&mut self,
                fk: visit::FnKind,
                fd: &ast::FnDecl,
                b: &ast::Block,
                s: Span,
                _: ast::NodeId) {
        let indent = self.block_indent;
        let rewrite = match fk {
            visit::FnKind::ItemFn(ident, ref generics, unsafety, constness, abi, vis) => {
                self.rewrite_fn(indent,
                                ident,
                                fd,
                                None,
                                generics,
                                unsafety,
                                constness,
                                abi,
                                vis,
                                codemap::mk_sp(s.lo, b.span.lo),
                                &b)
            }
            visit::FnKind::Method(ident, ref sig, vis) => {
                self.rewrite_fn(indent,
                                ident,
                                fd,
                                Some(&sig.explicit_self),
                                &sig.generics,
                                sig.unsafety,
                                sig.constness,
                                sig.abi,
                                vis.unwrap_or(&ast::Visibility::Inherited),
                                codemap::mk_sp(s.lo, b.span.lo),
                                &b)
            }
            visit::FnKind::Closure => None,
        };
        if let Some(fn_str) = rewrite {
            self.format_missing_with_indent(s.lo);
            self.buffer.push_str(&fn_str);
            // A rewrite ending in '}' already contains the whole body; skip
            // visiting the block again.
            if let Some(c) = fn_str.chars().last() {
                if c == '}' {
                    self.last_pos = b.span.hi;
                    return;
                }
            }
        } else {
            self.format_missing(b.span.lo);
        }
        self.last_pos = b.span.lo;
        self.visit_block(b)
    }
    /// Formats one top-level item, dispatching on its kind.
    fn visit_item(&mut self, item: &ast::Item) {
        // Only look at attributes for modules (except for rustfmt_skip) if the
        // module is inline. We want to avoid looking at attributes in another
        // file, which the AST doesn't distinguish.
        match item.node {
            ast::ItemKind::Mod(ref m) => {
                let outer_file = self.codemap.lookup_char_pos(item.span.lo).file;
                let inner_file = self.codemap.lookup_char_pos(m.inner.lo).file;
                if outer_file.name == inner_file.name {
                    if self.visit_attrs(&item.attrs) {
                        self.push_rewrite(item.span, None);
                        return;
                    }
                } else if utils::contains_skip(&item.attrs) {
                    return;
                }
            }
            _ => {
                if self.visit_attrs(&item.attrs) {
                    self.push_rewrite(item.span, None);
                    return;
                }
            }
        }
        match item.node {
            ast::ItemKind::Use(ref vp) => {
                self.format_import(&item.vis, vp, item.span);
            }
            ast::ItemKind::Impl(..) => {
                self.format_missing_with_indent(item.span.lo);
                if let Some(impl_str) = format_impl(&self.get_context(), item, self.block_indent) {
                    self.buffer.push_str(&impl_str);
                    self.last_pos = item.span.hi;
                }
            }
            ast::ItemKind::Trait(..) => {
                self.format_missing_with_indent(item.span.lo);
                if let Some(trait_str) = format_trait(&self.get_context(),
                                                      item,
                                                      self.block_indent) {
                    self.buffer.push_str(&trait_str);
                    self.last_pos = item.span.hi;
                }
            }
            ast::ItemKind::ExternCrate(_) => {
                // extern crate lines are emitted verbatim.
                self.format_missing_with_indent(item.span.lo);
                let new_str = self.snippet(item.span);
                self.buffer.push_str(&new_str);
                self.last_pos = item.span.hi;
            }
            ast::ItemKind::Struct(ref def, ref generics) => {
                let rewrite = {
                    let indent = self.block_indent;
                    let context = self.get_context();
                    ::items::format_struct(&context,
                                           "struct ",
                                           item.ident,
                                           &item.vis,
                                           def,
                                           Some(generics),
                                           item.span,
                                           indent)
                        .map(|s| {
                            match *def {
                                // Tuple structs need a trailing semicolon.
                                ast::VariantData::Tuple(..) => s + ";",
                                _ => s,
                            }
                        })
                };
                self.push_rewrite(item.span, rewrite);
            }
            ast::ItemKind::Enum(ref def, ref generics) => {
                self.format_missing_with_indent(item.span.lo);
                self.visit_enum(item.ident, &item.vis, def, generics, item.span);
                self.last_pos = item.span.hi;
            }
            ast::ItemKind::Mod(ref module) => {
                self.format_missing_with_indent(item.span.lo);
                self.format_mod(module, &item.vis, item.span, item.ident);
            }
            ast::ItemKind::Mac(ref mac) => {
                self.format_missing_with_indent(item.span.lo);
                self.visit_mac(mac, Some(item.ident));
            }
            ast::ItemKind::ForeignMod(ref foreign_mod) => {
                self.format_missing_with_indent(item.span.lo);
                self.format_foreign_mod(foreign_mod, item.span);
            }
            ast::ItemKind::Static(ref ty, mutability, ref expr) => {
                let rewrite = rewrite_static("static",
                                             &item.vis,
                                             item.ident,
                                             ty,
                                             mutability,
                                             Some(expr),
                                             &self.get_context());
                self.push_rewrite(item.span, rewrite);
            }
            ast::ItemKind::Const(ref ty, ref expr) => {
                let rewrite = rewrite_static("const",
                                             &item.vis,
                                             item.ident,
                                             ty,
                                             ast::Mutability::Immutable,
                                             Some(expr),
                                             &self.get_context());
                self.push_rewrite(item.span, rewrite);
            }
            ast::ItemKind::DefaultImpl(..) => {
                // FIXME(#78): format impl definitions.
            }
            ast::ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
                self.visit_fn(visit::FnKind::ItemFn(item.ident,
                                                    generics,
                                                    unsafety,
                                                    constness,
                                                    abi,
                                                    &item.vis),
                              decl,
                              body,
                              item.span,
                              item.id)
            }
            ast::ItemKind::Ty(ref ty, ref generics) => {
                let rewrite = rewrite_type_alias(&self.get_context(),
                                                 self.block_indent,
                                                 item.ident,
                                                 ty,
                                                 generics,
                                                 &item.vis,
                                                 item.span);
                self.push_rewrite(item.span, rewrite);
            }
        }
    }
    /// Formats one trait item (associated const/type, required or provided
    /// method). Skipped entirely if its attributes request rustfmt_skip.
    pub fn visit_trait_item(&mut self, ti: &ast::TraitItem) {
        if self.visit_attrs(&ti.attrs) {
            return;
        }
        match ti.node {
            ast::TraitItemKind::Const(ref ty, ref expr_opt) => {
                let rewrite = rewrite_static("const",
                                             &ast::Visibility::Inherited,
                                             ti.ident,
                                             ty,
                                             ast::Mutability::Immutable,
                                             expr_opt.as_ref(),
                                             &self.get_context());
                self.push_rewrite(ti.span, rewrite);
            }
            ast::TraitItemKind::Method(ref sig, None) => {
                // Required method: signature only, no body to visit.
                let indent = self.block_indent;
                let rewrite = self.rewrite_required_fn(indent, ti.ident, sig, ti.span);
                self.push_rewrite(ti.span, rewrite);
            }
            ast::TraitItemKind::Method(ref sig, Some(ref body)) => {
                self.visit_fn(visit::FnKind::Method(ti.ident, sig, None),
                              &sig.decl,
                              &body,
                              ti.span,
                              ti.id);
            }
            ast::TraitItemKind::Type(ref type_param_bounds, _) => {
                let rewrite = rewrite_associated_type(ti.ident,
                                                      None,
                                                      Some(type_param_bounds),
                                                      &self.get_context(),
                                                      self.block_indent);
                self.push_rewrite(ti.span, rewrite);
            }
        }
    }
    /// Formats one impl item (method, associated const/type, or macro).
    pub fn visit_impl_item(&mut self, ii: &ast::ImplItem) {
        if self.visit_attrs(&ii.attrs) {
            return;
        }
        match ii.node {
            ast::ImplItemKind::Method(ref sig, ref body) => {
                self.visit_fn(visit::FnKind::Method(ii.ident, sig, Some(&ii.vis)),
                              &sig.decl,
                              body,
                              ii.span,
                              ii.id);
            }
            ast::ImplItemKind::Const(ref ty, ref expr) => {
                let rewrite = rewrite_static("const",
                                             &ii.vis,
                                             ii.ident,
                                             ty,
                                             ast::Mutability::Immutable,
                                             Some(expr),
                                             &self.get_context());
                self.push_rewrite(ii.span, rewrite);
            }
            ast::ImplItemKind::Type(ref ty) => {
                let rewrite = rewrite_associated_type(ii.ident,
                                                      Some(ty),
                                                      None,
                                                      &self.get_context(),
                                                      self.block_indent);
                self.push_rewrite(ii.span, rewrite);
            }
            ast::ImplItemKind::Macro(ref mac) => {
                self.format_missing_with_indent(ii.span.lo);
                self.visit_mac(mac, Some(ii.ident));
            }
        }
    }
    /// Formats a macro invocation; leaves the source untouched when the
    /// macro cannot be rewritten.
    fn visit_mac(&mut self, mac: &ast::Mac, ident: Option<ast::Ident>) {
        // 1 = ;
        let width = self.config.max_width - self.block_indent.width() - 1;
        let rewrite = rewrite_macro(mac, ident, &self.get_context(), width, self.block_indent);
        if let Some(res) = rewrite {
            self.buffer.push_str(&res);
            self.last_pos = mac.span.hi;
        }
    }
    /// Emits a rewritten span, falling back to the original snippet when
    /// the rewrite failed (None).
    fn push_rewrite(&mut self, span: Span, rewrite: Option<String>) {
        self.format_missing_with_indent(span.lo);
        let result = rewrite.unwrap_or_else(|| self.snippet(span));
        self.buffer.push_str(&result);
        self.last_pos = span.hi;
    }
    /// Creates a fresh visitor positioned at the start of the codemap.
    pub fn from_codemap(parse_session: &'a ParseSess, config: &'a Config) -> FmtVisitor<'a> {
        FmtVisitor {
            parse_session: parse_session,
            codemap: parse_session.codemap(),
            buffer: StringBuffer::new(),
            last_pos: BytePos(0),
            block_indent: Indent {
                block_indent: 0,
                alignment: 0,
            },
            config: config,
        }
    }
    /// Returns the source text for `span`, or "" (with a diagnostic printed)
    /// when the span cannot be resolved.
    pub fn snippet(&self, span: Span) -> String {
        match self.codemap.span_to_snippet(span) {
            Ok(s) => s,
            Err(_) => {
                println!("Couldn't make snippet for span {:?}->{:?}",
                         self.codemap.lookup_char_pos(span.lo),
                         self.codemap.lookup_char_pos(span.hi));
                "".to_owned()
            }
        }
    }
    // Returns true if we should skip the following item.
    pub fn visit_attrs(&mut self, attrs: &[ast::Attribute]) -> bool {
        if utils::contains_skip(attrs) {
            return true;
        }
        let outers: Vec<_> = attrs.iter()
                                  .filter(|a| a.node.style == ast::AttrStyle::Outer)
                                  .cloned()
                                  .collect();
        if outers.is_empty() {
            return false;
        }
        let first = &outers[0];
        self.format_missing_with_indent(first.span.lo);
        let rewrite = outers.rewrite(&self.get_context(),
                                     self.config.max_width - self.block_indent.width(),
                                     self.block_indent)
                            .unwrap();
        self.buffer.push_str(&rewrite);
        let last = outers.last().unwrap();
        self.last_pos = last.span.hi;
        false
    }
    /// Visits every item of a module body in order.
    fn walk_mod_items(&mut self, m: &ast::Mod) {
        for item in &m.items {
            self.visit_item(&item);
        }
    }
    /// Formats `mod name;` or an inline `mod name { ... }`.
    fn format_mod(&mut self, m: &ast::Mod, vis: &ast::Visibility, s: Span, ident: ast::Ident) {
        // Decide whether this is an inline mod or an external mod.
        let local_file_name = self.codemap.span_to_filename(s);
        let is_internal = local_file_name == self.codemap.span_to_filename(m.inner);
        if let Some(vis) = utils::format_visibility(vis) {
            self.buffer.push_str(vis);
        }
        self.buffer.push_str("mod ");
        self.buffer.push_str(&ident.to_string());
        if is_internal {
            self.buffer.push_str(" {");
            // Hackery to account for the closing }.
            let mod_lo = self.codemap.span_after(s, "{");
            let body_snippet = self.snippet(codemap::mk_sp(mod_lo, m.inner.hi - BytePos(1)));
            let body_snippet = body_snippet.trim();
            if body_snippet.is_empty() {
                self.buffer.push_str("}");
            } else {
                self.last_pos = mod_lo;
                self.block_indent = self.block_indent.block_indent(self.config);
                self.walk_mod_items(m);
                self.format_missing_with_indent(m.inner.hi - BytePos(1));
                self.close_block();
            }
            self.last_pos = m.inner.hi;
        } else {
            self.buffer.push_str(";");
            self.last_pos = s.hi;
        }
    }
    /// Formats a module that lives in its own file (top-level entry point
    /// for non-inline modules).
    pub fn format_separate_mod(&mut self, m: &ast::Mod) {
        let filemap = self.codemap.lookup_char_pos(m.inner.lo).file;
        self.last_pos = filemap.start_pos;
        self.block_indent = Indent::empty();
        self.walk_mod_items(m);
        self.format_missing(filemap.end_pos);
    }
    /// Formats a `use` item; an empty rewrite means the import was removed
    /// (e.g. emptied braces), so only the preceding text is emitted.
    fn format_import(&mut self, vis: &ast::Visibility, vp: &ast::ViewPath, span: Span) {
        let vis = match utils::format_visibility(vis) {
            Some(s) => s,
            None => return,
        };
        let mut offset = self.block_indent;
        offset.alignment += vis.len() + "use ".len();
        // 1 = ";"
        match vp.rewrite(&self.get_context(),
                         self.config.max_width - offset.width() - 1,
                         offset) {
            Some(ref s) if s.is_empty() => {
                // Format up to last newline
                let prev_span = codemap::mk_sp(self.last_pos, span.lo);
                let span_end = match self.snippet(prev_span).rfind('\n') {
                    Some(offset) => self.last_pos + BytePos(offset as u32),
                    None => span.lo,
                };
                self.format_missing(span_end);
                self.last_pos = span.hi;
            }
            Some(ref s) => {
                let s = format!("{}use {};", vis, s);
                self.format_missing_with_indent(span.lo);
                self.buffer.push_str(&s);
                self.last_pos = span.hi;
            }
            None => {
                self.format_missing_with_indent(span.lo);
                self.format_missing(span.hi);
            }
        }
    }
    /// Bundles the visitor's shared state for the rewrite machinery.
    pub fn get_context(&self) -> RewriteContext {
        RewriteContext {
            parse_session: self.parse_session,
            codemap: self.codemap,
            config: self.config,
            block_indent: self.block_indent,
        }
    }
}
impl<'a> Rewrite for [ast::Attribute] {
    /// Rewrites a run of attributes (including doc comments), preserving the
    /// comments and blank lines found between them in the original source.
    fn rewrite(&self, context: &RewriteContext, _: usize, offset: Indent) -> Option<String> {
        let mut result = String::new();
        if self.is_empty() {
            return Some(result);
        }
        let indent = offset.to_string(context.config);
        for (i, a) in self.iter().enumerate() {
            let mut a_str = context.snippet(a.span);
            // Write comments and blank lines between attributes.
            if i > 0 {
                let comment = context.snippet(codemap::mk_sp(self[i - 1].span.hi, a.span.lo));
                // This particular horror show is to preserve line breaks in between doc
                // comments. An alternative would be to force such line breaks to start
                // with the usual doc comment token.
                let multi_line = a_str.starts_with("//") && comment.matches('\n').count() > 1;
                let comment = comment.trim();
                if !comment.is_empty() {
                    let comment = try_opt!(rewrite_comment(comment,
                                                           false,
                                                           context.config.ideal_width -
                                                           offset.width(),
                                                           offset,
                                                           context.config));
                    result.push_str(&indent);
                    result.push_str(&comment);
                    result.push('\n');
                } else if multi_line {
                    result.push('\n');
                }
                result.push_str(&indent);
            }
            // Doc comments are themselves reflowed as comments.
            if a_str.starts_with("//") {
                a_str = try_opt!(rewrite_comment(&a_str,
                                                 false,
                                                 context.config.ideal_width - offset.width(),
                                                 offset,
                                                 context.config));
            }
            // Write the attribute itself.
            result.push_str(&a_str);
            if i < self.len() - 1 {
                result.push('\n');
            }
        }
        Some(result)
    }
}
Altered FmtVisitor to function correctly on macro-generated code by resolving spans to their source callsite before computing positions.
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use syntax::{ast, visit};
use syntax::codemap::{self, CodeMap, Span, BytePos};
use syntax::parse::ParseSess;
use strings::string_buffer::StringBuffer;
use Indent;
use utils::{self, CodeMapSpanUtils};
use config::Config;
use rewrite::{Rewrite, RewriteContext};
use comment::rewrite_comment;
use macros::rewrite_macro;
use items::{rewrite_static, rewrite_associated_type, rewrite_type_alias, format_impl, format_trait};
// For format_missing and last_pos, need to use the source callsite (if applicable).
// Required as generated code spans aren't guaranteed to follow on from the last span.
// Expands to the callsite-resolved span of $sp via the visitor's codemap.
macro_rules! source {
    ($this:ident, $sp: expr) => {
        $this.codemap.source_callsite($sp)
    }
}
/// AST visitor that drives formatting: walks items/statements, appending
/// formatted text to `buffer` while `last_pos` tracks how far into the
/// original source has been emitted.
pub struct FmtVisitor<'a> {
    pub parse_session: &'a ParseSess,
    pub codemap: &'a CodeMap,
    // Output accumulated so far.
    pub buffer: StringBuffer,
    // Position in the original source up to which output has been produced.
    pub last_pos: BytePos,
    // FIXME: use an RAII util or closure for indenting
    pub block_indent: Indent,
    pub config: &'a Config,
}
impl<'a> FmtVisitor<'a> {
    /// Formats one statement: item declarations dispatch to visit_item;
    /// expression/semi statements are rewritten in the remaining line width.
    /// Macro spans are resolved to their source callsite via source!.
    fn visit_stmt(&mut self, stmt: &ast::Stmt) {
        match stmt.node {
            ast::StmtKind::Decl(ref decl, _) => {
                if let ast::DeclKind::Item(ref item) = decl.node {
                    self.visit_item(item);
                } else {
                    let rewrite = stmt.rewrite(&self.get_context(),
                                               self.config.max_width - self.block_indent.width(),
                                               self.block_indent);
                    self.push_rewrite(stmt.span, rewrite);
                }
            }
            ast::StmtKind::Expr(..) |
            ast::StmtKind::Semi(..) => {
                let rewrite = stmt.rewrite(&self.get_context(),
                                           self.config.max_width - self.block_indent.width(),
                                           self.block_indent);
                self.push_rewrite(stmt.span, rewrite);
            }
            ast::StmtKind::Mac(ref mac, _macro_style, _) => {
                self.format_missing_with_indent(source!(self, stmt.span).lo);
                self.visit_mac(mac, None);
            }
        }
    }
    /// Formats a block: emits "{", formats each statement and the trailing
    /// expression (if any), then closes the brace via close_block. Positions
    /// are taken from the source callsite (source!) so macro-generated code
    /// does not produce bogus offsets.
    pub fn visit_block(&mut self, b: &ast::Block) {
        debug!("visit_block: {:?} {:?}",
               self.codemap.lookup_char_pos(b.span.lo),
               self.codemap.lookup_char_pos(b.span.hi));
        // Check if this block has braces.
        let snippet = self.snippet(b.span);
        let has_braces = snippet.starts_with("{") || snippet.starts_with("unsafe");
        let brace_compensation = if has_braces {
            BytePos(1)
        } else {
            BytePos(0)
        };
        self.last_pos = self.last_pos + brace_compensation;
        self.block_indent = self.block_indent.block_indent(self.config);
        self.buffer.push_str("{");
        for stmt in &b.stmts {
            self.visit_stmt(&stmt)
        }
        if let Some(ref e) = b.expr {
            self.format_missing_with_indent(source!(self, e.span).lo);
            let rewrite = e.rewrite(&self.get_context(),
                                    self.config.max_width - self.block_indent.width(),
                                    self.block_indent)
                           .unwrap_or_else(|| self.snippet(e.span));
            self.buffer.push_str(&rewrite);
            self.last_pos = source!(self, e.span).hi;
            if utils::semicolon_for_expr(e) {
                self.buffer.push_str(";");
            }
        }
        // FIXME: we should compress any newlines here to just one
        self.format_missing_with_indent(source!(self, b.span).hi - brace_compensation);
        self.close_block();
        self.last_pos = source!(self, b.span).hi;
    }
// FIXME: this is a terrible hack to indent the comments between the last
// item in the block and the closing brace to the block's level.
// The closing brace itself, however, should be indented at a shallower
// level.
fn close_block(&mut self) {
    // One indentation step is a single tab, or `tab_spaces` spaces.
    let excess = if self.config.hard_tabs {
        1
    } else {
        self.config.tab_spaces
    };
    // Drop the indentation already emitted before the brace, then close.
    let len = self.buffer.len;
    self.buffer.truncate(len - excess);
    self.buffer.push_str("}");
    self.block_indent = self.block_indent.block_unindent(self.config);
}
// Note that this only gets called for function definitions. Required methods
// on traits do not get handled here.
fn visit_fn(&mut self,
            fk: visit::FnKind,
            fd: &ast::FnDecl,
            b: &ast::Block,
            s: Span,
            _: ast::NodeId) {
    let indent = self.block_indent;
    // Rewrite the fn; the result may or may not include the body (detected
    // below via a trailing '}').
    let rewrite = match fk {
        visit::FnKind::ItemFn(ident, ref generics, unsafety, constness, abi, vis) => {
            self.rewrite_fn(indent,
                            ident,
                            fd,
                            None,
                            generics,
                            unsafety,
                            constness,
                            abi,
                            vis,
                            codemap::mk_sp(s.lo, b.span.lo),
                            &b)
        }
        visit::FnKind::Method(ident, ref sig, vis) => {
            self.rewrite_fn(indent,
                            ident,
                            fd,
                            Some(&sig.explicit_self),
                            &sig.generics,
                            sig.unsafety,
                            sig.constness,
                            sig.abi,
                            vis.unwrap_or(&ast::Visibility::Inherited),
                            codemap::mk_sp(s.lo, b.span.lo),
                            &b)
        }
        // Closures are not handled by this visitor.
        visit::FnKind::Closure => None,
    };
    if let Some(fn_str) = rewrite {
        self.format_missing_with_indent(source!(self, s).lo);
        self.buffer.push_str(&fn_str);
        if let Some(c) = fn_str.chars().last() {
            if c == '}' {
                // The rewrite already contained the whole body; we're done.
                self.last_pos = source!(self, b.span).hi;
                return;
            }
        }
    } else {
        self.format_missing(source!(self, b.span).lo);
    }
    // Header emitted (or kept verbatim); format the body separately.
    self.last_pos = source!(self, b.span).lo;
    self.visit_block(b)
}
/// Formats a single item, dispatching on its kind.
///
/// When a `rustfmt_skip` attribute is present the original text is kept.
/// Module items only have their attributes inspected when the module is
/// inline in the current file.
fn visit_item(&mut self, item: &ast::Item) {
    // Only look at attributes for modules (except for rustfmt_skip) if the
    // module is inline. We want to avoid looking at attributes in another
    // file, which the AST doesn't distinguish.
    match item.node {
        ast::ItemKind::Mod(ref m) => {
            let outer_file = self.codemap.lookup_char_pos(item.span.lo).file;
            let inner_file = self.codemap.lookup_char_pos(m.inner.lo).file;
            if outer_file.name == inner_file.name {
                // Inline module: its attributes live in this file.
                if self.visit_attrs(&item.attrs) {
                    self.push_rewrite(item.span, None);
                    return;
                }
            } else if utils::contains_skip(&item.attrs) {
                return;
            }
        }
        _ => {
            if self.visit_attrs(&item.attrs) {
                self.push_rewrite(item.span, None);
                return;
            }
        }
    }
    match item.node {
        ast::ItemKind::Use(ref vp) => {
            self.format_import(&item.vis, vp, item.span);
        }
        ast::ItemKind::Impl(..) => {
            self.format_missing_with_indent(source!(self, item.span).lo);
            if let Some(impl_str) = format_impl(&self.get_context(), item, self.block_indent) {
                self.buffer.push_str(&impl_str);
                self.last_pos = source!(self, item.span).hi;
            }
        }
        ast::ItemKind::Trait(..) => {
            // Use the source callsite like every other arm, so spans from
            // macro-generated traits are handled consistently (see `source!`).
            self.format_missing_with_indent(source!(self, item.span).lo);
            if let Some(trait_str) = format_trait(&self.get_context(),
                                                  item,
                                                  self.block_indent) {
                self.buffer.push_str(&trait_str);
                self.last_pos = source!(self, item.span).hi;
            }
        }
        ast::ItemKind::ExternCrate(_) => {
            self.format_missing_with_indent(source!(self, item.span).lo);
            // `extern crate` declarations are kept verbatim.
            let new_str = self.snippet(item.span);
            self.buffer.push_str(&new_str);
            self.last_pos = source!(self, item.span).hi;
        }
        ast::ItemKind::Struct(ref def, ref generics) => {
            let rewrite = {
                let indent = self.block_indent;
                let context = self.get_context();
                ::items::format_struct(&context,
                                       "struct ",
                                       item.ident,
                                       &item.vis,
                                       def,
                                       Some(generics),
                                       item.span,
                                       indent)
                    .map(|s| {
                        match *def {
                            // Tuple structs need a trailing semicolon.
                            ast::VariantData::Tuple(..) => s + ";",
                            _ => s,
                        }
                    })
            };
            self.push_rewrite(item.span, rewrite);
        }
        ast::ItemKind::Enum(ref def, ref generics) => {
            self.format_missing_with_indent(source!(self, item.span).lo);
            self.visit_enum(item.ident, &item.vis, def, generics, item.span);
            self.last_pos = source!(self, item.span).hi;
        }
        ast::ItemKind::Mod(ref module) => {
            self.format_missing_with_indent(source!(self, item.span).lo);
            self.format_mod(module, &item.vis, item.span, item.ident);
        }
        ast::ItemKind::Mac(ref mac) => {
            self.format_missing_with_indent(source!(self, item.span).lo);
            self.visit_mac(mac, Some(item.ident));
        }
        ast::ItemKind::ForeignMod(ref foreign_mod) => {
            self.format_missing_with_indent(source!(self, item.span).lo);
            self.format_foreign_mod(foreign_mod, item.span);
        }
        ast::ItemKind::Static(ref ty, mutability, ref expr) => {
            let rewrite = rewrite_static("static",
                                         &item.vis,
                                         item.ident,
                                         ty,
                                         mutability,
                                         Some(expr),
                                         &self.get_context());
            self.push_rewrite(item.span, rewrite);
        }
        ast::ItemKind::Const(ref ty, ref expr) => {
            // Consts are formatted like immutable statics.
            let rewrite = rewrite_static("const",
                                         &item.vis,
                                         item.ident,
                                         ty,
                                         ast::Mutability::Immutable,
                                         Some(expr),
                                         &self.get_context());
            self.push_rewrite(item.span, rewrite);
        }
        ast::ItemKind::DefaultImpl(..) => {
            // FIXME(#78): format impl definitions.
        }
        ast::ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => {
            self.visit_fn(visit::FnKind::ItemFn(item.ident,
                                                generics,
                                                unsafety,
                                                constness,
                                                abi,
                                                &item.vis),
                          decl,
                          body,
                          item.span,
                          item.id)
        }
        ast::ItemKind::Ty(ref ty, ref generics) => {
            let rewrite = rewrite_type_alias(&self.get_context(),
                                             self.block_indent,
                                             item.ident,
                                             ty,
                                             generics,
                                             &item.vis,
                                             item.span);
            self.push_rewrite(item.span, rewrite);
        }
    }
}
/// Formats a trait item: associated consts, required and provided methods,
/// and associated types. Returns early without formatting when `visit_attrs`
/// signals a skip.
pub fn visit_trait_item(&mut self, ti: &ast::TraitItem) {
    if self.visit_attrs(&ti.attrs) {
        return;
    }
    match ti.node {
        ast::TraitItemKind::Const(ref ty, ref expr_opt) => {
            let rewrite = rewrite_static("const",
                                         &ast::Visibility::Inherited,
                                         ti.ident,
                                         ty,
                                         ast::Mutability::Immutable,
                                         expr_opt.as_ref(),
                                         &self.get_context());
            self.push_rewrite(ti.span, rewrite);
        }
        // Required method (no body): only the signature is rewritten.
        ast::TraitItemKind::Method(ref sig, None) => {
            let indent = self.block_indent;
            let rewrite = self.rewrite_required_fn(indent, ti.ident, sig, ti.span);
            self.push_rewrite(ti.span, rewrite);
        }
        // Provided method: formatted like an ordinary fn definition.
        ast::TraitItemKind::Method(ref sig, Some(ref body)) => {
            self.visit_fn(visit::FnKind::Method(ti.ident, sig, None),
                          &sig.decl,
                          &body,
                          ti.span,
                          ti.id);
        }
        ast::TraitItemKind::Type(ref type_param_bounds, _) => {
            let rewrite = rewrite_associated_type(ti.ident,
                                                  None,
                                                  Some(type_param_bounds),
                                                  &self.get_context(),
                                                  self.block_indent);
            self.push_rewrite(ti.span, rewrite);
        }
    }
}
/// Formats an impl item: methods, associated consts, associated types and
/// macro invocations. Returns early without formatting when `visit_attrs`
/// signals a skip.
pub fn visit_impl_item(&mut self, ii: &ast::ImplItem) {
    if self.visit_attrs(&ii.attrs) {
        return;
    }
    match ii.node {
        ast::ImplItemKind::Method(ref sig, ref body) => {
            self.visit_fn(visit::FnKind::Method(ii.ident, sig, Some(&ii.vis)),
                          &sig.decl,
                          body,
                          ii.span,
                          ii.id);
        }
        ast::ImplItemKind::Const(ref ty, ref expr) => {
            // Associated consts are formatted like immutable statics.
            let rewrite = rewrite_static("const",
                                        &ii.vis,
                                        ii.ident,
                                        ty,
                                        ast::Mutability::Immutable,
                                        Some(expr),
                                        &self.get_context());
            self.push_rewrite(ii.span, rewrite);
        }
        ast::ImplItemKind::Type(ref ty) => {
            let rewrite = rewrite_associated_type(ii.ident,
                                                  Some(ty),
                                                  None,
                                                  &self.get_context(),
                                                  self.block_indent);
            self.push_rewrite(ii.span, rewrite);
        }
        ast::ImplItemKind::Macro(ref mac) => {
            self.format_missing_with_indent(source!(self, ii.span).lo);
            self.visit_mac(mac, Some(ii.ident));
        }
    }
}
/// Formats a macro invocation. When the rewrite fails, nothing is emitted and
/// `last_pos` is not advanced, so the original text is preserved later.
fn visit_mac(&mut self, mac: &ast::Mac, ident: Option<ast::Ident>) {
    // 1 = ;
    let budget = self.config.max_width - self.block_indent.width() - 1;
    match rewrite_macro(mac, ident, &self.get_context(), budget, self.block_indent) {
        Some(formatted) => {
            self.buffer.push_str(&formatted);
            self.last_pos = source!(self, mac.span).hi;
        }
        None => {}
    }
}
/// Flushes any source up to `span`, writes `rewrite` (or the original snippet
/// when the rewrite failed), and advances `last_pos` past the span.
fn push_rewrite(&mut self, span: Span, rewrite: Option<String>) {
    self.format_missing_with_indent(source!(self, span).lo);
    match rewrite {
        Some(text) => self.buffer.push_str(&text),
        None => {
            let original = self.snippet(span);
            self.buffer.push_str(&original);
        }
    }
    self.last_pos = source!(self, span).hi;
}
/// Creates a fresh visitor over the session's codemap, starting at byte 0
/// with no indentation and an empty output buffer.
pub fn from_codemap(parse_session: &'a ParseSess, config: &'a Config) -> FmtVisitor<'a> {
    let codemap = parse_session.codemap();
    FmtVisitor {
        parse_session: parse_session,
        codemap: codemap,
        buffer: StringBuffer::new(),
        last_pos: BytePos(0),
        block_indent: Indent {
            block_indent: 0,
            alignment: 0,
        },
        config: config,
    }
}
/// Returns the original source text for `span`, or an empty string (after
/// logging the failure) when the codemap cannot produce a snippet.
pub fn snippet(&self, span: Span) -> String {
    self.codemap
        .span_to_snippet(span)
        .unwrap_or_else(|_| {
            println!("Couldn't make snippet for span {:?}->{:?}",
                     self.codemap.lookup_char_pos(span.lo),
                     self.codemap.lookup_char_pos(span.hi));
            "".to_owned()
        })
}
// Returns true if we should skip the following item.
pub fn visit_attrs(&mut self, attrs: &[ast::Attribute]) -> bool {
    if utils::contains_skip(attrs) {
        return true;
    }
    // Only outer attributes are formatted here.
    let outers: Vec<_> = attrs.iter()
        .filter(|a| a.node.style == ast::AttrStyle::Outer)
        .cloned()
        .collect();
    if outers.is_empty() {
        return false;
    }
    let first = &outers[0];
    self.format_missing_with_indent(source!(self, first.span).lo);
    // NOTE(review): this `unwrap` assumes attribute rewriting cannot fail,
    // but the `[ast::Attribute]` Rewrite impl can return None via try_opt! —
    // confirm this cannot panic in practice.
    let rewrite = outers.rewrite(&self.get_context(),
                                 self.config.max_width - self.block_indent.width(),
                                 self.block_indent)
        .unwrap();
    self.buffer.push_str(&rewrite);
    let last = outers.last().unwrap();
    self.last_pos = source!(self, last.span).hi;
    false
}
/// Visits every item of the module, in order.
fn walk_mod_items(&mut self, m: &ast::Mod) {
    m.items.iter().for_each(|item| self.visit_item(item));
}
/// Formats a `mod` item: inline modules get their body walked and re-emitted;
/// external modules (`mod foo;`) are emitted as a single line.
fn format_mod(&mut self, m: &ast::Mod, vis: &ast::Visibility, s: Span, ident: ast::Ident) {
    // Decide whether this is an inline mod or an external mod.
    let local_file_name = self.codemap.span_to_filename(s);
    let is_internal = local_file_name == self.codemap.span_to_filename(source!(self, m.inner));
    if let Some(vis) = utils::format_visibility(vis) {
        self.buffer.push_str(vis);
    }
    self.buffer.push_str("mod ");
    self.buffer.push_str(&ident.to_string());
    if is_internal {
        self.buffer.push_str(" {");
        // Hackery to account for the closing }.
        let mod_lo = self.codemap.span_after(source!(self, s), "{");
        let body_snippet =
            self.snippet(codemap::mk_sp(mod_lo, source!(self, m.inner).hi - BytePos(1)));
        let body_snippet = body_snippet.trim();
        if body_snippet.is_empty() {
            // Completely empty body: close the braces immediately.
            self.buffer.push_str("}");
        } else {
            self.last_pos = mod_lo;
            self.block_indent = self.block_indent.block_indent(self.config);
            self.walk_mod_items(m);
            // `- BytePos(1)` excludes the closing brace handled by close_block.
            self.format_missing_with_indent(source!(self, m.inner).hi - BytePos(1));
            self.close_block();
        }
        self.last_pos = source!(self, m.inner).hi;
    } else {
        // External module: just `mod name;`.
        self.buffer.push_str(";");
        self.last_pos = source!(self, s).hi;
    }
}
/// Formats a module that lives in its own file: walks all of its items and
/// then flushes any remaining source up to the end of that file.
pub fn format_separate_mod(&mut self, m: &ast::Mod) {
    let file = self.codemap.lookup_char_pos(source!(self, m.inner).lo).file;
    let file_end = file.end_pos;
    self.last_pos = file.start_pos;
    self.block_indent = Indent::empty();
    self.walk_mod_items(m);
    self.format_missing(file_end);
}
/// Formats a `use` declaration. An empty rewrite means the import's own text
/// is dropped from the output; a failed rewrite keeps the original verbatim.
fn format_import(&mut self, vis: &ast::Visibility, vp: &ast::ViewPath, span: Span) {
    let vis = match utils::format_visibility(vis) {
        Some(s) => s,
        None => return,
    };
    let mut offset = self.block_indent;
    offset.alignment += vis.len() + "use ".len();
    // 1 = ";"
    match vp.rewrite(&self.get_context(),
                     self.config.max_width - offset.width() - 1,
                     offset) {
        // Empty rewrite: emit preceding source only, skipping the import.
        Some(ref s) if s.is_empty() => {
            // Format up to last newline
            let prev_span = codemap::mk_sp(self.last_pos, source!(self, span).lo);
            let span_end = match self.snippet(prev_span).rfind('\n') {
                Some(offset) => self.last_pos + BytePos(offset as u32),
                None => source!(self, span).lo,
            };
            self.format_missing(span_end);
            self.last_pos = source!(self, span).hi;
        }
        Some(ref s) => {
            let s = format!("{}use {};", vis, s);
            self.format_missing_with_indent(source!(self, span).lo);
            self.buffer.push_str(&s);
            self.last_pos = source!(self, span).hi;
        }
        // Rewrite failed: keep the original text as-is.
        None => {
            self.format_missing_with_indent(source!(self, span).lo);
            self.format_missing(source!(self, span).hi);
        }
    }
}
/// Builds a `RewriteContext` snapshotting the visitor's current state for use
/// by the `Rewrite` implementations.
pub fn get_context(&self) -> RewriteContext {
    RewriteContext {
        config: self.config,
        block_indent: self.block_indent,
        parse_session: self.parse_session,
        codemap: self.codemap,
    }
}
}
impl<'a> Rewrite for [ast::Attribute] {
    /// Rewrites a run of attributes (including doc comments), preserving the
    /// comments and blank lines that appear between them.
    fn rewrite(&self, context: &RewriteContext, _: usize, offset: Indent) -> Option<String> {
        let mut result = String::new();
        if self.is_empty() {
            return Some(result);
        }
        let indent = offset.to_string(context.config);
        for (i, a) in self.iter().enumerate() {
            let mut a_str = context.snippet(a.span);
            // Write comments and blank lines between attributes.
            if i > 0 {
                let comment = context.snippet(codemap::mk_sp(self[i - 1].span.hi, a.span.lo));
                // This particular horror show is to preserve line breaks in between doc
                // comments. An alternative would be to force such line breaks to start
                // with the usual doc comment token.
                let multi_line = a_str.starts_with("//") && comment.matches('\n').count() > 1;
                let comment = comment.trim();
                if !comment.is_empty() {
                    let comment = try_opt!(rewrite_comment(comment,
                                                           false,
                                                           context.config.ideal_width -
                                                           offset.width(),
                                                           offset,
                                                           context.config));
                    result.push_str(&indent);
                    result.push_str(&comment);
                    result.push('\n');
                } else if multi_line {
                    result.push('\n');
                }
                result.push_str(&indent);
            }
            // Doc comments (snippets starting with `//`) are themselves
            // reflowed through the comment rewriter.
            if a_str.starts_with("//") {
                a_str = try_opt!(rewrite_comment(&a_str,
                                                 false,
                                                 context.config.ideal_width - offset.width(),
                                                 offset,
                                                 context.config));
            }
            // Write the attribute itself.
            result.push_str(&a_str);
            if i < self.len() - 1 {
                result.push('\n');
            }
        }
        Some(result)
    }
}
|
//! Web interface of cratesfyi
pub(crate) mod page;
use log::{debug, info};
/// ctry! (cratesfyitry) is extremely similar to try! and itry!
/// except it returns an error page response instead of plain Err.
macro_rules! ctry {
    ($result:expr) => {
        match $result {
            Ok(v) => v,
            Err(e) => {
                // Render a debug dump of the error as a BadRequest error page.
                // (Fixed typo in the user-visible title: "occured" -> "occurred".)
                return $crate::web::page::Page::new(format!("{:?}", e))
                    .title("An error has occurred")
                    .set_status(::iron::status::BadRequest)
                    .to_resp("resp");
            }
        }
    };
}
/// cexpect will check an option and if it's not Some
/// it will return an error page response
macro_rules! cexpect {
    ($option:expr) => {
        match $option {
            Some(v) => v,
            None => {
                // Missing value short-circuits with a BadRequest error page.
                // (Fixed typo in the user-visible title: "occured" -> "occurred".)
                return $crate::web::page::Page::new("Resource not found".to_owned())
                    .title("An error has occurred")
                    .set_status(::iron::status::BadRequest)
                    .to_resp("resp");
            }
        }
    };
}
/// Gets an extension from Request
///
/// Expands to `cexpect!` around `$req.extensions.get::<$ext>()`, so a missing
/// extension short-circuits the handler with an error page response.
macro_rules! extension {
    ($req:expr, $ext:ty) => {
        cexpect!($req.extensions.get::<$ext>())
    };
}
mod builds;
mod crate_details;
mod error;
mod file;
pub(crate) mod metrics;
mod pool;
mod releases;
mod routes;
mod rustdoc;
mod sitemap;
mod source;
use self::pool::Pool;
use handlebars_iron::{DirectorySource, HandlebarsEngine};
use iron::headers::{CacheControl, CacheDirective, ContentType, Expires, HttpDate};
use iron::modifiers::Redirect;
use iron::prelude::*;
use iron::{self, status, Handler, Listening, Url};
use postgres::Connection;
use router::NoRoute;
use rustc_serialize::json::{Json, ToJson};
use semver::{Version, VersionReq};
use staticfile::Static;
use std::collections::BTreeMap;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::time::Duration;
use std::{env, fmt};
use time;
#[cfg(test)]
use std::sync::{Arc, Mutex};
/// Duration of static files for staticfile and DatabaseFileHandler (in seconds)
const STATIC_FILE_CACHE_DURATION: u64 = 60 * 60 * 24 * 30 * 12; // 12 months
// Assets generated at build time into OUT_DIR and embedded in the binary.
const STYLE_CSS: &str = include_str!(concat!(env!("OUT_DIR"), "/style.css"));
const MENU_JS: &str = include_str!(concat!(env!("OUT_DIR"), "/menu.js"));
const INDEX_JS: &str = include_str!(concat!(env!("OUT_DIR"), "/index.js"));
const OPENSEARCH_XML: &[u8] = include_bytes!("opensearch.xml");
// Address the server binds to when none is provided to `Server::start`.
const DEFAULT_BIND: &str = "0.0.0.0:3000";
// Factory producing a database connection pool; boxed so tests can inject
// their own (see `Server::start_test`).
type PoolFactoryFn = dyn Fn() -> Pool + Send + Sync;
type PoolFactory = Box<PoolFactoryFn>;
/// Top-level request handler that tries sub-handlers in order:
/// shared rustdoc resources, router, database files, then static files.
struct CratesfyiHandler {
    shared_resource_handler: Box<dyn Handler>,
    router_handler: Box<dyn Handler>,
    database_file_handler: Box<dyn Handler>,
    static_handler: Box<dyn Handler>,
    // Used to build a fresh middleware chain for rendering error pages.
    pool_factory: PoolFactory,
}
impl CratesfyiHandler {
    /// Wraps `base` in the standard middleware chain: a database pool as
    /// before-middleware and the Handlebars engine as after-middleware.
    ///
    /// Panics if the templates in `./templates` fail to load.
    fn chain<H: Handler>(pool_factory: &PoolFactoryFn, base: H) -> Chain {
        // TODO: Use DocBuilderOptions for paths
        let mut hbse = HandlebarsEngine::new();
        hbse.add(Box::new(DirectorySource::new("./templates", ".hbs")));
        // load templates
        if let Err(e) = hbse.reload() {
            panic!("Failed to load handlebar templates: {}", e);
        }
        let mut chain = Chain::new(base);
        chain.link_before(pool_factory());
        chain.link_after(hbse);
        chain
    }

    /// Builds the full handler stack.
    ///
    /// Panics (via `expect`) if the `CRATESFYI_PREFIX` environment variable
    /// is not set.
    fn new(pool_factory: PoolFactory) -> CratesfyiHandler {
        let routes = routes::build_routes();
        let blacklisted_prefixes = routes.page_prefixes();
        let shared_resources = Self::chain(&pool_factory, rustdoc::SharedResourceHandler);
        let router_chain = Self::chain(&pool_factory, routes.iron_router());
        // Static files are served from $CRATESFYI_PREFIX/public_html.
        let prefix = PathBuf::from(
            env::var("CRATESFYI_PREFIX")
                .expect("the CRATESFYI_PREFIX environment variable is not set"),
        )
        .join("public_html");
        let static_handler =
            Static::new(prefix).cache(Duration::from_secs(STATIC_FILE_CACHE_DURATION));
        CratesfyiHandler {
            shared_resource_handler: Box::new(shared_resources),
            router_handler: Box::new(router_chain),
            // Database files are blocked for paths under known page prefixes.
            database_file_handler: Box::new(routes::BlockBlacklistedPrefixes::new(
                blacklisted_prefixes,
                Box::new(file::DatabaseFileHandler),
            )),
            static_handler: Box::new(static_handler),
            pool_factory,
        }
    }
}
impl Handler for CratesfyiHandler {
    /// Dispatches a request through the handler stack, falling back from one
    /// sub-handler to the next and finally rendering a `Nope` error page.
    fn handle(&self, req: &mut Request) -> IronResult<Response> {
        // try serving shared rustdoc resources first, then router, then db/static file handler
        // return 404 if none of them return Ok
        self.shared_resource_handler
            .handle(req)
            .or_else(|e| self.router_handler.handle(req).or(Err(e)))
            .or_else(|e| {
                // if router fails try to serve files from database first
                self.database_file_handler.handle(req).or(Err(e))
            })
            .or_else(|e| {
                // and then try static handler. if all of them fails, return 404
                self.static_handler.handle(req).or(Err(e))
            })
            .or_else(|e| {
                // Every handler failed: map the error to a `Nope` variant.
                let err = if let Some(err) = e.error.downcast::<error::Nope>() {
                    *err
                } else if e.error.downcast::<NoRoute>().is_some() {
                    error::Nope::ResourceNotFound
                } else {
                    panic!("all cratesfyi errors should be of type Nope");
                };
                if let error::Nope::ResourceNotFound = err {
                    // print the path of the URL that triggered a 404 error
                    struct DebugPath<'a>(&'a iron::Url);
                    impl<'a> fmt::Display for DebugPath<'a> {
                        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                            for path_elem in self.0.path() {
                                write!(f, "/{}", path_elem)?;
                            }
                            if let Some(query) = self.0.query() {
                                write!(f, "?{}", query)?;
                            }
                            if let Some(hash) = self.0.fragment() {
                                write!(f, "#{}", hash)?;
                            }
                            Ok(())
                        }
                    }
                    debug!("Path not found: {}", DebugPath(&req.url));
                }
                // Render the error page through a fresh middleware chain.
                Self::chain(&self.pool_factory, err).handle(req)
            })
    }
}
/// Outcome of looking up a crate release by (possibly corrected) name.
struct MatchVersion {
    /// Represents the crate name that was found when attempting to load a crate release.
    ///
    /// `match_version` will attempt to match a provided crate name against similar crate names with
    /// dashes (`-`) replaced with underscores (`_`) and vice versa.
    pub corrected_name: Option<String>,
    pub version: MatchSemver,
}

impl MatchVersion {
    /// If the matched version was an exact match to the requested crate name, returns the
    /// `MatchSemver` for the query. If the lookup required a dash/underscore conversion, returns
    /// `None`.
    fn assume_exact(self) -> Option<MatchSemver> {
        match self.corrected_name {
            None => Some(self.version),
            Some(_) => None,
        }
    }
}

/// Represents the possible results of attempting to load a version requirement.
/// The id (i32) of the release is stored to simplify successive queries.
#[derive(Debug, Clone, PartialEq, Eq)]
enum MatchSemver {
    /// `match_version` was given an exact version, which matched a saved crate version.
    Exact((String, i32)),
    /// `match_version` was given a semver version requirement, which matched the given saved crate
    /// version.
    Semver((String, i32)),
}

impl MatchSemver {
    /// Discard information about whether the loaded version was an exact match, and return the
    /// matched version string and id.
    pub fn into_parts(self) -> (String, i32) {
        match self {
            MatchSemver::Exact(parts) => parts,
            MatchSemver::Semver(parts) => parts,
        }
    }
}
/// Checks the database for crate releases that match the given name and version.
///
/// `version` may be an exact version number or loose semver version requirement. The return value
/// will indicate whether the given version exactly matched a version number from the database.
///
/// This function will also check for crates where dashes in the name (`-`) have been replaced with
/// underscores (`_`) and vice-versa. The return value will indicate whether the crate name has
/// been matched exactly, or if there has been a "correction" in the name that matched instead.
fn match_version(conn: &Connection, name: &str, version: Option<&str>) -> Option<MatchVersion> {
    // version is an Option<&str> from router::Router::get
    // need to decode first
    use url::percent_encoding::percent_decode;
    // Normalize the requested version: percent-decode it, and treat a missing
    // value or "newest"/"latest" as the wildcard "*".
    let req_version = version
        .and_then(|v| match percent_decode(v.as_bytes()).decode_utf8() {
            Ok(p) => Some(p),
            Err(_) => None,
        })
        .map(|v| {
            if v == "newest" || v == "latest" {
                "*".into()
            } else {
                v
            }
        })
        .unwrap_or_else(|| "*".into());
    let mut corrected_name = None;
    // All non-yanked releases with a matching normalized name, as (version, id).
    let versions: Vec<(String, i32)> = {
        let query = "SELECT name, version, releases.id
            FROM releases INNER JOIN crates ON releases.crate_id = crates.id
            WHERE normalize_crate_name(name) = normalize_crate_name($1) AND yanked = false";
        let rows = conn.query(query, &[&name]).unwrap();
        let mut rows = rows.iter().peekable();
        if let Some(row) = rows.peek() {
            // Record whether the stored name differs from the requested one
            // (dash/underscore correction).
            let db_name = row.get(0);
            if db_name != name {
                corrected_name = Some(db_name);
            }
        };
        rows.map(|row| (row.get(1), row.get(2))).collect()
    };
    // first check for exact match
    // we can't expect users to use semver in query
    for version in &versions {
        if version.0 == req_version {
            return Some(MatchVersion {
                corrected_name,
                version: MatchSemver::Exact(version.clone()),
            });
        }
    }
    // Now try to match with semver
    let req_sem_ver = VersionReq::parse(&req_version).ok()?;
    // we need to sort versions first
    let versions_sem = {
        let mut versions_sem: Vec<(Version, i32)> = Vec::new();
        for version in &versions {
            // in theory a crate must always have a semver compatible version
            // but check result just in case
            versions_sem.push((Version::parse(&version.0).ok()?, version.1));
        }
        versions_sem.sort();
        versions_sem.reverse();
        versions_sem
    };
    // Highest matching version wins (the list is sorted descending).
    for version in &versions_sem {
        if req_sem_ver.matches(&version.0) {
            return Some(MatchVersion {
                corrected_name,
                version: MatchSemver::Semver((version.0.to_string(), version.1)),
            });
        }
    }
    // semver is acting weird for '*' (any) range if a crate only have pre-release versions
    // return first version if requested version is '*'
    if req_version == "*" && !versions_sem.is_empty() {
        return Some(MatchVersion {
            corrected_name,
            version: MatchSemver::Semver((versions_sem[0].0.to_string(), versions_sem[0].1)),
        });
    }
    None
}
/// Wrapper around the Markdown parser and renderer to render markdown
/// with a fixed set of extensions and safe output enabled.
fn render_markdown(text: &str) -> String {
    use comrak::{markdown_to_html, ComrakOptions};
    let mut opts = ComrakOptions::default();
    opts.safe = true;
    opts.ext_superscript = true;
    opts.ext_table = true;
    opts.ext_autolink = true;
    opts.ext_tasklist = true;
    opts.ext_strikethrough = true;
    markdown_to_html(text, &opts)
}
/// Handle to the running Iron HTTP server.
pub struct Server {
    inner: Listening,
}
impl Server {
    /// Starts the web server on `addr` (or `DEFAULT_BIND` when `None`).
    pub fn start(addr: Option<&str>) -> Self {
        let server = Self::start_inner(addr.unwrap_or(DEFAULT_BIND), Box::new(Pool::new));
        info!("Running docs.rs web server on http://{}", server.addr());
        server
    }

    /// Starts a test server on an OS-assigned port, backed by the given
    /// shared connection.
    #[cfg(test)]
    pub(crate) fn start_test(conn: Arc<Mutex<Connection>>) -> Self {
        Self::start_inner(
            "127.0.0.1:0",
            Box::new(move || Pool::new_simple(conn.clone())),
        )
    }

    fn start_inner(addr: &str, pool_factory: PoolFactory) -> Self {
        // poke all the metrics counters to instantiate and register them
        metrics::TOTAL_BUILDS.inc_by(0);
        metrics::SUCCESSFUL_BUILDS.inc_by(0);
        metrics::FAILED_BUILDS.inc_by(0);
        metrics::NON_LIBRARY_BUILDS.inc_by(0);
        metrics::UPLOADED_FILES_TOTAL.inc_by(0);
        let cratesfyi = CratesfyiHandler::new(pool_factory);
        // Panics when the socket cannot be bound.
        let inner = Iron::new(cratesfyi)
            .http(addr)
            .unwrap_or_else(|_| panic!("Failed to bind to socket on {}", addr));
        Server { inner }
    }

    /// The socket address the server is actually listening on.
    pub(crate) fn addr(&self) -> SocketAddr {
        self.inner.socket
    }

    /// Iron is bugged, and it never closes the server even when the listener is dropped. To
    /// avoid never-ending tests this method forgets about the server, leaking it and allowing the
    /// program to end.
    ///
    /// The OS will then close all the dangling servers once the process exits.
    ///
    /// https://docs.rs/iron/0.5/iron/struct.Listening.html#method.close
    #[cfg(test)]
    pub(crate) fn leak(self) {
        std::mem::forget(self.inner);
    }
}
/// Converts Timespec to nice readable relative time string
/// (e.g. "3 hours ago", or an absolute date when older than 5 days).
fn duration_to_str(ts: time::Timespec) -> String {
    let then = time::at(ts);
    let delta = time::now() - then;
    let days = delta.num_days();
    let hours = delta.num_hours();
    let minutes = delta.num_minutes();
    let seconds = delta.num_seconds();
    // Fall through from the coarsest unit to the finest one that applies.
    if days > 5 {
        return format!("{}", then.strftime("%b %d, %Y").unwrap());
    }
    if days > 1 {
        return format!("{} days ago", days);
    }
    if days == 1 {
        return "one day ago".to_string();
    }
    if hours > 1 {
        return format!("{} hours ago", hours);
    }
    if hours == 1 {
        return "an hour ago".to_string();
    }
    if minutes > 1 {
        return format!("{} minutes ago", minutes);
    }
    if minutes == 1 {
        return "one minute ago".to_string();
    }
    if seconds > 0 {
        return format!("{} seconds ago", seconds);
    }
    "just now".to_string()
}
/// Builds a `302 Found` response redirecting to `url`.
///
/// The `Expires` header is set to "now" so clients do not cache the redirect.
fn redirect(url: Url) -> Response {
    let mut response = Response::with((status::Found, Redirect(url)));
    response.headers.set(Expires(HttpDate(time::now())));
    response
}
/// Builds the `scheme://host[:port]` base for redirects from the request,
/// preferring the CloudFront-forwarded protocol header when present.
fn redirect_base(req: &Request) -> String {
    // Try to get the scheme from CloudFront first, and then from iron
    let scheme = req
        .headers
        .get_raw("cloudfront-forwarded-proto")
        .and_then(|values| values.get(0))
        .and_then(|value| std::str::from_utf8(value).ok())
        .filter(|proto| *proto == "http" || *proto == "https")
        .unwrap_or_else(|| req.url.scheme());
    // Only include the port if it's needed
    // NOTE(review): only port 80 is treated as a default; an https URL on
    // port 443 would render as "https://host:443" — confirm this is intended.
    let port = req.url.port();
    if port == 80 {
        format!("{}://{}", scheme, req.url.host())
    } else {
        format!("{}://{}:{}", scheme, req.url.host(), port)
    }
}
/// Serves the build-time-embedded stylesheet with a long-lived public cache
/// policy.
fn style_css_handler(_: &mut Request) -> IronResult<Response> {
    let mut response = Response::with((status::Ok, STYLE_CSS));
    response
        .headers
        .set(ContentType("text/css".parse().unwrap()));
    response.headers.set(CacheControl(vec![
        CacheDirective::Public,
        CacheDirective::MaxAge(STATIC_FILE_CACHE_DURATION as u32),
    ]));
    Ok(response)
}
/// Serves an embedded JavaScript asset with a long-lived public cache policy.
fn load_js(file_path_str: &'static str) -> IronResult<Response> {
    let mut response = Response::with((status::Ok, file_path_str));
    response
        .headers
        .set(ContentType("application/javascript".parse().unwrap()));
    response.headers.set(CacheControl(vec![
        CacheDirective::Public,
        CacheDirective::MaxAge(STATIC_FILE_CACHE_DURATION as u32),
    ]));
    Ok(response)
}
/// Serves the embedded OpenSearch descriptor with a long-lived public cache
/// policy.
fn opensearch_xml_handler(_: &mut Request) -> IronResult<Response> {
    let mut response = Response::with((status::Ok, OPENSEARCH_XML));
    response.headers.set(ContentType(
        "application/opensearchdescription+xml".parse().unwrap(),
    ));
    response.headers.set(CacheControl(vec![
        CacheDirective::Public,
        CacheDirective::MaxAge(STATIC_FILE_CACHE_DURATION as u32),
    ]));
    Ok(response)
}
/// Handles favicon requests.
///
/// A request for exactly `favicon.ico` is answered with 404 so the main
/// handler falls through to the static file from `public_html`; any other
/// matching path is redirected to the plain `favicon.ico`.
fn ico_handler(req: &mut Request) -> IronResult<Response> {
    if let Some(&"favicon.ico") = req.url.path().last() {
        // if we're looking for exactly "favicon.ico", we need to defer to the handler that loads
        // from `public_html`, so return a 404 here to make the main handler carry on
        Err(IronError::new(
            error::Nope::ResourceNotFound,
            status::NotFound,
        ))
    } else {
        // if we're looking for something like "favicon-20190317-1.35.0-nightly-c82834e2b.ico",
        // redirect to the plain one so that the above branch can trigger with the correct filename
        let url = ctry!(Url::parse(
            &format!("{}/favicon.ico", redirect_base(req))[..]
        ));
        Ok(redirect(url))
    }
}
/// MetaData used in header
#[derive(Debug)]
pub(crate) struct MetaData {
    name: String,
    version: String,
    description: Option<String>,
    target_name: Option<String>,
    // Populated from releases.rustdoc_status (see `from_crate`).
    rustdoc_status: bool,
    pub default_target: String,
}
impl MetaData {
    /// Loads header metadata for the release `name`/`version`, or `None` when
    /// no matching row exists in the database.
    fn from_crate(conn: &Connection, name: &str, version: &str) -> Option<MetaData> {
        conn.query(
            "SELECT crates.name,
                    releases.version,
                    releases.description,
                    releases.target_name,
                    releases.rustdoc_status,
                    releases.default_target
             FROM releases
             INNER JOIN crates ON crates.id = releases.crate_id
             WHERE crates.name = $1 AND releases.version = $2",
            &[&name, &version],
        )
        .unwrap()
        .iter()
        .next()
        .map(|row| MetaData {
            name: row.get(0),
            version: row.get(1),
            description: row.get(2),
            target_name: row.get(3),
            rustdoc_status: row.get(4),
            default_target: row.get(5),
        })
    }
}
impl ToJson for MetaData {
    /// Serializes the metadata into a JSON object for template rendering.
    fn to_json(&self) -> Json {
        let fields = vec![
            ("name", self.name.to_json()),
            ("version", self.version.to_json()),
            ("description", self.description.to_json()),
            ("target_name", self.target_name.to_json()),
            ("rustdoc_status", self.rustdoc_status.to_json()),
            ("default_target", self.default_target.to_json()),
        ];
        let map: BTreeMap<String, Json> = fields
            .into_iter()
            .map(|(key, value)| (key.to_owned(), value))
            .collect();
        map.to_json()
    }
}
#[cfg(test)]
mod test {
use crate::test::*;
use crate::web::match_version;
use html5ever::tendril::TendrilSink;
fn release(version: &str, db: &TestDatabase) -> i32 {
db.fake_release()
.name("foo")
.version(version)
.create()
.unwrap()
}
fn version(v: Option<&str>, db: &TestDatabase) -> Option<String> {
match_version(&db.conn(), "foo", v)
.and_then(|version| version.assume_exact().map(|semver| semver.into_parts().0))
}
fn semver(version: &'static str) -> Option<String> {
Some(version.into())
}
fn exact(version: &'static str) -> Option<String> {
Some(version.into())
}
fn clipboard_is_present_for_path(path: &str, web: &TestFrontend) -> bool {
let data = web.get(path).send().unwrap().text().unwrap();
let node = kuchiki::parse_html().one(data);
node.select("#clipboard").unwrap().count() == 1
}
#[test]
fn test_index_returns_success() {
wrapper(|env| {
let web = env.frontend();
assert!(web.get("/").send()?.status().is_success());
Ok(())
});
}
#[test]
fn test_show_clipboard_for_crate_pages() {
wrapper(|env| {
env.db()
.fake_release()
.name("fake_crate")
.version("0.0.1")
.source_file("test.rs", &[])
.create()
.unwrap();
let web = env.frontend();
assert!(clipboard_is_present_for_path(
"/crate/fake_crate/0.0.1",
web
));
assert!(clipboard_is_present_for_path(
"/crate/fake_crate/0.0.1/source/",
web
));
Ok(())
});
}
#[test]
fn test_hide_clipboard_for_non_crate_pages() {
wrapper(|env| {
env.db()
.fake_release()
.name("fake_crate")
.version("0.0.1")
.create()
.unwrap();
let web = env.frontend();
assert!(!clipboard_is_present_for_path("/about", web));
assert!(!clipboard_is_present_for_path("/releases", web));
assert!(!clipboard_is_present_for_path("/", web));
assert!(!clipboard_is_present_for_path(
"/fake_crate/0.0.1/fake_crate",
web
));
assert!(!clipboard_is_present_for_path("/not/a/real/path", web));
Ok(())
});
}
#[test]
fn standard_library_redirects() {
wrapper(|env| {
let web = env.frontend();
for krate in &["std", "alloc", "core", "proc_macro", "test"] {
let target = format!("https://doc.rust-lang.org/stable/{}/", krate);
// with or without slash
assert_redirect(&format!("/{}", krate), &target, web)?;
assert_redirect(&format!("/{}/", krate), &target, web)?;
}
Ok(())
})
}
#[test]
fn binary_docs_redirect_to_crate() {
    // Binary-only crates have no rustdoc output, so their doc URLs redirect
    // to the /crate details page.
    wrapper(|env| {
        let db = env.db();
        db.fake_release()
            .name("bat")
            .version("0.2.0")
            .binary(true)
            .create()
            .unwrap();
        let web = env.frontend();
        assert_redirect("/bat/0.2.0", "/crate/bat/0.2.0", web)?;
        assert_redirect("/bat/0.2.0/i686-unknown-linux-gnu", "/crate/bat/0.2.0", web)?;
        /* TODO: this should work (https://github.com/rust-lang/docs.rs/issues/603)
        assert_redirect("/bat/0.2.0/i686-unknown-linux-gnu/bat", "/crate/bat/0.2.0", web)?;
        assert_redirect("/bat/0.2.0/i686-unknown-linux-gnu/bat/", "/crate/bat/0.2.0/", web)?;
        */
        Ok(())
    })
}
#[test]
fn can_view_source() {
    // Both the /crate source browser and the rustdoc src/ path serve
    // uploaded source files.
    wrapper(|env| {
        let db = env.db();
        db.fake_release()
            .name("regex")
            .version("0.3.0")
            .source_file("src/main.rs", br#"println!("definitely valid rust")"#)
            .create()
            .unwrap();
        let web = env.frontend();
        assert_success("/crate/regex/0.3.0/source/src/main.rs", web)?;
        assert_success("/crate/regex/0.3.0/source", web)?;
        assert_success("/crate/regex/0.3.0/source/src", web)?;
        assert_success("/regex/0.3.0/src/regex/main.rs", web)?;
        Ok(())
    })
}
#[test]
// https://github.com/rust-lang/docs.rs/issues/223
fn prereleases_are_not_considered_for_semver() {
    wrapper(|env| {
        let db = env.db();
        // Shadow the module helpers with closures that implicitly use this
        // test's database. Note: release order below matters — each call
        // adds state the later assertions depend on.
        let version = |v| version(v, db);
        let release = |v| release(v, db);
        release("0.3.1-pre");
        assert_eq!(version(Some("*")), semver("0.3.1-pre"));
        release("0.3.1-alpha");
        assert_eq!(version(Some("0.3.1-alpha")), exact("0.3.1-alpha"));
        release("0.3.0");
        let three = semver("0.3.0");
        assert_eq!(version(None), three);
        // same thing but with "*"
        assert_eq!(version(Some("*")), three);
        // make sure exact matches still work
        assert_eq!(version(Some("0.3.0")), exact("0.3.0"));
        Ok(())
    });
}
#[test]
// https://github.com/rust-lang/docs.rs/issues/221
fn yanked_crates_are_not_considered() {
    wrapper(|env| {
        let db = env.db();
        let release_id = release("0.3.0", db);
        // Yank the only release directly in the database.
        let query = "UPDATE releases SET yanked = true WHERE id = $1 AND version = '0.3.0'";
        db.conn().query(query, &[&release_id]).unwrap();
        assert_eq!(version(None, db), None);
        assert_eq!(version(Some("0.3"), db), None);
        release("0.1.0+4.1", db);
        assert_eq!(version(Some("0.1.0+4.1"), db), exact("0.1.0+4.1"));
        assert_eq!(version(None, db), semver("0.1.0+4.1"));
        Ok(())
    });
}
#[test]
// vaguely related to https://github.com/rust-lang/docs.rs/issues/395
fn metadata_has_no_effect() {
    // Build-metadata suffixes (+…) must not affect version resolution.
    wrapper(|env| {
        let db = env.db();
        release("0.1.0+4.1", db);
        release("0.1.1", db);
        assert_eq!(version(None, db), semver("0.1.1"));
        release("0.5.1+zstd.1.4.4", db);
        assert_eq!(version(None, db), semver("0.5.1+zstd.1.4.4"));
        assert_eq!(version(Some("0.5"), db), semver("0.5.1+zstd.1.4.4"));
        assert_eq!(
            version(Some("0.5.1+zstd.1.4.4"), db),
            exact("0.5.1+zstd.1.4.4")
        );
        Ok(())
    });
}
}
Refactored web/mod.rs (#685)
Freed web/mod.rs from the tyranny of bad style
//! Web interface of cratesfyi
pub(crate) mod page;
use log::{debug, info};
/// ctry! (cratesfyitry) is extremely similar to try! and itry!
/// except it returns an error page response instead of plain Err.
macro_rules! ctry {
    ($result:expr) => {
        match $result {
            Ok(v) => v,
            Err(e) => {
                // Render the error as an HTML page instead of propagating it.
                // Fix: corrected the user-facing typo "occured" -> "occurred".
                return $crate::web::page::Page::new(format!("{:?}", e))
                    .title("An error has occurred")
                    .set_status(::iron::status::BadRequest)
                    .to_resp("resp");
            }
        }
    };
}
/// cexpect will check an option and if it's not Some
/// it will return an error page response
macro_rules! cexpect {
    ($option:expr) => {
        match $option {
            Some(v) => v,
            None => {
                // Fix: corrected the user-facing typo "occured" -> "occurred".
                // NOTE(review): a missing resource arguably warrants NotFound
                // rather than BadRequest, but callers may rely on the current
                // status — left unchanged.
                return $crate::web::page::Page::new("Resource not found".to_owned())
                    .title("An error has occurred")
                    .set_status(::iron::status::BadRequest)
                    .to_resp("resp");
            }
        }
    };
}
/// Gets an extension from Request
///
/// Expands to the value stored in the request's typed extension map,
/// returning an error page (via `cexpect!`) when the extension is absent.
macro_rules! extension {
    ($req:expr, $ext:ty) => {
        cexpect!($req.extensions.get::<$ext>())
    };
}
mod builds;
mod crate_details;
mod error;
mod file;
pub(crate) mod metrics;
mod pool;
mod releases;
mod routes;
mod rustdoc;
mod sitemap;
mod source;
use self::pool::Pool;
use handlebars_iron::{DirectorySource, HandlebarsEngine};
use iron::headers::{CacheControl, CacheDirective, ContentType, Expires, HttpDate};
use iron::modifiers::Redirect;
use iron::prelude::*;
use iron::{self, status, Handler, Listening, Url};
use postgres::Connection;
use router::NoRoute;
use rustc_serialize::json::{Json, ToJson};
use semver::{Version, VersionReq};
use staticfile::Static;
use std::collections::BTreeMap;
use std::net::SocketAddr;
use std::path::PathBuf;
use std::time::Duration;
use std::{env, fmt};
use time;
#[cfg(test)]
use std::sync::{Arc, Mutex};
/// Duration of static files for staticfile and DatabaseFileHandler (in seconds)
const STATIC_FILE_CACHE_DURATION: u64 = 60 * 60 * 24 * 30 * 12; // 12 months
// Static assets generated by the build script and embedded into the binary.
const STYLE_CSS: &str = include_str!(concat!(env!("OUT_DIR"), "/style.css"));
const MENU_JS: &str = include_str!(concat!(env!("OUT_DIR"), "/menu.js"));
const INDEX_JS: &str = include_str!(concat!(env!("OUT_DIR"), "/index.js"));
const OPENSEARCH_XML: &[u8] = include_bytes!("opensearch.xml");
// Default address the production server binds to when none is given.
const DEFAULT_BIND: &str = "0.0.0.0:3000";
/// Factory producing a fresh database connection `Pool`; boxed so the test
/// and production servers can inject different implementations.
type PoolFactoryFn = dyn Fn() -> Pool + Send + Sync;
type PoolFactory = Box<PoolFactoryFn>;
/// Top-level iron handler that fans a request out to the sub-handlers
/// (shared rustdoc assets, router, database files, static files) in order.
struct CratesfyiHandler {
    shared_resource_handler: Box<dyn Handler>,
    router_handler: Box<dyn Handler>,
    database_file_handler: Box<dyn Handler>,
    static_handler: Box<dyn Handler>,
    // Used to build a fresh chain when rendering error pages.
    pool_factory: PoolFactory,
}
impl CratesfyiHandler {
    /// Wraps `base` in an iron `Chain`: a database pool is linked before the
    /// handler runs and the handlebars template engine renders afterwards.
    ///
    /// Panics when the `./templates` directory cannot be loaded.
    fn chain<H: Handler>(pool_factory: &PoolFactoryFn, base: H) -> Chain {
        // TODO: Use DocBuilderOptions for paths
        let mut hbse = HandlebarsEngine::new();
        hbse.add(Box::new(DirectorySource::new("./templates", ".hbs")));
        // load templates
        if let Err(e) = hbse.reload() {
            panic!("Failed to load handlebar templates: {}", e);
        }
        let mut chain = Chain::new(base);
        chain.link_before(pool_factory());
        chain.link_after(hbse);
        chain
    }
    /// Assembles all sub-handlers.
    ///
    /// Panics when the `CRATESFYI_PREFIX` environment variable is not set;
    /// static files are served from `$CRATESFYI_PREFIX/public_html`.
    fn new(pool_factory: PoolFactory) -> CratesfyiHandler {
        let routes = routes::build_routes();
        let blacklisted_prefixes = routes.page_prefixes();
        let shared_resources = Self::chain(&pool_factory, rustdoc::SharedResourceHandler);
        let router_chain = Self::chain(&pool_factory, routes.iron_router());
        let prefix = PathBuf::from(
            env::var("CRATESFYI_PREFIX")
                .expect("the CRATESFYI_PREFIX environment variable is not set"),
        )
        .join("public_html");
        let static_handler =
            Static::new(prefix).cache(Duration::from_secs(STATIC_FILE_CACHE_DURATION));
        CratesfyiHandler {
            shared_resource_handler: Box::new(shared_resources),
            router_handler: Box::new(router_chain),
            // Database files are only served for paths that don't collide
            // with the router's page prefixes.
            database_file_handler: Box::new(routes::BlockBlacklistedPrefixes::new(
                blacklisted_prefixes,
                Box::new(file::DatabaseFileHandler),
            )),
            static_handler: Box::new(static_handler),
            pool_factory,
        }
    }
}
impl Handler for CratesfyiHandler {
    /// Dispatches the request through the sub-handlers in priority order and
    /// renders an error page when all of them fail.
    fn handle(&self, req: &mut Request) -> IronResult<Response> {
        // try serving shared rustdoc resources first, then router, then db/static file handler
        // return 404 if none of them return Ok
        self.shared_resource_handler
            .handle(req)
            .or_else(|e| self.router_handler.handle(req).or(Err(e)))
            .or_else(|e| {
                // if router fails try to serve files from database first
                self.database_file_handler.handle(req).or(Err(e))
            })
            .or_else(|e| {
                // and then try static handler. if all of them fails, return 404
                self.static_handler.handle(req).or(Err(e))
            })
            .or_else(|e| {
                // Every error raised by our handlers must be a Nope; iron's
                // NoRoute is mapped to a 404.
                let err = if let Some(err) = e.error.downcast::<error::Nope>() {
                    *err
                } else if e.error.downcast::<NoRoute>().is_some() {
                    error::Nope::ResourceNotFound
                } else {
                    panic!("all cratesfyi errors should be of type Nope");
                };
                if let error::Nope::ResourceNotFound = err {
                    // print the path of the URL that triggered a 404 error
                    struct DebugPath<'a>(&'a iron::Url);
                    impl<'a> fmt::Display for DebugPath<'a> {
                        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                            for path_elem in self.0.path() {
                                write!(f, "/{}", path_elem)?;
                            }
                            if let Some(query) = self.0.query() {
                                write!(f, "?{}", query)?;
                            }
                            if let Some(hash) = self.0.fragment() {
                                write!(f, "#{}", hash)?;
                            }
                            Ok(())
                        }
                    }
                    debug!("Path not found: {}", DebugPath(&req.url));
                }
                // Render the error as a page through a freshly-built chain.
                Self::chain(&self.pool_factory, err).handle(req)
            })
    }
}
/// Successful result of a `match_version` lookup.
struct MatchVersion {
    /// Represents the crate name that was found when attempting to load a crate release.
    ///
    /// `match_version` will attempt to match a provided crate name against similar crate names with
    /// dashes (`-`) replaced with underscores (`_`) and vice versa.
    pub corrected_name: Option<String>,
    /// The matched release, tagged by whether the version matched exactly or
    /// via a semver requirement.
    pub version: MatchSemver,
}
impl MatchVersion {
    /// If the matched version was an exact match to the requested crate name, returns the
    /// `MatchSemver` for the query. If the lookup required a dash/underscore conversion,
    /// returns `None`.
    fn assume_exact(self) -> Option<MatchSemver> {
        match self.corrected_name {
            None => Some(self.version),
            Some(_) => None,
        }
    }
}
/// Represents the possible results of attempting to load a version requirement.
/// The id (i32) of the release is stored to simplify successive queries.
#[derive(Debug, Clone, PartialEq, Eq)]
enum MatchSemver {
    /// `match_version` was given an exact version, which matched a saved crate version.
    Exact((String, i32)),
    /// `match_version` was given a semver version requirement, which matched the given saved crate
    /// version.
    Semver((String, i32)),
}
impl MatchSemver {
    /// Discard information about whether the loaded version was an exact match, and return the
    /// matched version string and id.
    pub fn into_parts(self) -> (String, i32) {
        match self {
            MatchSemver::Exact(parts) => parts,
            MatchSemver::Semver(parts) => parts,
        }
    }
}
/// Checks the database for crate releases that match the given name and version.
///
/// `version` may be an exact version number or loose semver version requirement. The return value
/// will indicate whether the given version exactly matched a version number from the database.
///
/// This function will also check for crates where dashes in the name (`-`) have been replaced with
/// underscores (`_`) and vice-versa. The return value will indicate whether the crate name has
/// been matched exactly, or if there has been a "correction" in the name that matched instead.
fn match_version(conn: &Connection, name: &str, version: Option<&str>) -> Option<MatchVersion> {
    // version is an Option<&str> from router::Router::get, need to decode first
    use url::percent_encoding::percent_decode;
    // "newest"/"latest" (and a missing version) are treated as the wildcard
    // requirement "*".
    let req_version = version
        .and_then(|v| percent_decode(v.as_bytes()).decode_utf8().ok())
        .map(|v| {
            if v == "newest" || v == "latest" {
                "*".into()
            } else {
                v
            }
        })
        .unwrap_or_else(|| "*".into());
    let mut corrected_name = None;
    let versions: Vec<(String, i32)> = {
        let query = "SELECT name, version, releases.id
FROM releases INNER JOIN crates ON releases.crate_id = crates.id
WHERE normalize_crate_name(name) = normalize_crate_name($1) AND yanked = false";
        let rows = conn.query(query, &[&name]).unwrap();
        let mut rows = rows.iter().peekable();
        // Every returned row carries the same stored crate name, so peeking at
        // the first row is enough to detect a dash/underscore "correction".
        if let Some(row) = rows.peek() {
            let db_name = row.get(0);
            if db_name != name {
                corrected_name = Some(db_name);
            }
        };
        rows.map(|row| (row.get(1), row.get(2))).collect()
    };
    // first check for exact match, we can't expect users to use semver in query
    if let Some((version, id)) = versions.iter().find(|(vers, _)| vers == &req_version) {
        return Some(MatchVersion {
            corrected_name,
            version: MatchSemver::Exact((version.to_owned(), *id)),
        });
    }
    // Now try to match with semver
    let req_sem_ver = VersionReq::parse(&req_version).ok()?;
    // we need to sort versions first
    let versions_sem = {
        let mut versions_sem: Vec<(Version, i32)> = Vec::new();
        for version in &versions {
            // in theory a crate must always have a semver compatible version, but check result just in case
            versions_sem.push((Version::parse(&version.0).ok()?, version.1));
        }
        // Sort descending so the first semver match is the highest version.
        versions_sem.sort();
        versions_sem.reverse();
        versions_sem
    };
    if let Some((version, id)) = versions_sem
        .iter()
        .find(|(vers, _)| req_sem_ver.matches(vers))
    {
        return Some(MatchVersion {
            corrected_name,
            version: MatchSemver::Semver((version.to_string(), *id)),
        });
    }
    // semver is acting weird for '*' (any) range if a crate only have pre-release versions
    // return first version if requested version is '*'
    if req_version == "*" && !versions_sem.is_empty() {
        return Some(MatchVersion {
            corrected_name,
            version: MatchSemver::Semver((versions_sem[0].0.to_string(), versions_sem[0].1)),
        });
    }
    None
}
/// Wrapper around the Markdown parser and renderer to render markdown
fn render_markdown(text: &str) -> String {
    use comrak::{markdown_to_html, ComrakOptions};
    // `safe` sanitizes raw HTML and dangerous URLs; the `ext_*` flags enable
    // the GitHub-style extensions we support in readmes.
    let mut opts = ComrakOptions::default();
    opts.safe = true;
    opts.ext_superscript = true;
    opts.ext_table = true;
    opts.ext_autolink = true;
    opts.ext_tasklist = true;
    opts.ext_strikethrough = true;
    markdown_to_html(text, &opts)
}
/// Handle to the running iron HTTP server.
pub struct Server {
    inner: Listening,
}
impl Server {
    /// Starts the production server on `addr` (or `DEFAULT_BIND` when `None`).
    pub fn start(addr: Option<&str>) -> Self {
        let server = Self::start_inner(addr.unwrap_or(DEFAULT_BIND), Box::new(Pool::new));
        info!("Running docs.rs web server on http://{}", server.addr());
        server
    }
    /// Starts a test server on an ephemeral localhost port, backed by the
    /// given shared database connection.
    #[cfg(test)]
    pub(crate) fn start_test(conn: Arc<Mutex<Connection>>) -> Self {
        Self::start_inner(
            "127.0.0.1:0",
            Box::new(move || Pool::new_simple(conn.clone())),
        )
    }
    /// Binds the iron server on `addr`. Panics when the socket cannot be bound.
    fn start_inner(addr: &str, pool_factory: PoolFactory) -> Self {
        // poke all the metrics counters to instantiate and register them
        metrics::TOTAL_BUILDS.inc_by(0);
        metrics::SUCCESSFUL_BUILDS.inc_by(0);
        metrics::FAILED_BUILDS.inc_by(0);
        metrics::NON_LIBRARY_BUILDS.inc_by(0);
        metrics::UPLOADED_FILES_TOTAL.inc_by(0);
        let cratesfyi = CratesfyiHandler::new(pool_factory);
        let inner = Iron::new(cratesfyi)
            .http(addr)
            .unwrap_or_else(|_| panic!("Failed to bind to socket on {}", addr));
        Server { inner }
    }
    /// The socket address the server is actually listening on.
    pub(crate) fn addr(&self) -> SocketAddr {
        self.inner.socket
    }
    /// Iron is bugged, and it never closes the server even when the listener is dropped. To
    /// avoid never-ending tests this method forgets about the server, leaking it and allowing the
    /// program to end.
    ///
    /// The OS will then close all the dangling servers once the process exits.
    ///
    /// https://docs.rs/iron/0.5/iron/struct.Listening.html#method.close
    #[cfg(test)]
    pub(crate) fn leak(self) {
        std::mem::forget(self.inner);
    }
}
/// Converts Timespec to nice readable relative time string
fn duration_to_str(ts: time::Timespec) -> String {
    let tm = time::at(ts);
    let delta = time::now() - tm;
    // Total (not per-unit) counts; each is the whole delta in that unit.
    let days = delta.num_days();
    let hours = delta.num_hours();
    let minutes = delta.num_minutes();
    let seconds = delta.num_seconds();
    if days > 5 {
        // Too long ago for a relative phrase — show the date instead.
        format!("{}", tm.strftime("%b %d, %Y").unwrap())
    } else if days >= 2 {
        format!("{} days ago", days)
    } else if days == 1 {
        "one day ago".to_string()
    } else if hours > 1 {
        format!("{} hours ago", hours)
    } else if hours == 1 {
        "an hour ago".to_string()
    } else if minutes > 1 {
        format!("{} minutes ago", minutes)
    } else if minutes == 1 {
        "one minute ago".to_string()
    } else if seconds > 0 {
        format!("{} seconds ago", seconds)
    } else {
        "just now".to_string()
    }
}
/// Creates a `Response` which redirects to the given path on the scheme/host/port from the given
/// `Request`.
fn redirect(url: Url) -> Response {
    let mut response = Response::with((status::Found, Redirect(url)));
    // Expire immediately so clients do not cache the redirect.
    response.headers.set(Expires(HttpDate(time::now())));
    response
}
/// Builds the `scheme://host[:port]` prefix for redirects issued from `req`.
fn redirect_base(req: &Request) -> String {
    // Try to get the scheme from CloudFront first, and then from iron
    let scheme = req
        .headers
        .get_raw("cloudfront-forwarded-proto")
        .and_then(|values| values.get(0))
        .and_then(|value| std::str::from_utf8(value).ok())
        .filter(|proto| ["http", "https"].contains(proto))
        .unwrap_or_else(|| req.url.scheme());
    let host = req.url.host();
    let port = req.url.port();
    // Only include the port if it's needed
    if port == 80 {
        format!("{}://{}", scheme, host)
    } else {
        format!("{}://{}:{}", scheme, host, port)
    }
}
/// Serves the compiled stylesheet with a long-lived public cache policy.
fn style_css_handler(_: &mut Request) -> IronResult<Response> {
    let mut resp = Response::with((status::Ok, STYLE_CSS));
    resp.headers.set(ContentType("text/css".parse().unwrap()));
    resp.headers.set(CacheControl(vec![
        CacheDirective::Public,
        CacheDirective::MaxAge(STATIC_FILE_CACHE_DURATION as u32),
    ]));
    Ok(resp)
}
/// Builds a response serving the given embedded JavaScript source with a
/// long-lived public cache policy.
fn load_js(file_path_str: &'static str) -> IronResult<Response> {
    let mut resp = Response::with((status::Ok, file_path_str));
    resp.headers
        .set(ContentType("application/javascript".parse().unwrap()));
    resp.headers.set(CacheControl(vec![
        CacheDirective::Public,
        CacheDirective::MaxAge(STATIC_FILE_CACHE_DURATION as u32),
    ]));
    Ok(resp)
}
/// Serves the embedded OpenSearch descriptor with a long-lived cache policy.
fn opensearch_xml_handler(_: &mut Request) -> IronResult<Response> {
    let mut resp = Response::with((status::Ok, OPENSEARCH_XML));
    resp.headers.set(ContentType(
        "application/opensearchdescription+xml".parse().unwrap(),
    ));
    resp.headers.set(CacheControl(vec![
        CacheDirective::Public,
        CacheDirective::MaxAge(STATIC_FILE_CACHE_DURATION as u32),
    ]));
    Ok(resp)
}
/// Handles favicon requests under versioned rustdoc paths.
fn ico_handler(req: &mut Request) -> IronResult<Response> {
    match req.url.path().last() {
        // if we're looking for exactly "favicon.ico", we need to defer to the handler that
        // loads from `public_html`, so return a 404 here to make the main handler carry on
        Some(&"favicon.ico") => Err(IronError::new(
            error::Nope::ResourceNotFound,
            status::NotFound,
        )),
        // if we're looking for something like "favicon-20190317-1.35.0-nightly-c82834e2b.ico",
        // redirect to the plain one so that the above branch can trigger with the correct
        // filename
        _ => {
            let url = ctry!(Url::parse(
                &format!("{}/favicon.ico", redirect_base(req))[..]
            ));
            Ok(redirect(url))
        }
    }
}
/// MetaData used in header
///
/// Populated from the `crates`/`releases` tables by `MetaData::from_crate`.
#[derive(Debug)]
pub(crate) struct MetaData {
    name: String,
    version: String,
    description: Option<String>,
    target_name: Option<String>,
    // Loaded from `releases.rustdoc_status`; presumably whether rustdoc
    // output exists for this release — TODO confirm against the builder.
    rustdoc_status: bool,
    pub default_target: String,
}
impl MetaData {
    /// Loads the header metadata for `name`/`version` from the database, or
    /// `None` when no such release exists.
    fn from_crate(conn: &Connection, name: &str, version: &str) -> Option<MetaData> {
        let rows = conn
            .query(
                "SELECT crates.name,
releases.version,
releases.description,
releases.target_name,
releases.rustdoc_status,
releases.default_target
FROM releases
INNER JOIN crates ON crates.id = releases.crate_id
WHERE crates.name = $1 AND releases.version = $2",
                &[&name, &version],
            )
            .unwrap();
        // The (name, version) pair is unique, so the first row is the release.
        let row = rows.iter().next()?;
        Some(MetaData {
            name: row.get(0),
            version: row.get(1),
            description: row.get(2),
            target_name: row.get(3),
            rustdoc_status: row.get(4),
            default_target: row.get(5),
        })
    }
}
impl ToJson for MetaData {
    /// Serializes the crate metadata into a JSON object for the templates.
    fn to_json(&self) -> Json {
        let mut map: BTreeMap<String, Json> = BTreeMap::new();
        let fields = vec![
            ("name", self.name.to_json()),
            ("version", self.version.to_json()),
            ("description", self.description.to_json()),
            ("target_name", self.target_name.to_json()),
            ("rustdoc_status", self.rustdoc_status.to_json()),
            ("default_target", self.default_target.to_json()),
        ];
        for (key, value) in fields {
            map.insert(key.to_owned(), value);
        }
        map.to_json()
    }
}
/// Tests for version matching and the web frontend.
#[cfg(test)]
mod test {
    use crate::test::*;
    use crate::web::match_version;
    use html5ever::tendril::TendrilSink;
    /// Inserts a fake release of crate `foo` with the given version and
    /// returns its release id.
    fn release(version: &str, db: &TestDatabase) -> i32 {
        db.fake_release()
            .name("foo")
            .version(version)
            .create()
            .unwrap()
    }
    /// Runs `match_version` for crate `foo` and returns the matched version
    /// string (only when the crate name matched without correction).
    fn version(v: Option<&str>, db: &TestDatabase) -> Option<String> {
        match_version(&db.conn(), "foo", v)
            .and_then(|version| version.assume_exact().map(|semver| semver.into_parts().0))
    }
    // The two helpers below are identical; they only document at the call
    // site whether a semver match or an exact match is expected.
    fn semver(version: &'static str) -> Option<String> {
        Some(version.into())
    }
    fn exact(version: &'static str) -> Option<String> {
        Some(version.into())
    }
    /// True when the page at `path` contains exactly one `#clipboard` element.
    fn clipboard_is_present_for_path(path: &str, web: &TestFrontend) -> bool {
        let data = web.get(path).send().unwrap().text().unwrap();
        let node = kuchiki::parse_html().one(data);
        node.select("#clipboard").unwrap().count() == 1
    }
    #[test]
    fn test_index_returns_success() {
        wrapper(|env| {
            let web = env.frontend();
            assert!(web.get("/").send()?.status().is_success());
            Ok(())
        });
    }
    #[test]
    fn test_show_clipboard_for_crate_pages() {
        wrapper(|env| {
            env.db()
                .fake_release()
                .name("fake_crate")
                .version("0.0.1")
                .source_file("test.rs", &[])
                .create()
                .unwrap();
            let web = env.frontend();
            assert!(clipboard_is_present_for_path(
                "/crate/fake_crate/0.0.1",
                web
            ));
            assert!(clipboard_is_present_for_path(
                "/crate/fake_crate/0.0.1/source/",
                web
            ));
            Ok(())
        });
    }
    #[test]
    fn test_hide_clipboard_for_non_crate_pages() {
        wrapper(|env| {
            env.db()
                .fake_release()
                .name("fake_crate")
                .version("0.0.1")
                .create()
                .unwrap();
            let web = env.frontend();
            assert!(!clipboard_is_present_for_path("/about", web));
            assert!(!clipboard_is_present_for_path("/releases", web));
            assert!(!clipboard_is_present_for_path("/", web));
            assert!(!clipboard_is_present_for_path(
                "/fake_crate/0.0.1/fake_crate",
                web
            ));
            assert!(!clipboard_is_present_for_path("/not/a/real/path", web));
            Ok(())
        });
    }
    #[test]
    fn standard_library_redirects() {
        wrapper(|env| {
            let web = env.frontend();
            for krate in &["std", "alloc", "core", "proc_macro", "test"] {
                let target = format!("https://doc.rust-lang.org/stable/{}/", krate);
                // with or without slash
                assert_redirect(&format!("/{}", krate), &target, web)?;
                assert_redirect(&format!("/{}/", krate), &target, web)?;
            }
            Ok(())
        })
    }
    #[test]
    fn binary_docs_redirect_to_crate() {
        wrapper(|env| {
            let db = env.db();
            db.fake_release()
                .name("bat")
                .version("0.2.0")
                .binary(true)
                .create()
                .unwrap();
            let web = env.frontend();
            assert_redirect("/bat/0.2.0", "/crate/bat/0.2.0", web)?;
            assert_redirect("/bat/0.2.0/i686-unknown-linux-gnu", "/crate/bat/0.2.0", web)?;
            /* TODO: this should work (https://github.com/rust-lang/docs.rs/issues/603)
            assert_redirect("/bat/0.2.0/i686-unknown-linux-gnu/bat", "/crate/bat/0.2.0", web)?;
            assert_redirect("/bat/0.2.0/i686-unknown-linux-gnu/bat/", "/crate/bat/0.2.0/", web)?;
            */
            Ok(())
        })
    }
    #[test]
    fn can_view_source() {
        wrapper(|env| {
            let db = env.db();
            db.fake_release()
                .name("regex")
                .version("0.3.0")
                .source_file("src/main.rs", br#"println!("definitely valid rust")"#)
                .create()
                .unwrap();
            let web = env.frontend();
            assert_success("/crate/regex/0.3.0/source/src/main.rs", web)?;
            assert_success("/crate/regex/0.3.0/source", web)?;
            assert_success("/crate/regex/0.3.0/source/src", web)?;
            assert_success("/regex/0.3.0/src/regex/main.rs", web)?;
            Ok(())
        })
    }
    #[test]
    // https://github.com/rust-lang/docs.rs/issues/223
    fn prereleases_are_not_considered_for_semver() {
        wrapper(|env| {
            let db = env.db();
            // Shadow the helpers so every call implicitly uses this test's db.
            let version = |v| version(v, db);
            let release = |v| release(v, db);
            release("0.3.1-pre");
            assert_eq!(version(Some("*")), semver("0.3.1-pre"));
            release("0.3.1-alpha");
            assert_eq!(version(Some("0.3.1-alpha")), exact("0.3.1-alpha"));
            release("0.3.0");
            let three = semver("0.3.0");
            assert_eq!(version(None), three);
            // same thing but with "*"
            assert_eq!(version(Some("*")), three);
            // make sure exact matches still work
            assert_eq!(version(Some("0.3.0")), exact("0.3.0"));
            Ok(())
        });
    }
    #[test]
    // https://github.com/rust-lang/docs.rs/issues/221
    fn yanked_crates_are_not_considered() {
        wrapper(|env| {
            let db = env.db();
            let release_id = release("0.3.0", db);
            let query = "UPDATE releases SET yanked = true WHERE id = $1 AND version = '0.3.0'";
            db.conn().query(query, &[&release_id]).unwrap();
            assert_eq!(version(None, db), None);
            assert_eq!(version(Some("0.3"), db), None);
            release("0.1.0+4.1", db);
            assert_eq!(version(Some("0.1.0+4.1"), db), exact("0.1.0+4.1"));
            assert_eq!(version(None, db), semver("0.1.0+4.1"));
            Ok(())
        });
    }
    #[test]
    // vaguely related to https://github.com/rust-lang/docs.rs/issues/395
    fn metadata_has_no_effect() {
        wrapper(|env| {
            let db = env.db();
            release("0.1.0+4.1", db);
            release("0.1.1", db);
            assert_eq!(version(None, db), semver("0.1.1"));
            release("0.5.1+zstd.1.4.4", db);
            assert_eq!(version(None, db), semver("0.5.1+zstd.1.4.4"));
            assert_eq!(version(Some("0.5"), db), semver("0.5.1+zstd.1.4.4"));
            assert_eq!(
                version(Some("0.5.1+zstd.1.4.4"), db),
                exact("0.5.1+zstd.1.4.4")
            );
            Ok(())
        });
    }
}
|
use std::alloc::alloc;
use std::alloc::dealloc;
use std::alloc::Layout;
use std::mem::size_of;
use std::ptr::null_mut;
const _PAGE_SIZE: usize = 0x1000;

/// Header of a bump-allocator page.
///
/// A normal page is a `_PAGE_SIZE`-aligned, `_PAGE_SIZE`-byte block that
/// starts with this header; objects are bump-allocated after it. The last
/// word of the page holds a pointer to an extension page (or null); the
/// words directly below it form a downward-growing array of pointers to
/// "exclusive" pages owned by this page.
pub struct _Page {
    /// Page that currently serves allocations (an extension page once this
    /// one is full). Null marks an oversized page.
    current_page: *mut _Page,
    /// Offset of the next free byte from the page start. For an oversized
    /// page this stores the low 32 bits of the allocation size instead.
    next_object_offset: i32,
    /// Number of exclusive pages referenced from this page. For an oversized
    /// page this stores the high 32 bits of the allocation size instead.
    exclusive_pages: i32,
}

impl _Page {
    /// Re-initializes the header: no extension page, no exclusive pages, and
    /// the bump pointer right after the header.
    pub fn reset(&mut self) {
        unsafe {
            *(self.get_extension_page_location()) = null_mut();
        }
        self.next_object_offset = size_of::<_Page>() as i32;
        self.exclusive_pages = 0;
        self.current_page = &mut *self;
    }

    /// Frees all extension and exclusive pages, then resets this page.
    pub fn clear(&mut self) {
        self.deallocate_extensions();
        self.reset();
    }

    /// An oversized page is a dedicated allocation for a single object larger
    /// than a page; it is marked by a null `current_page`.
    pub fn is_oversized(&self) -> bool {
        self.current_page.is_null()
    }

    /// Location of the extension-page pointer: the last word of this page.
    fn get_extension_page_location(&self) -> *mut *mut _Page {
        // Convert self to page location
        let self_location = self as *const _Page as usize;
        // Advance one page size so we are past the end of our page
        let location_behind_page = (self_location + _PAGE_SIZE) as *mut *mut _Page;
        // Go back one pointer size
        unsafe { location_behind_page.offset(-1) }
    }

    /// Address of the next free byte in this page.
    fn get_next_location(&self) -> usize {
        self as *const _Page as usize + self.next_object_offset as usize
    }

    fn set_next_location(&mut self, location: usize) {
        let self_location = self as *const _Page as usize;
        self.next_object_offset = (location - self_location) as i32;
    }

    /// Location where the next exclusive-page pointer would be stored
    /// (one slot below the deepest one in use).
    fn get_next_exclusive_page_location(&self) -> *mut *mut _Page {
        unsafe {
            self.get_extension_page_location()
                .offset(-(self.exclusive_pages as isize + 1))
        }
    }

    /// Allocates `size` bytes aligned to `align`, possibly creating extension
    /// or oversized pages, and returns a pointer to the aligned memory.
    pub fn allocate_raw(&mut self, size: usize, align: usize) -> *mut u8 {
        if self as *mut _Page != self.current_page {
            unsafe {
                // We're already known to be full, so we delegate to the current page
                let new_object = (*self.current_page).allocate_raw(size, align);
                // Possibly our current page was also full so we propagate back the new current page
                let allocating_page = _Page::get_page(new_object as usize);
                if allocating_page != self.current_page && (!(*allocating_page).is_oversized()) {
                    self.current_page = allocating_page;
                }
                return new_object;
            }
        }
        // Try to allocate from ourselves
        let location = self.get_next_location();
        let aligned_location = (location + align - 1) & !(align - 1);
        let next_location = aligned_location + size;
        if next_location <= self.get_next_exclusive_page_location() as usize {
            self.set_next_location(next_location);
            // BUGFIX: return the aligned address; the previous code returned
            // the unaligned bump pointer, violating the `align` contract.
            return aligned_location as *mut u8;
        }
        // So the space did not fit.
        // Calculate gross size to decide whether we're oversized
        if size_of::<_Page>() + size + size_of::<*mut *mut _Page>() > _PAGE_SIZE {
            if self.get_next_location() >= self.get_next_exclusive_page_location() as usize {
                // No slot left for the exclusive-page pointer:
                // allocate an extension page and try again with it
                unsafe {
                    return (*self.allocate_extension_page()).allocate_raw(size, align);
                }
            }
            unsafe {
                // We allocate oversized objects directly.
                let memory = alloc(Layout::from_size_align_unchecked(
                    size + size_of::<_Page>(),
                    _PAGE_SIZE,
                ));
                // Initialize a _Page object at the page start
                let page = memory as *mut _Page;
                // Oversized pages have no current_page
                (*page).current_page = null_mut();
                // Set the size since we will need it when deallocating
                (*page).next_object_offset = (size % 0x100000000) as i32;
                (*page).exclusive_pages = (size / 0x100000000) as i32;
                *(self.get_next_exclusive_page_location()) = page;
                self.exclusive_pages += 1;
                return page.offset(1) as *mut u8;
            }
        }
        // So we're not oversized. Create extension page and let it allocate.
        unsafe { (*self.allocate_extension_page()).allocate_raw(size, align) }
    }

    /// Allocates a fresh page, links it as this page's extension page, and
    /// makes it the current allocation target.
    fn allocate_extension_page(&mut self) -> *mut _Page {
        unsafe {
            let page = _Page::allocate_page();
            *(self.get_extension_page_location()) = page;
            self.current_page = page;
            &mut *page
        }
    }

    /// Allocates and initializes one raw page.
    fn allocate_page() -> *mut _Page {
        unsafe {
            let page =
                alloc(Layout::from_size_align_unchecked(_PAGE_SIZE, _PAGE_SIZE)) as *mut _Page;
            (*page).reset();
            page
        }
    }

    /// Allocates a page owned by (and registered with) this page.
    pub fn allocate_exclusive_page(&mut self) -> *mut _Page {
        unsafe {
            if self as *mut _Page != self.current_page {
                // We're already known to be full, so we delegate to the current page
                return (*self.current_page).allocate_exclusive_page();
            }
            // Check first whether we need an ordinary extension
            if self.get_next_location() >= self.get_next_exclusive_page_location() as usize {
                // Allocate an extension page with default size
                return (*self.allocate_extension_page()).allocate_exclusive_page();
            }
            let page = _Page::allocate_page();
            *(self.get_next_exclusive_page_location()) = page;
            self.exclusive_pages += 1;
            &mut *page
        }
    }

    /// Grows the most recent allocation in place: succeeds only when `top` is
    /// the current bump pointer and `size` more bytes still fit.
    pub fn extend(&mut self, top: usize, size: usize) -> bool {
        if top != self.get_next_location() {
            return false;
        }
        let new_top = top + size;
        if new_top > self.get_next_exclusive_page_location() as usize {
            return false;
        }
        self.set_next_location(new_top);
        true
    }

    /// Frees every extension page in the chain together with every exclusive
    /// page each of them references. `self`'s own memory is left alone.
    fn deallocate_extensions(&mut self) {
        let mut page: *mut _Page = self;
        unsafe {
            while !page.is_null() {
                let extension_pointer = (*page).get_extension_page_location();
                // BUGFIX: read the next link *before* freeing this page; the
                // previous code dereferenced `extension_pointer` after
                // `forget()` had deallocated the page (use-after-free).
                let next_page = *extension_pointer;
                // BUGFIX: exclusive pages occupy slots -1..=-exclusive_pages;
                // the previous `1..exclusive_pages` range leaked the last one.
                for i in 1..=(*page).exclusive_pages {
                    let exclusive_page = &mut **(extension_pointer.offset(-(i as isize)));
                    if !exclusive_page.is_oversized() {
                        exclusive_page.deallocate_extensions();
                    }
                    exclusive_page.forget();
                }
                if page != self as *mut _Page {
                    (*page).forget();
                }
                page = next_page;
            }
        }
    }

    /// Releases this page's memory with the same layout it was allocated with.
    pub fn forget(&self) {
        let size = if self.current_page.is_null() {
            // Oversized page: reconstruct the full allocation size — the
            // stored object size plus the page header.
            // BUGFIX: the header size was previously omitted, so `dealloc`
            // was called with a different layout than `alloc` (UB).
            self.next_object_offset as usize
                + self.exclusive_pages as usize * 0x100000000
                + size_of::<_Page>()
        } else {
            _PAGE_SIZE
        };
        unsafe {
            dealloc(
                self as *const _Page as *mut u8,
                Layout::from_size_align_unchecked(size, _PAGE_SIZE),
            )
        }
    }

    /// Finds and frees the exclusive page `address` anywhere in the extension
    /// chain. Returns false (i.e. a leak) when it is not registered.
    pub fn reclaim_array(&mut self, address: *mut _Page) -> bool {
        unsafe {
            let mut page: *mut _Page = self;
            while !page.is_null() {
                if (*page).deallocate_exclusive_page(address) {
                    return true;
                }
                page = *(*page).get_extension_page_location();
            }
        }
        // If we arrive here, we have a memory leak.
        false
    }

    /// Rounds an address down to the start of its page.
    pub fn get_page(address: usize) -> *mut _Page {
        (address & !(_PAGE_SIZE - 1)) as *mut _Page
    }

    /// Unregisters `page` from this page's exclusive-page array (shifting the
    /// remaining slots) and frees it. Returns false when not registered here.
    fn deallocate_exclusive_page(&mut self, page: *mut _Page) -> bool {
        unsafe {
            // Find the extension Page pointer
            let mut extension_pointer = self.get_extension_page_location().offset(-1);
            let next_extension_page_location = self.get_next_exclusive_page_location();
            while extension_pointer > next_extension_page_location {
                if *extension_pointer == page {
                    break;
                }
                extension_pointer = extension_pointer.offset(-1);
            }
            // Report if we could not find it
            if extension_pointer == next_extension_page_location {
                return false;
            }
            // Shift the remaining array one position up
            while extension_pointer > next_extension_page_location {
                let pp_page = extension_pointer;
                *pp_page = *(pp_page.offset(-1));
                extension_pointer = extension_pointer.offset(-1);
            }
        }
        // Make room for one more extension
        self.exclusive_pages -= 1;
        unsafe {
            (*page).forget();
        }
        true
    }
}
/// End-to-end exercise of `_Page`: bump allocation, extension-page overflow,
/// oversized (exclusive) pages and reclamation.
#[test]
fn test_page() {
    unsafe {
        // Allocate a page
        let memory = alloc(Layout::from_size_align_unchecked(_PAGE_SIZE, _PAGE_SIZE));
        assert_ne!(memory, null_mut());
        // Write some numbers to the page memory
        for i in 0.._PAGE_SIZE {
            let ptr = memory.offset(i as isize);
            *ptr = i as u8
        }
        // Initialize a _Page object at the page start
        let page = &mut *(memory as *mut _Page);
        // The scribbled bytes above guarantee the header starts non-zero,
        // so reset() is observably doing work.
        assert_ne!(page.next_object_offset, 0);
        assert_ne!(page.exclusive_pages, 0);
        page.reset();
        assert_eq!(page.next_object_offset, size_of::<_Page>() as i32);
        assert_eq!(page.exclusive_pages, 0);
        assert_eq!(page.is_oversized(), false);
        assert_eq!(page as *mut _Page, page.current_page);
        {
            // The extension pointer lives in the last word of the page.
            let extension_page_location = page.get_extension_page_location();
            assert_eq!(
                extension_page_location as usize,
                page as *const _Page as usize + _PAGE_SIZE - size_of::<*mut *mut _Page>()
            );
        }
        {
            let mut location = page.get_next_location();
            assert_eq!(
                location as usize,
                page as *const _Page as usize + size_of::<_Page>()
            );
            // `location` tracks the expected bump pointer after each
            // allocation (including alignment padding).
            let answer = page.allocate_raw(1, 1);
            location += 1;
            assert_eq!(page.get_next_location(), location);
            *answer = 42;
            let another = page.allocate_raw(1, 2);
            location += 2;
            assert_eq!(page.get_next_location(), location);
            *another = 43;
            let eau = page.allocate_raw(4, 4) as *mut i32;
            location += 5;
            assert_eq!(page.get_next_location(), location);
            *eau = 4711;
            assert_eq!(_Page::get_page(eau as usize), page as *mut _Page);
            // Allocate an oversized page which should cause allocating an exclusive page
            let array = page.allocate_raw(_PAGE_SIZE, 8) as *mut usize;
            for i in 0.._PAGE_SIZE / size_of::<usize>() - 1 {
                *(array.offset(i as isize)) = i;
            }
            // The oversized allocation must not advance this page's bump pointer.
            assert_eq!(page.get_next_location(), location);
            assert_eq!(page as *mut _Page, page.current_page);
            // Overflow the page
            for _ in 0.._PAGE_SIZE {
                page.allocate_raw(1, 1);
            }
            // After overflow, an extension page became the current page.
            assert_ne!(page as *mut _Page, page.current_page);
            assert_eq!(*(page.get_extension_page_location()), page.current_page);
            assert_eq!((*page.current_page).exclusive_pages, 0);
            assert_eq!((*page.current_page).current_page, page.current_page);
            assert_eq!(page.exclusive_pages, 1);
            // Earlier allocations survive the overflow untouched.
            assert_eq!(*answer, 42);
            assert_eq!(*another, 43);
            assert_eq!(*eau, 4711);
            let exclusive_page = page.get_next_exclusive_page_location().offset(1);
            assert_eq!((**exclusive_page).current_page, null_mut());
            assert_eq!(_Page::get_page(array as usize), *exclusive_page);
            let success = page.reclaim_array(_Page::get_page(array as usize));
            assert_eq!(success, true);
            assert_eq!(page.exclusive_pages, 0);
            page.clear();
            page.forget();
        }
    }
}
Fixed storing the size in oversized pages.
use std::alloc::alloc;
use std::alloc::dealloc;
use std::alloc::Layout;
use std::mem::size_of;
use std::ptr::null_mut;
/// Size in bytes (and alignment) of one arena page.
const _PAGE_SIZE: usize = 0x1000;

/// Header placed at the start of every arena page.
///
/// A page is a `_PAGE_SIZE`-aligned block of `_PAGE_SIZE` bytes. Objects are
/// bump-allocated upwards starting directly behind this header. The last
/// pointer-sized slot of the page holds a pointer to an optional extension
/// page, and the slots directly below it form a downward-growing array of
/// pointers to "exclusive" pages (oversized allocations and pages handed out
/// by `allocate_exclusive_page`).
pub struct _Page {
    // Page that currently serves allocations for this chain.
    // Null marks an oversized page (see `is_oversized`).
    current_page: *mut _Page,
    // Bump cursor, relative to the page start. For an oversized page this
    // instead holds the low 32 bits of the gross allocation size.
    next_object_offset: i32,
    // Number of entries in the exclusive-page pointer array. For an
    // oversized page this instead holds the high 32 bits of the gross size.
    exclusive_pages: i32,
}

impl _Page {
    /// Re-initializes the header: no extension page, allocation cursor right
    /// behind the header, no exclusive pages, allocations served by ourselves.
    pub fn reset(&mut self) {
        unsafe {
            *(self.get_extension_page_location()) = null_mut();
        }
        self.next_object_offset = size_of::<_Page>() as i32;
        self.exclusive_pages = 0;
        self.current_page = &mut *self;
    }

    /// Frees all extension and exclusive pages, then resets this page.
    pub fn clear(&mut self) {
        self.deallocate_extensions();
        self.reset();
    }

    /// Oversized pages hold a single large allocation and never serve further
    /// allocations; this is encoded as a null `current_page`.
    pub fn is_oversized(&self) -> bool {
        self.current_page.is_null()
    }

    /// Address of the extension-page pointer: the last pointer-sized slot of
    /// this page.
    fn get_extension_page_location(&self) -> *mut *mut _Page {
        // Convert self to page location
        let self_location = self as *const _Page as usize;
        // Advance one page size so we are past the end of our page
        let location_behind_page = (self_location + _PAGE_SIZE) as *mut *mut _Page;
        // Go back one pointer size
        unsafe { location_behind_page.offset(-1) }
    }

    /// Absolute address of the next free byte in this page.
    fn get_next_location(&self) -> usize {
        self as *const _Page as usize + self.next_object_offset as usize
    }

    fn set_next_location(&mut self, location: usize) {
        let self_location = self as *const _Page as usize;
        self.next_object_offset = (location - self_location) as i32;
    }

    /// First free slot of the downward-growing exclusive-page pointer array
    /// (one below the last used entry; used entries live at offsets
    /// -1 ..= -exclusive_pages relative to the extension-pointer slot).
    fn get_next_exclusive_page_location(&self) -> *mut *mut _Page {
        unsafe {
            self.get_extension_page_location()
                .offset(-(self.exclusive_pages as isize + 1))
        }
    }

    /// Allocates `size` bytes aligned to `align` (a power of two) and returns
    /// a pointer to them. May allocate extension or exclusive pages as a side
    /// effect; callers can locate the owning page via `get_page`.
    pub fn allocate_raw(&mut self, size: usize, align: usize) -> *mut u8 {
        if self as *mut _Page != self.current_page {
            unsafe {
                // We're already known to be full, so we delegate to the current page
                let new_object = (*self.current_page).allocate_raw(size, align);
                // Possibly our current page was also full so we propagate back the new current page
                let allocating_page = _Page::get_page(new_object as usize);
                if allocating_page != self.current_page && (!(*allocating_page).is_oversized()) {
                    self.current_page = allocating_page;
                }
                return new_object;
            }
        }
        // Try to allocate from ourselves
        let location = self.get_next_location();
        let aligned_location = (location + align - 1) & !(align - 1);
        let next_location = aligned_location + size;
        if next_location <= self.get_next_exclusive_page_location() as usize {
            self.set_next_location(next_location);
            // Hand out the *aligned* address; returning `location` would
            // violate the requested alignment whenever padding was inserted.
            return aligned_location as *mut u8;
        }
        // So the space did not fit.
        // Calculate gross size to decide whether we're oversized
        if size_of::<_Page>() + size + size_of::<*mut *mut _Page>() > _PAGE_SIZE {
            if self.get_next_location() >= self.get_next_exclusive_page_location() as usize {
                // Not even room for the exclusive-page pointer slot:
                // allocate an extension page and try again with it.
                unsafe {
                    return (*self.allocate_extension_page()).allocate_raw(size, align);
                }
            }
            unsafe {
                let gross_size = size + size_of::<_Page>();
                // We allocate oversized objects directly.
                // NOTE(review): alignments larger than the header size are not
                // honored for oversized objects — confirm callers never need that.
                let memory = alloc(Layout::from_size_align_unchecked(gross_size, _PAGE_SIZE));
                // Initialize a _Page object at the page start
                let page = memory as *mut _Page;
                // Oversized pages have no current_page
                (*page).current_page = null_mut();
                // Remember the gross size (split across the two i32 fields)
                // since `forget` needs it when deallocating.
                (*page).next_object_offset = (gross_size % 0x100000000) as i32;
                (*page).exclusive_pages = (gross_size / 0x100000000) as i32;
                *(self.get_next_exclusive_page_location()) = page;
                self.exclusive_pages += 1;
                return page.offset(1) as *mut u8;
            }
        }
        // So we're not oversized. Create extension page and let it allocate.
        unsafe { (*self.allocate_extension_page()).allocate_raw(size, align) }
    }

    /// Allocates a fresh page, links it as our extension page and makes it
    /// the current allocation target.
    fn allocate_extension_page(&mut self) -> *mut _Page {
        unsafe {
            let page = _Page::allocate_page();
            *(self.get_extension_page_location()) = page;
            self.current_page = page;
            &mut *page
        }
    }

    /// Allocates and initializes one page-sized, page-aligned page.
    fn allocate_page() -> *mut _Page {
        unsafe {
            let page =
                alloc(Layout::from_size_align_unchecked(_PAGE_SIZE, _PAGE_SIZE)) as *mut _Page;
            (*page).reset();
            page
        }
    }

    /// Allocates a page owned by this chain but not used for bump allocation,
    /// registering it in the exclusive-page pointer array.
    pub fn allocate_exclusive_page(&mut self) -> *mut _Page {
        unsafe {
            if self as *mut _Page != self.current_page {
                // We're already known to be full, so we delegate to the current page
                return (*self.current_page).allocate_exclusive_page();
            }
            // Check first whether we need an ordinary extension
            if self.get_next_location() >= self.get_next_exclusive_page_location() as usize {
                // Allocate an extension page with default size
                return (*self.allocate_extension_page()).allocate_exclusive_page();
            }
            let page = _Page::allocate_page();
            *(self.get_next_exclusive_page_location()) = page;
            self.exclusive_pages += 1;
            &mut *page
        }
    }

    /// Grows the most recent allocation in place by `size` bytes.
    /// Only succeeds when `top` is exactly the current bump cursor and the
    /// grown region still fits below the exclusive-page pointer array.
    pub fn extend(&mut self, top: usize, size: usize) -> bool {
        if top != self.get_next_location() {
            return false;
        }
        let new_top = top + size;
        if new_top > self.get_next_exclusive_page_location() as usize {
            return false;
        }
        self.set_next_location(new_top);
        true
    }

    /// Frees every extension page in the chain and every exclusive page
    /// registered anywhere in the chain. `self` itself is NOT freed.
    fn deallocate_extensions(&mut self) {
        let mut page: *mut _Page = self;
        unsafe {
            while !page.is_null() {
                let extension_pointer = (*page).get_extension_page_location();
                // Free all exclusive pages of this chain link. The used slots
                // are -1 ..= -exclusive_pages, so the range must be inclusive;
                // `1..exclusive_pages` would leak the last one.
                for i in 1..=(*page).exclusive_pages {
                    let exclusive_page = &mut **(extension_pointer.offset(-(i as isize)));
                    if !exclusive_page.is_oversized() {
                        exclusive_page.deallocate_extensions();
                    }
                    exclusive_page.forget();
                }
                // Read the successor BEFORE freeing this page; reading it
                // afterwards would access deallocated memory.
                let next = *extension_pointer;
                if page != self as *mut _Page {
                    (*page).forget();
                }
                page = next;
            }
        }
    }

    /// Returns this page's memory to the allocator. For oversized pages the
    /// gross size is reconstructed from the two header fields.
    pub fn forget(&self) {
        let size = if self.current_page == null_mut() {
            self.next_object_offset as usize + self.exclusive_pages as usize * 0x100000000
        } else {
            _PAGE_SIZE
        };
        unsafe {
            dealloc(
                self as *const _Page as *mut u8,
                Layout::from_size_align_unchecked(size, _PAGE_SIZE),
            )
        }
    }

    /// Finds `address` in the exclusive-page arrays of the chain and frees it.
    /// Returns false (leaking the page) when it is not registered anywhere.
    pub fn reclaim_array(&mut self, address: *mut _Page) -> bool {
        unsafe {
            let mut page: *mut _Page = self;
            while !page.is_null() {
                if (*page).deallocate_exclusive_page(address) {
                    return true;
                }
                page = *(*page).get_extension_page_location();
            }
        }
        // If we arrive here, we have a memory leak.
        false
    }

    /// Maps any address inside a page to that page's header.
    pub fn get_page(address: usize) -> *mut _Page {
        (address & !(_PAGE_SIZE - 1)) as *mut _Page
    }

    /// Removes `page` from this page's exclusive array (if present), closing
    /// the gap in the pointer array, and frees it. Returns whether it was found.
    fn deallocate_exclusive_page(&mut self, page: *mut _Page) -> bool {
        unsafe {
            // Find the extension Page pointer
            let mut extension_pointer = self.get_extension_page_location().offset(-1);
            let next_extension_page_location = self.get_next_exclusive_page_location();
            while extension_pointer > next_extension_page_location {
                if *extension_pointer == page {
                    break;
                }
                extension_pointer = extension_pointer.offset(-1);
            }
            // Report if we could not find it
            if extension_pointer == next_extension_page_location {
                return false;
            }
            // Shift the remaining array one position up
            while extension_pointer > next_extension_page_location {
                let pp_page = extension_pointer;
                *pp_page = *(pp_page.offset(-1));
                extension_pointer = extension_pointer.offset(-1);
            }
        }
        // Make room for one more extension
        self.exclusive_pages -= 1;
        unsafe {
            (*page).forget();
        }
        true
    }
}
// End-to-end exercise of the `_Page` arena: reset, aligned bump allocation,
// oversized (exclusive) allocation, overflow into an extension page, and
// reclaiming an exclusive page.
#[test]
fn test_page() {
    unsafe {
        // Allocate a page
        let memory = alloc(Layout::from_size_align_unchecked(_PAGE_SIZE, _PAGE_SIZE));
        assert_ne!(memory, null_mut());
        // Write some numbers to the page memory
        for i in 0.._PAGE_SIZE {
            let ptr = memory.offset(i as isize);
            *ptr = i as u8
        }
        // Initialize a _Page object at the page start
        let page = &mut *(memory as *mut _Page);
        // The scribble pattern above makes the header fields non-zero, so the
        // following asserts prove that reset() really rewrites them.
        assert_ne!(page.next_object_offset, 0);
        assert_ne!(page.exclusive_pages, 0);
        page.reset();
        assert_eq!(page.next_object_offset, size_of::<_Page>() as i32);
        assert_eq!(page.exclusive_pages, 0);
        assert_eq!(page.is_oversized(), false);
        assert_eq!(page as *mut _Page, page.current_page);
        {
            // The extension pointer lives in the last pointer-sized slot.
            let extension_page_location = page.get_extension_page_location();
            assert_eq!(
                extension_page_location as usize,
                page as *const _Page as usize + _PAGE_SIZE - size_of::<*mut *mut _Page>()
            );
        }
        {
            let mut location = page.get_next_location();
            assert_eq!(
                location as usize,
                page as *const _Page as usize + size_of::<_Page>()
            );
            let answer = page.allocate_raw(1, 1);
            location += 1;
            assert_eq!(page.get_next_location(), location);
            *answer = 42;
            // Align 2 after an odd cursor: one padding byte plus the payload.
            let another = page.allocate_raw(1, 2);
            location += 2;
            assert_eq!(page.get_next_location(), location);
            *another = 43;
            // Align 4 after an odd cursor: one padding byte plus four bytes.
            let eau = page.allocate_raw(4, 4) as *mut i32;
            location += 5;
            assert_eq!(page.get_next_location(), location);
            *eau = 4711;
            assert_eq!(_Page::get_page(eau as usize), page as *mut _Page);
            // Allocate an oversized page which should cause allocating an exclusive page
            let array = page.allocate_raw(_PAGE_SIZE, 8) as *mut usize;
            for i in 0.._PAGE_SIZE / size_of::<usize>() - 1 {
                *(array.offset(i as isize)) = i;
            }
            // The oversized allocation must not move this page's bump cursor.
            assert_eq!(page.get_next_location(), location);
            assert_eq!(page as *mut _Page, page.current_page);
            // Overflow the page
            for _ in 0.._PAGE_SIZE {
                page.allocate_raw(1, 1);
            }
            // Allocation moved on to an extension page...
            assert_ne!(page as *mut _Page, page.current_page);
            assert_eq!(*(page.get_extension_page_location()), page.current_page);
            assert_eq!((*page.current_page).exclusive_pages, 0);
            assert_eq!((*page.current_page).current_page, page.current_page);
            assert_eq!(page.exclusive_pages, 1);
            // ...and earlier allocations survived untouched.
            assert_eq!(*answer, 42);
            assert_eq!(*another, 43);
            assert_eq!(*eau, 4711);
            // Slot -1 (one above the next free slot) holds the exclusive page.
            let exclusive_page = page.get_next_exclusive_page_location().offset(1);
            assert_eq!((**exclusive_page).current_page, null_mut());
            assert_eq!((**exclusive_page).exclusive_pages, 0);
            // Oversized header stores the gross size (header + payload).
            assert_eq!((**exclusive_page).next_object_offset as usize, size_of::<_Page>() + _PAGE_SIZE );
            assert_eq!(_Page::get_page(array as usize), *exclusive_page);
            let success = page.reclaim_array(_Page::get_page(array as usize));
            assert_eq!(success, true);
            assert_eq!(page.exclusive_pages, 0);
            page.clear();
            page.forget();
        }
    }
}
|
// Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Library General Public
// License as published by the Free Software Foundation; either
// version 2 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Library General Public License for more details.
//
// You should have received a copy of the GNU Library General Public
// License along with this library; if not, write to the
// Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
// Boston, MA 02110-1335, USA.
use either::Either;
use futures::future::BoxFuture;
use futures::future::{abortable, AbortHandle, Aborted};
use futures::lock::{Mutex, MutexGuard};
use futures::prelude::*;
use glib;
use glib::prelude::*;
use glib::subclass;
use glib::subclass::prelude::*;
use glib::{glib_object_impl, glib_object_subclass};
use gst;
use gst::prelude::*;
use gst::subclass::prelude::*;
use gst::{gst_debug, gst_error_msg, gst_info, gst_log, gst_trace};
use gst_rtp::RTPBuffer;
use lazy_static::lazy_static;
use std::cmp::{max, min, Ordering};
use std::collections::{BTreeSet, VecDeque};
use std::time::Duration;
use crate::runtime::prelude::*;
use crate::runtime::{
self, Context, JoinHandle, PadContext, PadSink, PadSinkRef, PadSrc, PadSrcRef, PadSrcWeak,
};
use super::{RTPJitterBuffer, RTPJitterBufferItem, RTPPacketRateCtx};
// Property defaults for the ts-jitterbuffer element.
const DEFAULT_LATENCY_MS: u32 = 200;
const DEFAULT_DO_LOST: bool = false;
// Gap tolerances, in milliseconds (scaled by packet rate at runtime).
const DEFAULT_MAX_DROPOUT_TIME: u32 = 60000;
const DEFAULT_MAX_MISORDER_TIME: u32 = 2000;
// Thread-sharing context name ("" = private context) and throttle wait.
const DEFAULT_CONTEXT: &str = "";
const DEFAULT_CONTEXT_WAIT: u32 = 0;
// Snapshot of the element's configurable properties; guarded by an async
// Mutex on the element and cloned/locked where needed.
#[derive(Debug, Clone)]
struct Settings {
    latency_ms: u32,        // "latency" property
    do_lost: bool,          // "do-lost" property
    max_dropout_time: u32,  // "max-dropout-time" property (ms)
    max_misorder_time: u32, // "max-misorder-time" property (ms)
    context: String,        // "context" property
    context_wait: u32,      // "context-wait" property (ms)
}
impl Default for Settings {
    /// Settings initialized from the element's property defaults.
    fn default() -> Self {
        Self {
            context_wait: DEFAULT_CONTEXT_WAIT,
            context: DEFAULT_CONTEXT.into(),
            max_misorder_time: DEFAULT_MAX_MISORDER_TIME,
            max_dropout_time: DEFAULT_MAX_DROPOUT_TIME,
            do_lost: DEFAULT_DO_LOST,
            latency_ms: DEFAULT_LATENCY_MS,
        }
    }
}
// glib property table for the element: "latency", "do-lost",
// "max-dropout-time", "max-misorder-time", "stats", "context", "context-wait".
// NOTE(review): property handlers presumably index into this array by
// position (not visible here) — keep the order stable; confirm against
// set_property/get_property.
static PROPERTIES: [subclass::Property; 7] = [
    subclass::Property("latency", |name| {
        glib::ParamSpec::uint(
            name,
            "Buffer latency in ms",
            "Amount of ms to buffer",
            0,
            std::u32::MAX,
            DEFAULT_LATENCY_MS,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("do-lost", |name| {
        glib::ParamSpec::boolean(
            name,
            "Do Lost",
            "Send an event downstream when a packet is lost",
            DEFAULT_DO_LOST,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("max-dropout-time", |name| {
        glib::ParamSpec::uint(
            name,
            "Max dropout time",
            "The maximum time (milliseconds) of missing packets tolerated.",
            0,
            std::u32::MAX,
            DEFAULT_MAX_DROPOUT_TIME,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("max-misorder-time", |name| {
        glib::ParamSpec::uint(
            name,
            "Max misorder time",
            "The maximum time (milliseconds) of misordered packets tolerated.",
            0,
            std::u32::MAX,
            DEFAULT_MAX_MISORDER_TIME,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("stats", |name| {
        glib::ParamSpec::boxed(
            name,
            "Statistics",
            "Various statistics",
            gst::Structure::static_type(),
            glib::ParamFlags::READABLE,
        )
    }),
    subclass::Property("context", |name| {
        glib::ParamSpec::string(
            name,
            "Context",
            "Context name to share threads with",
            Some(DEFAULT_CONTEXT),
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("context-wait", |name| {
        glib::ParamSpec::uint(
            name,
            "Context Wait",
            "Throttle poll loop to run at most once every this many ms",
            0,
            1000,
            DEFAULT_CONTEXT_WAIT,
            glib::ParamFlags::READWRITE,
        )
    }),
];
// Stateless handler attached to the element's sink pad (see the
// PadSinkHandler impl below).
#[derive(Clone, Debug)]
struct JitterBufferPadSinkHandler;
// Sink-pad handler: routes incoming buffers, events and queries into the
// jitterbuffer, deferring serialized work onto the pad's async context.
impl PadSinkHandler for JitterBufferPadSinkHandler {
    type ElementImpl = JitterBuffer;
    // Enqueues an incoming RTP buffer asynchronously via `enqueue_item`.
    fn sink_chain(
        &self,
        pad: &PadSinkRef,
        _jitterbuffer: &JitterBuffer,
        element: &gst::Element,
        buffer: gst::Buffer,
    ) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
        // Only weak/cloned handles are captured so the future can be 'static.
        let pad_weak = pad.downgrade();
        let element = element.clone();
        async move {
            let pad = pad_weak.upgrade().expect("PadSink no longer exists");
            gst_debug!(CAT, obj: pad.gst_pad(), "Handling {:?}", buffer);
            let jitterbuffer = JitterBuffer::from_instance(&element);
            jitterbuffer
                .enqueue_item(pad.gst_pad(), &element, Some(buffer))
                .await
        }
        .boxed()
    }
    // Serialized events are handled on the async context (flush-stop, segment,
    // EOS); non-serialized events are forwarded synchronously.
    fn sink_event(
        &self,
        pad: &PadSinkRef,
        jitterbuffer: &JitterBuffer,
        element: &gst::Element,
        event: gst::Event,
    ) -> Either<bool, BoxFuture<'static, bool>> {
        use gst::EventView;
        if event.is_serialized() {
            let pad_weak = pad.downgrade();
            let element = element.clone();
            Either::Right(
                async move {
                    let pad = pad_weak.upgrade().expect("PadSink no longer exists");
                    let mut forward = true;
                    gst_log!(CAT, obj: pad.gst_pad(), "Handling {:?}", event);
                    let jitterbuffer = JitterBuffer::from_instance(&element);
                    match event.view() {
                        EventView::FlushStop(..) => {
                            jitterbuffer.flush(&element).await;
                        }
                        EventView::Segment(e) => {
                            // Remember the segment; used to compute running times.
                            let mut state = jitterbuffer.state.lock().await;
                            state.segment = e
                                .get_segment()
                                .clone()
                                .downcast::<gst::format::Time>()
                                .unwrap();
                        }
                        EventView::Eos(..) => {
                            // Push out everything still buffered before EOS.
                            let mut state = jitterbuffer.state.lock().await;
                            jitterbuffer.drain(&mut state, &element).await;
                        }
                        EventView::CustomDownstreamSticky(e) => {
                            // Internal pad-context bookkeeping events stop here.
                            if PadContext::is_pad_context_sticky_event(&e) {
                                forward = false;
                            }
                        }
                        _ => (),
                    };
                    if forward {
                        gst_log!(CAT, obj: pad.gst_pad(), "Forwarding serialized {:?}", event);
                        jitterbuffer.src_pad.push_event(event).await
                    } else {
                        true
                    }
                }
                .boxed(),
            )
        } else {
            gst_log!(CAT, obj: pad.gst_pad(), "Forwarding non-serialized {:?}", event);
            Either::Left(jitterbuffer.src_pad.gst_pad().push_event(event))
        }
    }
    // Drain queries flush the buffer synchronously; everything else is
    // forwarded to the src pad's peer.
    fn sink_query(
        &self,
        pad: &PadSinkRef,
        jitterbuffer: &JitterBuffer,
        element: &gst::Element,
        query: &mut gst::QueryRef,
    ) -> bool {
        use gst::QueryView;
        gst_log!(CAT, obj: pad.gst_pad(), "Forwarding {:?}", query);
        match query.view_mut() {
            QueryView::Drain(..) => {
                gst_info!(CAT, obj: pad.gst_pad(), "Draining");
                // `None` buffer means "just drain" (see enqueue_item).
                runtime::executor::block_on(jitterbuffer.enqueue_item(pad.gst_pad(), element, None))
                    .is_ok()
            }
            _ => jitterbuffer.src_pad.gst_pad().peer_query(query),
        }
    }
}
// Stateless handler attached to the element's src pad (see the
// PadSrcHandler impl below).
#[derive(Clone, Debug)]
struct JitterBufferPadSrcHandler;
// Src-pad handler: answers latency and position queries locally, proxies
// everything else upstream through the sink pad.
impl PadSrcHandler for JitterBufferPadSrcHandler {
    type ElementImpl = JitterBuffer;
    fn src_query(
        &self,
        pad: &PadSrcRef,
        jitterbuffer: &JitterBuffer,
        _element: &gst::Element,
        query: &mut gst::QueryRef,
    ) -> bool {
        use gst::QueryView;
        gst_log!(CAT, obj: pad.gst_pad(), "Forwarding {:?}", query);
        match query.view_mut() {
            QueryView::Latency(ref mut q) => {
                // Upstream latency plus our configured buffering latency;
                // the maximum is unbounded.
                let mut peer_query = gst::query::Query::new_latency();
                let ret = jitterbuffer.sink_pad.gst_pad().peer_query(&mut peer_query);
                if ret {
                    let (_, mut min_latency, _) = peer_query.get_result();
                    let our_latency = runtime::executor::block_on(jitterbuffer.settings.lock())
                        .latency_ms as u64
                        * gst::MSECOND;
                    min_latency += our_latency;
                    let max_latency = gst::CLOCK_TIME_NONE;
                    q.set(true, min_latency, max_latency);
                }
                ret
            }
            QueryView::Position(ref mut q) => {
                // Time-format position comes from our segment; other formats
                // are delegated upstream.
                if q.get_format() != gst::Format::Time {
                    jitterbuffer.sink_pad.gst_pad().peer_query(query)
                } else {
                    q.set(
                        runtime::executor::block_on(jitterbuffer.state.lock())
                            .segment
                            .get_position(),
                    );
                    true
                }
            }
            _ => jitterbuffer.sink_pad.gst_pad().peer_query(query),
        }
    }
}
// An RTP buffer held back while deciding whether a large sequence-number gap
// warrants a reset; ordered by RTP seqnum (see the Ord impl below).
#[derive(Eq)]
struct GapPacket(gst::Buffer);
impl Ord for GapPacket {
    /// Total order by RTP sequence number, wrap-around aware via
    /// `gst_rtp::compare_seqnum`.
    fn cmp(&self, other: &Self) -> Ordering {
        // Scope each RTP mapping so the buffer is unmapped again before
        // the comparison result is produced.
        let seq = {
            let mut mapped = RTPBuffer::from_buffer_readable(&self.0).unwrap();
            mapped.get_seq()
        };
        let other_seq = {
            let mut mapped = RTPBuffer::from_buffer_readable(&other.0).unwrap();
            mapped.get_seq()
        };
        // compare_seqnum(a, b) is positive when a precedes b, so comparing
        // zero against it yields Less exactly when self precedes other.
        0.cmp(&gst_rtp::compare_seqnum(seq, other_seq))
    }
}
impl PartialOrd for GapPacket {
    // Delegates to the total order defined by `Ord::cmp`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl PartialEq for GapPacket {
    // Equality derived from the seqnum ordering, keeping it consistent
    // with `Ord` as required by the BTreeSet that stores these.
    fn eq(&self, other: &Self) -> bool {
        self.cmp(other) == Ordering::Equal
    }
}
// Mutable runtime state of the jitterbuffer, guarded by an async Mutex.
struct State {
    jbuf: glib::SendUniqueCell<RTPJitterBuffer>,
    packet_rate_ctx: RTPPacketRateCtx,
    clock_rate: i32, // -1 until learned from caps or "request-pt-map"
    segment: gst::FormattedSegment<gst::ClockTime>,
    // Inter-packet-spacing ("ips") estimation inputs.
    ips_rtptime: u32,
    ips_pts: gst::ClockTime,
    // u32::MAX marks "nothing seen yet" for the fields below.
    last_pt: u32,
    last_in_seqnum: u32,
    packet_spacing: gst::ClockTime,
    // Always Some; Option only so it can be take()n without cloning.
    gap_packets: Option<BTreeSet<GapPacket>>,
    last_popped_seqnum: u32,
    // Statistics counters.
    num_pushed: u64,
    num_lost: u64,
    num_late: u64,
    last_rtptime: u32,
    equidistant: i32, // clamped to [-7, 7]; > 0 means evenly spaced packets
    earliest_pts: gst::ClockTime,
    earliest_seqnum: u16,
    last_popped_pts: gst::ClockTime,
    discont: bool,
    last_res: Result<gst::FlowSuccess, gst::FlowError>,
    // Handles used to cancel/await the pending wakeup and task-queue futures.
    task_queue_abort_handle: Option<AbortHandle>,
    wakeup_abort_handle: Option<AbortHandle>,
    wakeup_join_handle: Option<JoinHandle<Result<(), Aborted>>>,
}
impl Default for State {
    /// Fresh state: nothing buffered, clock rate unknown, all sequence
    /// tracking at its "unset" sentinel and statistics at zero.
    fn default() -> State {
        State {
            // Buffering machinery.
            jbuf: glib::SendUniqueCell::new(RTPJitterBuffer::new()).unwrap(),
            packet_rate_ctx: RTPPacketRateCtx::new(),
            gap_packets: Some(BTreeSet::new()),
            segment: gst::FormattedSegment::<gst::ClockTime>::new(),
            clock_rate: -1,
            // Packet-spacing estimation.
            ips_rtptime: 0,
            ips_pts: gst::CLOCK_TIME_NONE,
            packet_spacing: gst::ClockTime(Some(0)),
            equidistant: 0,
            // "Nothing seen yet" sentinels.
            last_pt: std::u32::MAX,
            last_in_seqnum: std::u32::MAX,
            last_popped_seqnum: std::u32::MAX,
            last_rtptime: std::u32::MAX,
            last_popped_pts: gst::CLOCK_TIME_NONE,
            earliest_pts: gst::CLOCK_TIME_NONE,
            earliest_seqnum: 0,
            discont: false,
            // Statistics.
            num_pushed: 0,
            num_lost: 0,
            num_late: 0,
            // Flow result and async task bookkeeping.
            last_res: Ok(gst::FlowSuccess::Ok),
            task_queue_abort_handle: None,
            wakeup_abort_handle: None,
            wakeup_join_handle: None,
        }
    }
}
// The element implementation: one sink and one src pad plus async-mutex
// protected settings and runtime state.
struct JitterBuffer {
    sink_pad: PadSink,
    src_pad: PadSrc,
    state: Mutex<State>,
    settings: Mutex<Settings>,
}
// Debug category used by all logging in this element.
lazy_static! {
    static ref CAT: gst::DebugCategory = gst::DebugCategory::new(
        "ts-jitterbuffer",
        gst::DebugColorFlags::empty(),
        Some("Thread-sharing jitterbuffer"),
    );
}
impl JitterBuffer {
/// Current running time (clock time minus base time, clamped at zero),
/// or CLOCK_TIME_NONE when the element has no clock.
fn get_current_running_time(&self, element: &gst::Element) -> gst::ClockTime {
    match element.get_clock() {
        Some(clock) => {
            if clock.get_time() > element.get_base_time() {
                clock.get_time() - element.get_base_time()
            } else {
                gst::ClockTime(Some(0))
            }
        }
        None => gst::CLOCK_TIME_NONE,
    }
}
// Extracts "payload" and "clock-rate" from `caps`, validates them against
// `pt`, and primes the packet-rate context and jitterbuffer with the rate.
// Errors on missing/invalid fields or payload-type mismatch.
fn parse_caps(
    &self,
    state: &mut MutexGuard<State>,
    element: &gst::Element,
    caps: &gst::Caps,
    pt: u8,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let s = caps.get_structure(0).ok_or(gst::FlowError::Error)?;
    gst_info!(CAT, obj: element, "Parsing {:?}", caps);
    let payload = s
        .get_some::<i32>("payload")
        .map_err(|_| gst::FlowError::Error)?;
    // pt == 0 means "no expectation"; otherwise the caps must match.
    if pt != 0 && payload as u8 != pt {
        return Err(gst::FlowError::Error);
    }
    state.last_pt = pt as u32;
    state.clock_rate = s
        .get_some::<i32>("clock-rate")
        .map_err(|_| gst::FlowError::Error)?;
    if state.clock_rate <= 0 {
        return Err(gst::FlowError::Error);
    }
    let clock_rate = state.clock_rate;
    state.packet_rate_ctx.reset(clock_rate);
    state.jbuf.borrow().set_clock_rate(clock_rate as u32);
    Ok(gst::FlowSuccess::Ok)
}
// Updates the packet-spacing estimate from consecutive packets with
// different RTP timestamps, blending old and new spacing (weighted moving
// average, weighted 3:1 towards whichever side applies).
fn calculate_packet_spacing(
    &self,
    state: &mut MutexGuard<State>,
    rtptime: u32,
    pts: gst::ClockTime,
) {
    if state.ips_rtptime != rtptime {
        if state.ips_pts.is_some() && pts.is_some() {
            let new_packet_spacing = pts - state.ips_pts;
            let old_packet_spacing = state.packet_spacing;
            // Shrink quickly, grow slowly: favor the smaller spacing.
            if old_packet_spacing > new_packet_spacing {
                state.packet_spacing = (new_packet_spacing + 3 * old_packet_spacing) / 4;
            } else if old_packet_spacing > gst::ClockTime(Some(0)) {
                state.packet_spacing = (3 * new_packet_spacing + old_packet_spacing) / 4;
            } else {
                state.packet_spacing = new_packet_spacing;
            }
            gst_debug!(
                CAT,
                "new packet spacing {}, old packet spacing {} combined to {}",
                new_packet_spacing,
                old_packet_spacing,
                state.packet_spacing
            );
        }
        state.ips_rtptime = rtptime;
        state.ips_pts = pts;
    }
}
// Collects packets arriving after a large seqnum gap. Returns true (request
// a reset) once more than 3 stored packets form a consecutive run with the
// same payload type; clears the set when they are not consecutive.
fn handle_big_gap_buffer(
    &self,
    state: &mut MutexGuard<State>,
    element: &gst::Element,
    buffer: gst::Buffer,
    pt: u8,
) -> bool {
    let gap_packets = state.gap_packets.as_mut().unwrap();
    let gap_packets_length = gap_packets.len();
    let mut reset = false;
    gst_debug!(
        CAT,
        obj: element,
        "Handling big gap, gap packets length: {}",
        gap_packets_length
    );
    gap_packets.insert(GapPacket(buffer));
    if gap_packets_length > 0 {
        let mut prev_gap_seq = std::u32::MAX;
        let mut all_consecutive = true;
        // Iteration is in seqnum order (BTreeSet over the GapPacket Ord).
        for gap_packet in gap_packets.iter() {
            let mut rtp_buffer = RTPBuffer::from_buffer_readable(&gap_packet.0).unwrap();
            let gap_pt = rtp_buffer.get_payload_type();
            let gap_seq = rtp_buffer.get_seq();
            gst_log!(
                CAT,
                obj: element,
                "Looking at gap packet with seq {}",
                gap_seq
            );
            drop(rtp_buffer);
            // NOTE(review): this assignment overwrites any verdict from the
            // previous iteration; an earlier "not consecutive" result only
            // survives via the early break below — confirm this matches the
            // upstream rtpjitterbuffer logic.
            all_consecutive = gap_pt == pt;
            if prev_gap_seq == std::u32::MAX {
                prev_gap_seq = gap_seq as u32;
            } else if gst_rtp::compare_seqnum(gap_seq, prev_gap_seq as u16) != -1 {
                all_consecutive = false;
            } else {
                prev_gap_seq = gap_seq as u32;
            }
            if !all_consecutive {
                break;
            }
        }
        gst_debug!(CAT, obj: element, "all consecutive: {}", all_consecutive);
        if all_consecutive && gap_packets_length > 3 {
            reset = true;
        } else if !all_consecutive {
            gap_packets.clear();
        }
    }
    reset
}
/// Flushes the jitterbuffer and resets all sequence/spacing tracking after a
/// large gap. Returns the collected gap packets so the CALLER can re-enqueue
/// them (doing it here would recurse through `store`).
fn reset(
    &self,
    state: &mut MutexGuard<'_, State>,
    element: &gst::Element,
) -> BTreeSet<GapPacket> {
    gst_info!(CAT, obj: element, "Resetting");
    // Drop everything buffered and restart skew estimation from scratch.
    state.jbuf.borrow().flush();
    state.jbuf.borrow().reset_skew();
    state.discont = true;
    state.last_popped_seqnum = std::u32::MAX;
    state.last_in_seqnum = std::u32::MAX;
    state.ips_rtptime = 0;
    state.ips_pts = gst::CLOCK_TIME_NONE;
    // Swap in an empty gap-packet set and hand the old one back.
    state.gap_packets.replace(BTreeSet::new()).unwrap()
}
// Validates, timestamps and inserts one RTP buffer into the jitterbuffer.
// Handles caps/clock-rate discovery, big-gap detection (returning
// FlowError::CustomError to request a reset from `enqueue_item`), late and
// duplicate packets, spacing estimation, and earliest-pts bookkeeping.
async fn store(
    &self,
    state: &mut MutexGuard<'_, State>,
    pad: &gst::Pad,
    element: &gst::Element,
    buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let (max_misorder_time, max_dropout_time) = {
        let settings = self.settings.lock().await;
        (settings.max_misorder_time, settings.max_dropout_time)
    };
    let (seq, rtptime, pt) = {
        let mut rtp_buffer =
            RTPBuffer::from_buffer_readable(&buffer).map_err(|_| gst::FlowError::Error)?;
        (
            rtp_buffer.get_seq(),
            rtp_buffer.get_timestamp(),
            rtp_buffer.get_payload_type(),
        )
    };
    let mut pts = buffer.get_pts();
    let mut dts = buffer.get_dts();
    let mut estimated_dts = false;
    gst_log!(
        CAT,
        obj: element,
        "Storing buffer, seq: {}, rtptime: {}, pt: {}",
        seq,
        rtptime,
        pt
    );
    // Fill whichever of pts/dts is missing from the other; if both are
    // missing, estimate from the current running time.
    if dts == gst::CLOCK_TIME_NONE {
        dts = pts;
    } else if pts == gst::CLOCK_TIME_NONE {
        pts = dts;
    }
    if dts == gst::CLOCK_TIME_NONE {
        dts = self.get_current_running_time(element);
        pts = dts;
        estimated_dts = state.clock_rate != -1;
    } else {
        dts = state.segment.to_running_time(dts);
    }
    if state.clock_rate == -1 {
        state.ips_rtptime = rtptime;
        state.ips_pts = pts;
    }
    // Payload type changed: re-learn the clock rate from the pad caps.
    if state.last_pt != pt as u32 {
        state.last_pt = pt as u32;
        state.clock_rate = -1;
        gst_debug!(CAT, obj: pad, "New payload type: {}", pt);
        if let Some(caps) = pad.get_current_caps() {
            self.parse_caps(state, element, &caps, pt)?;
        }
    }
    // Still no clock rate: ask the application via "request-pt-map".
    if state.clock_rate == -1 {
        let caps = element
            .emit("request-pt-map", &[&(pt as u32)])
            .map_err(|_| gst::FlowError::Error)?
            .ok_or(gst::FlowError::Error)?
            .get::<gst::Caps>()
            .map_err(|_| gst::FlowError::Error)?
            .ok_or(gst::FlowError::Error)?;
        self.parse_caps(state, element, &caps, pt)?;
    }
    state.packet_rate_ctx.update(seq, rtptime);
    let max_dropout = state
        .packet_rate_ctx
        .get_max_dropout(max_dropout_time as i32);
    // NOTE(review): this calls get_max_dropout with the *misorder* time;
    // presumably it should be a get_max_misorder counterpart — confirm
    // against the RTPPacketRateCtx binding and upstream rtpjitterbuffer.
    let max_misorder = state
        .packet_rate_ctx
        .get_max_dropout(max_misorder_time as i32);
    pts = state.jbuf.borrow().calculate_pts(
        dts,
        estimated_dts,
        rtptime,
        element.get_base_time(),
        0,
        false,
    );
    if pts.is_none() {
        gst_debug!(
            CAT,
            obj: element,
            "cannot calculate a valid pts for #{}, discard",
            seq
        );
        return Ok(gst::FlowSuccess::Ok);
    }
    if state.last_in_seqnum != std::u32::MAX {
        let gap = gst_rtp::compare_seqnum(state.last_in_seqnum as u16, seq);
        if gap == 1 {
            // Normal consecutive packet: refine the spacing estimate.
            self.calculate_packet_spacing(state, rtptime, pts);
        } else {
            // Too-large gap (either direction): collect the packet and maybe
            // request a reset; otherwise just restart spacing estimation.
            if (gap != -1 && gap < -(max_misorder as i32)) || (gap >= max_dropout as i32) {
                let reset = self.handle_big_gap_buffer(state, element, buffer, pt);
                if reset {
                    // Handle reset in `enqueue_item` to avoid recursion
                    return Err(gst::FlowError::CustomError);
                } else {
                    return Ok(gst::FlowSuccess::Ok);
                }
            }
            state.ips_pts = gst::CLOCK_TIME_NONE;
            state.ips_rtptime = 0;
        }
        state.gap_packets.as_mut().unwrap().clear();
    }
    // Drop packets at or before the last one already pushed downstream.
    if state.last_popped_seqnum != std::u32::MAX {
        let gap = gst_rtp::compare_seqnum(state.last_popped_seqnum as u16, seq);
        if gap <= 0 {
            state.num_late += 1;
            gst_debug!(CAT, obj: element, "Dropping late {}", seq);
            return Ok(gst::FlowSuccess::Ok);
        }
    }
    state.last_in_seqnum = seq as u32;
    let jb_item = if estimated_dts {
        RTPJitterBufferItem::new(buffer, gst::CLOCK_TIME_NONE, pts, seq as u32, rtptime)
    } else {
        RTPJitterBufferItem::new(buffer, dts, pts, seq as u32, rtptime)
    };
    let (success, _, _) = state.jbuf.borrow().insert(jb_item);
    if !success {
        /* duplicate */
        return Ok(gst::FlowSuccess::Ok);
    }
    // Track how regularly packets are spaced (clamped saturating counter).
    if rtptime == state.last_rtptime {
        state.equidistant -= 2;
    } else {
        state.equidistant += 1;
    }
    state.equidistant = min(max(state.equidistant, -7), 7);
    state.last_rtptime = rtptime;
    // Keep the earliest (pts, seqnum) pair for wakeup scheduling.
    if state.earliest_pts.is_none()
        || (pts.is_some()
            && (pts < state.earliest_pts
                || (pts == state.earliest_pts && seq > state.earliest_seqnum)))
    {
        state.earliest_pts = pts;
        state.earliest_seqnum = seq;
    }
    gst_log!(CAT, obj: pad, "Stored buffer");
    Ok(gst::FlowSuccess::Ok)
}
// Emits "GstRTPPacketLost" custom events for every seqnum between the last
// popped packet and `seqnum`, spreading the available time interval evenly
// across the gap. When the gap exceeds the configured latency, a single
// aggregate lost event covers the packets that can no longer arrive in time.
// Sets `discont` when anything was missing; errors when an event push fails.
async fn push_lost_events(
    &self,
    state: &mut MutexGuard<'_, State>,
    element: &gst::Element,
    seqnum: u32,
    pts: gst::ClockTime,
    discont: &mut bool,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let (latency_ns, do_lost) = {
        let settings = self.settings.lock().await;
        (
            settings.latency_ms as i64 * gst::MSECOND.nseconds().unwrap() as i64,
            settings.do_lost,
        )
    };
    let mut ret = true;
    gst_debug!(
        CAT,
        obj: element,
        "Pushing lost events seq: {}, last popped seq: {}",
        seqnum,
        state.last_popped_seqnum
    );
    if state.last_popped_seqnum != std::u32::MAX {
        let mut lost_seqnum = ((state.last_popped_seqnum + 1) & 0xffff) as i64;
        let gap = gst_rtp::compare_seqnum(lost_seqnum as u16, seqnum as u16) as i64;
        if gap > 0 {
            // Evenly distribute the elapsed time over the missing packets.
            let interval = pts.nseconds().unwrap() as i64
                - state.last_popped_pts.nseconds().unwrap() as i64;
            let spacing = if interval >= 0 {
                interval / (gap as i64 + 1)
            } else {
                0
            };
            *discont = true;
            // More time missing than we buffer: report the unrecoverable part
            // as one aggregate lost event.
            if state.equidistant > 0 && gap > 1 && gap * spacing > latency_ns {
                let n_packets = gap - latency_ns / spacing;
                if do_lost {
                    let s = gst::Structure::new(
                        "GstRTPPacketLost",
                        &[
                            ("seqnum", &(lost_seqnum as u32)),
                            (
                                "timestamp",
                                &(state.last_popped_pts + gst::ClockTime(Some(spacing as u64))),
                            ),
                            ("duration", &((n_packets * spacing) as u64)),
                            ("retry", &0),
                        ],
                    );
                    let event = gst::Event::new_custom_downstream(s).build();
                    ret = self.src_pad.push_event(event).await;
                }
                lost_seqnum = (lost_seqnum + n_packets) & 0xffff;
                state.last_popped_pts += gst::ClockTime(Some((n_packets * spacing) as u64));
                state.num_lost += n_packets as u64;
                if !ret {
                    return Err(gst::FlowError::Error);
                }
            }
            // One lost event per remaining missing seqnum (16-bit wrap-around).
            while lost_seqnum != seqnum as i64 {
                let timestamp = state.last_popped_pts + gst::ClockTime(Some(spacing as u64));
                let duration = if state.equidistant > 0 { spacing } else { 0 };
                state.last_popped_pts = timestamp;
                if do_lost {
                    let s = gst::Structure::new(
                        "GstRTPPacketLost",
                        &[
                            ("seqnum", &(lost_seqnum as u32)),
                            ("timestamp", &timestamp),
                            ("duration", &(duration as u64)),
                            ("retry", &0),
                        ],
                    );
                    let event = gst::Event::new_custom_downstream(s).build();
                    ret = self.src_pad.push_event(event).await;
                }
                state.num_lost += 1;
                if !ret {
                    break;
                }
                lost_seqnum = (lost_seqnum + 1) & 0xffff;
            }
        }
    }
    if ret {
        Ok(gst::FlowSuccess::Ok)
    } else {
        Err(gst::FlowError::Error)
    }
}
// Pops the head item from the jitterbuffer, emits lost events for any gap
// before it, rewrites its timestamps into running time (monotonic against
// the last pushed pts), flags DISCONT when needed, and pushes it downstream.
async fn pop_and_push(
    &self,
    state: &mut MutexGuard<'_, State>,
    element: &gst::Element,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let mut discont = false;
    let (jb_item, _) = state.jbuf.borrow().pop();
    let dts = jb_item.get_dts();
    let pts = jb_item.get_pts();
    let seq = jb_item.get_seqnum();
    let mut buffer = jb_item.get_buffer();
    let buffer = buffer.make_mut();
    buffer.set_dts(state.segment.to_running_time(dts));
    buffer.set_pts(state.segment.to_running_time(pts));
    // Never let pts go backwards relative to what we already pushed.
    if state.last_popped_pts.is_some() && buffer.get_pts() < state.last_popped_pts {
        buffer.set_pts(state.last_popped_pts)
    }
    self.push_lost_events(state, element, seq, pts, &mut discont)
        .await?;
    if state.discont {
        discont = true;
        state.discont = false;
    }
    state.last_popped_pts = buffer.get_pts();
    state.last_popped_seqnum = seq;
    if discont {
        buffer.set_flags(gst::BufferFlags::DISCONT);
    }
    state.num_pushed += 1;
    gst_debug!(CAT, obj: self.src_pad.gst_pad(), "Pushing {:?} with seq {}", buffer, seq);
    self.src_pad.push(buffer.to_owned()).await
}
// (Re)arms the wakeup future that will pop buffers once the earliest stored
// packet has been buffered for the configured latency. Any previously
// scheduled wakeup is aborted and awaited first.
async fn schedule(&self, state: &mut MutexGuard<'_, State>, element: &gst::Element) {
    let (latency_ns, context_wait_ns) = {
        let settings = self.settings.lock().await;
        (
            settings.latency_ms as u64 * gst::MSECOND,
            settings.context_wait as u64 * gst::MSECOND,
        )
    };
    let now = self.get_current_running_time(element);
    gst_debug!(
        CAT,
        obj: element,
        "now is {}, earliest pts is {}, packet_spacing {} and latency {}",
        now,
        state.earliest_pts,
        state.packet_spacing,
        latency_ns
    );
    // Nothing buffered: nothing to wake up for.
    if state.earliest_pts.is_none() {
        return;
    }
    let next_wakeup = state.earliest_pts + latency_ns - state.packet_spacing;
    let delay = {
        if next_wakeup > now {
            (next_wakeup - now).nseconds().unwrap()
        } else {
            0
        }
    };
    // Cancel and await the previous wakeup so only one is ever pending.
    if let Some(wakeup_abort_handle) = state.wakeup_abort_handle.take() {
        wakeup_abort_handle.abort();
    }
    if let Some(wakeup_join_handle) = state.wakeup_join_handle.take() {
        let _ = wakeup_join_handle.await;
    }
    gst_debug!(CAT, obj: element, "Scheduling wakeup in {}", delay);
    let (wakeup_fut, abort_handle) = abortable(Self::wakeup_fut(
        Duration::from_nanos(delay),
        latency_ns,
        context_wait_ns,
        &element,
        self.src_pad.downgrade(),
    ));
    state.wakeup_join_handle = Some(self.src_pad.spawn(wakeup_fut));
    state.wakeup_abort_handle = Some(abort_handle);
}
// Builds the future run by `schedule`: sleep for `delay`, then pop and push
// every buffer whose deadline (earliest pts + latency, less spacing and half
// the context wait) has passed, then re-arm the next wakeup.
fn wakeup_fut(
    delay: Duration,
    latency_ns: gst::ClockTime,
    context_wait_ns: gst::ClockTime,
    element: &gst::Element,
    pad_src_weak: PadSrcWeak,
) -> BoxFuture<'static, ()> {
    let element = element.clone();
    async move {
        runtime::time::delay_for(delay).await;
        let jb = Self::from_instance(&element);
        let mut state = jb.state.lock().await;
        // Bail out silently when the pad or its context is already gone.
        let pad_src = match pad_src_weak.upgrade() {
            Some(pad_src) => pad_src,
            None => return,
        };
        let pad_ctx = pad_src.pad_context();
        let pad_ctx = match pad_ctx.upgrade() {
            Some(pad_ctx) => pad_ctx,
            None => return,
        };
        let now = jb.get_current_running_time(&element);
        gst_debug!(
            CAT,
            obj: &element,
            "Woke back up, earliest_pts {}",
            state.earliest_pts
        );
        /* Check earliest PTS as we have just taken the lock */
        if state.earliest_pts.is_some()
            && state.earliest_pts + latency_ns - state.packet_spacing - context_wait_ns / 2
                < now
        {
            loop {
                let (head_pts, head_seq) = state.jbuf.borrow().peek();
                state.last_res = jb.pop_and_push(&mut state, &element).await;
                // Drain any tasks the push queued on the pad context.
                if let Some(drain_fut) = pad_ctx.drain_pending_tasks() {
                    let (abortable_drain, abort_handle) = abortable(drain_fut);
                    state.task_queue_abort_handle = Some(abort_handle);
                    pad_src.spawn(abortable_drain.map(drop));
                } else {
                    state.task_queue_abort_handle = None;
                }
                let has_pending_tasks = state.task_queue_abort_handle.is_some();
                // We popped the tracked earliest item: find the new earliest.
                if head_pts == state.earliest_pts && head_seq == state.earliest_seqnum as u32 {
                    let (earliest_pts, earliest_seqnum) = state.jbuf.borrow().find_earliest();
                    state.earliest_pts = earliest_pts;
                    state.earliest_seqnum = earliest_seqnum as u16;
                }
                // Stop once tasks are pending, the buffer is empty, or the
                // next item's deadline has not passed yet.
                if has_pending_tasks
                    || state.earliest_pts.is_none()
                    || state.earliest_pts + latency_ns - state.packet_spacing >= now
                {
                    break;
                }
            }
        }
        jb.schedule(&mut state, &element).await;
    }
    .boxed()
}
/// Stores an incoming buffer (if any) and reschedules the wakeup.
///
/// Uses an explicit work queue instead of recursion: when `store` signals a
/// reset via `FlowError::CustomError`, the collected gap packets are pushed
/// back onto the queue to be stored again.
async fn enqueue_item(
    &self,
    pad: &gst::Pad,
    element: &gst::Element,
    buffer: Option<gst::Buffer>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let mut state = self.state.lock().await;

    // Seed the work queue with the buffer, when one was provided.
    let mut pending: VecDeque<gst::Buffer> = buffer.into_iter().collect();

    // This is to avoid recursion with `store`, `reset` and `enqueue_item`.
    while let Some(buf) = pending.pop_front() {
        match self.store(&mut state, pad, element, buf).await {
            Ok(_) => (),
            Err(gst::FlowError::CustomError) => {
                // `store` requested a reset: re-queue the gap packets.
                for gap_packet in &self.reset(&mut state, element) {
                    pending.push_back(gap_packet.0.to_owned());
                }
            }
            Err(other) => return Err(other),
        }
    }

    self.schedule(&mut state, element).await;
    state.last_res
}
/// Pops and pushes every remaining buffer in the jitterbuffer downstream.
///
/// Returns `false` as soon as a push fails, `true` once the buffer is empty.
async fn drain(&self, state: &mut MutexGuard<'_, State>, element: &gst::Element) -> bool {
    let mut ret = true;
    loop {
        let (head_pts, _) = state.jbuf.borrow().peek();
        // An unset head PTS means the jitterbuffer is empty. Use the
        // idiomatic `is_none()` check, consistent with the `earliest_pts`
        // checks elsewhere in this file (equivalent to comparing against
        // `gst::CLOCK_TIME_NONE`).
        if head_pts.is_none() {
            break;
        }
        if self.pop_and_push(state, element).await.is_err() {
            ret = false;
            break;
        }
    }
    ret
}
/// Discards all buffered data and resets the element state to its defaults.
async fn flush(&self, element: &gst::Element) {
    let mut state_guard = self.state.lock().await;
    gst_info!(CAT, obj: element, "Flushing");
    *state_guard = State::default();
}
/// Handles the `clear-pt-map` action signal: invalidates the cached clock
/// rate and resets the jitterbuffer's skew estimation.
async fn clear_pt_map(&self, element: &gst::Element) {
    gst_info!(CAT, obj: element, "Clearing PT map");
    let mut state_guard = self.state.lock().await;
    state_guard.clock_rate = -1;
    state_guard.jbuf.borrow().reset_skew();
}
}
impl ObjectSubclass for JitterBuffer {
    const NAME: &'static str = "RsTsJitterBuffer";
    type ParentType = gst::Element;
    type Instance = gst::subclass::ElementInstanceStruct<Self>;
    type Class = subclass::simple::ClassStruct<Self>;

    glib_object_subclass!();

    // One-time class setup: element metadata, pad templates, signals and
    // properties.
    fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
        klass.set_metadata(
            "Thread-sharing jitterbuffer",
            "Generic",
            "Simple jitterbuffer",
            "Mathieu Duponchelle <mathieu@centricular.com>",
        );

        // Both pads accept any caps.
        let caps = gst::Caps::new_any();

        let sink_pad_template = gst::PadTemplate::new(
            "sink",
            gst::PadDirection::Sink,
            gst::PadPresence::Always,
            &caps,
        )
        .unwrap();
        klass.add_pad_template(sink_pad_template);

        // Emitted when the clock-rate for a payload type is unknown; the
        // handler is expected to return caps carrying it (see `store`).
        klass.add_signal(
            "request-pt-map",
            glib::SignalFlags::RUN_LAST,
            &[u32::static_type()],
            gst::Caps::static_type(),
        );

        // Action signal allowing applications to invalidate the PT map.
        klass.add_signal_with_class_handler(
            "clear-pt-map",
            glib::SignalFlags::RUN_LAST | glib::SignalFlags::ACTION,
            &[],
            glib::types::Type::Unit,
            |_, args| {
                let element = args[0]
                    .get::<gst::Element>()
                    .expect("signal arg")
                    .expect("missing signal arg");
                let jitterbuffer = Self::from_instance(&element);
                // Action signals are synchronous, so block on the async impl.
                runtime::executor::block_on(jitterbuffer.clear_pt_map(&element));
                None
            },
        );

        let src_pad_template = gst::PadTemplate::new(
            "src",
            gst::PadDirection::Src,
            gst::PadPresence::Always,
            &caps,
        )
        .unwrap();
        klass.add_pad_template(src_pad_template);

        klass.install_properties(&PROPERTIES);
    }

    // Per-instance construction: create the wrapped pads from the templates
    // and initialise state/settings with their defaults.
    fn new_with_class(klass: &subclass::simple::ClassStruct<Self>) -> Self {
        let templ = klass.get_pad_template("sink").unwrap();
        let sink_pad = PadSink::new_from_template(&templ, Some("sink"));
        let templ = klass.get_pad_template("src").unwrap();
        let src_pad = PadSrc::new_from_template(&templ, Some("src"));

        Self {
            sink_pad,
            src_pad,
            state: Mutex::new(State::default()),
            settings: Mutex::new(Settings::default()),
        }
    }
}
impl ObjectImpl for JitterBuffer {
    glib_object_impl!();

    // Property setter. Settings are behind an async mutex, so each write
    // blocks on acquiring the lock.
    fn set_property(&self, _obj: &glib::Object, id: usize, value: &glib::Value) {
        let prop = &PROPERTIES[id];

        match *prop {
            subclass::Property("latency", ..) => {
                let latency_ms = {
                    let mut settings = runtime::executor::block_on(self.settings.lock());
                    settings.latency_ms = value.get_some().expect("type checked upstream");
                    settings.latency_ms as u64
                };

                // Propagate the new latency to the underlying jitterbuffer.
                runtime::executor::block_on(self.state.lock())
                    .jbuf
                    .borrow()
                    .set_delay(latency_ms * gst::MSECOND);

                /* TODO: post message */
            }
            subclass::Property("do-lost", ..) => {
                let mut settings = runtime::executor::block_on(self.settings.lock());
                settings.do_lost = value.get_some().expect("type checked upstream");
            }
            subclass::Property("max-dropout-time", ..) => {
                let mut settings = runtime::executor::block_on(self.settings.lock());
                settings.max_dropout_time = value.get_some().expect("type checked upstream");
            }
            subclass::Property("max-misorder-time", ..) => {
                let mut settings = runtime::executor::block_on(self.settings.lock());
                settings.max_misorder_time = value.get_some().expect("type checked upstream");
            }
            subclass::Property("context", ..) => {
                let mut settings = runtime::executor::block_on(self.settings.lock());
                settings.context = value
                    .get()
                    .expect("type checked upstream")
                    .unwrap_or_else(|| "".into());
            }
            subclass::Property("context-wait", ..) => {
                let mut settings = runtime::executor::block_on(self.settings.lock());
                settings.context_wait = value.get_some().expect("type checked upstream");
            }
            _ => unimplemented!(),
        }
    }

    // Property getter. "stats" is assembled on the fly from the counters in
    // `State`; everything else is read straight from `Settings`.
    fn get_property(&self, _obj: &glib::Object, id: usize) -> Result<glib::Value, ()> {
        let prop = &PROPERTIES[id];

        match *prop {
            subclass::Property("latency", ..) => {
                let settings = runtime::executor::block_on(self.settings.lock());
                Ok(settings.latency_ms.to_value())
            }
            subclass::Property("do-lost", ..) => {
                let settings = runtime::executor::block_on(self.settings.lock());
                Ok(settings.do_lost.to_value())
            }
            subclass::Property("max-dropout-time", ..) => {
                let settings = runtime::executor::block_on(self.settings.lock());
                Ok(settings.max_dropout_time.to_value())
            }
            subclass::Property("max-misorder-time", ..) => {
                let settings = runtime::executor::block_on(self.settings.lock());
                Ok(settings.max_misorder_time.to_value())
            }
            subclass::Property("stats", ..) => {
                let state = runtime::executor::block_on(self.state.lock());
                let s = gst::Structure::new(
                    "application/x-rtp-jitterbuffer-stats",
                    &[
                        ("num-pushed", &state.num_pushed),
                        ("num-lost", &state.num_lost),
                        ("num-late", &state.num_late),
                    ],
                );
                Ok(s.to_value())
            }
            subclass::Property("context", ..) => {
                let settings = runtime::executor::block_on(self.settings.lock());
                Ok(settings.context.to_value())
            }
            subclass::Property("context-wait", ..) => {
                let settings = runtime::executor::block_on(self.settings.lock());
                Ok(settings.context_wait.to_value())
            }
            _ => unimplemented!(),
        }
    }

    // Attach both wrapped pads to the element once the object is constructed.
    fn constructed(&self, obj: &glib::Object) {
        self.parent_constructed(obj);

        let element = obj.downcast_ref::<gst::Element>().unwrap();
        element.add_pad(self.sink_pad.gst_pad()).unwrap();
        element.add_pad(self.src_pad.gst_pad()).unwrap();
    }
}
impl ElementImpl for JitterBuffer {
    // Prepares/unprepares the pads and cancels outstanding wakeups as the
    // element moves through its state transitions.
    fn change_state(
        &self,
        element: &gst::Element,
        transition: gst::StateChange,
    ) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
        gst_trace!(CAT, obj: element, "Changing state {:?}", transition);

        match transition {
            gst::StateChange::NullToReady => runtime::executor::block_on(async {
                let _state = self.state.lock().await;

                let context = {
                    let settings = self.settings.lock().await;
                    Context::acquire(&settings.context, settings.context_wait).unwrap()
                };

                // NOTE(review): the error from `prepare` is mapped to a
                // StateChangeError but then discarded by `let _ = ...`, so a
                // failed src pad preparation does not fail the transition —
                // confirm whether this is intentional.
                let _ = self
                    .src_pad
                    .prepare(context, &JitterBufferPadSrcHandler)
                    .await
                    .map_err(|err| {
                        gst_error_msg!(
                            gst::ResourceError::OpenRead,
                            ["Error preparing src_pad: {:?}", err]
                        );
                        gst::StateChangeError
                    });

                self.sink_pad.prepare(&JitterBufferPadSinkHandler).await;
            }),
            gst::StateChange::PausedToReady => runtime::executor::block_on(async {
                let mut state = self.state.lock().await;

                // Cancel any scheduled wakeup and pending task drain.
                if let Some(wakeup_abort_handle) = state.wakeup_abort_handle.take() {
                    wakeup_abort_handle.abort();
                }

                if let Some(abort_handle) = state.task_queue_abort_handle.take() {
                    abort_handle.abort();
                }
            }),
            gst::StateChange::ReadyToNull => runtime::executor::block_on(async {
                let mut state = self.state.lock().await;

                self.sink_pad.unprepare().await;
                let _ = self.src_pad.unprepare().await;

                state.jbuf.borrow().flush();

                if let Some(wakeup_abort_handle) = state.wakeup_abort_handle.take() {
                    wakeup_abort_handle.abort();
                }
            }),
            _ => (),
        }

        self.parent_change_state(element, transition)
    }
}
/// Registers the `ts-jitterbuffer` element with the given plugin.
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    let element_type = JitterBuffer::get_type();
    gst::Element::register(Some(plugin), "ts-jitterbuffer", gst::Rank::None, element_type)
}
jitterbuffer: don't try to lock the state in query handlers

Instead, add position and latency fields to the PadSrcHandler so that
queries can be answered without taking the state lock.

Fixes #93
// Copyright (C) 2018 Sebastian Dröge <sebastian@centricular.com>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Library General Public
// License as published by the Free Software Foundation; either
// version 2 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Library General Public License for more details.
//
// You should have received a copy of the GNU Library General Public
// License along with this library; if not, write to the
// Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
// Boston, MA 02110-1335, USA.
use either::Either;
use futures::future::BoxFuture;
use futures::future::{abortable, AbortHandle, Aborted};
use futures::lock::{Mutex, MutexGuard};
use futures::prelude::*;
use glib;
use glib::prelude::*;
use glib::subclass;
use glib::subclass::prelude::*;
use glib::{glib_object_impl, glib_object_subclass};
use gst;
use gst::prelude::*;
use gst::subclass::prelude::*;
use gst::{gst_debug, gst_error_msg, gst_info, gst_log, gst_trace};
use gst_rtp::RTPBuffer;
use lazy_static::lazy_static;
use std::cmp::{max, min, Ordering};
use std::collections::{BTreeSet, VecDeque};
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering::Relaxed;
use std::sync::Arc;
use std::time::Duration;
use crate::runtime::prelude::*;
use crate::runtime::{
self, Context, JoinHandle, PadContext, PadSink, PadSinkRef, PadSrc, PadSrcRef, PadSrcWeak,
};
use super::{RTPJitterBuffer, RTPJitterBufferItem, RTPPacketRateCtx};
// Default values for the element's properties (see `PROPERTIES` below).
const DEFAULT_LATENCY_MS: u32 = 200;
const DEFAULT_DO_LOST: bool = false;
// Milliseconds of missing packets tolerated before a reset is considered.
const DEFAULT_MAX_DROPOUT_TIME: u32 = 60000;
// Milliseconds of misordered packets tolerated.
const DEFAULT_MAX_MISORDER_TIME: u32 = 2000;
// Empty string: use the default threadshare context.
const DEFAULT_CONTEXT: &str = "";
const DEFAULT_CONTEXT_WAIT: u32 = 0;
/// User-configurable settings, mirroring the element's properties.
#[derive(Debug, Clone)]
struct Settings {
    // Amount of buffering, in milliseconds.
    latency_ms: u32,
    // Whether to send a downstream event when a packet is lost.
    do_lost: bool,
    // Maximum tolerated time of missing packets, in milliseconds.
    max_dropout_time: u32,
    // Maximum tolerated time of misordered packets, in milliseconds.
    max_misorder_time: u32,
    // Name of the threadshare context to run on.
    context: String,
    // Throttle interval for the context's poll loop, in milliseconds.
    context_wait: u32,
}
// Defaults match the `DEFAULT_*` constants used in the property definitions.
impl Default for Settings {
    fn default() -> Self {
        Settings {
            latency_ms: DEFAULT_LATENCY_MS,
            do_lost: DEFAULT_DO_LOST,
            max_dropout_time: DEFAULT_MAX_DROPOUT_TIME,
            max_misorder_time: DEFAULT_MAX_MISORDER_TIME,
            context: DEFAULT_CONTEXT.into(),
            context_wait: DEFAULT_CONTEXT_WAIT,
        }
    }
}
// GObject property definitions, installed in `class_init` and dispatched by
// index in `set_property`/`get_property`.
static PROPERTIES: [subclass::Property; 7] = [
    subclass::Property("latency", |name| {
        glib::ParamSpec::uint(
            name,
            "Buffer latency in ms",
            "Amount of ms to buffer",
            0,
            std::u32::MAX,
            DEFAULT_LATENCY_MS,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("do-lost", |name| {
        glib::ParamSpec::boolean(
            name,
            "Do Lost",
            "Send an event downstream when a packet is lost",
            DEFAULT_DO_LOST,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("max-dropout-time", |name| {
        glib::ParamSpec::uint(
            name,
            "Max dropout time",
            "The maximum time (milliseconds) of missing packets tolerated.",
            0,
            std::u32::MAX,
            DEFAULT_MAX_DROPOUT_TIME,
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("max-misorder-time", |name| {
        glib::ParamSpec::uint(
            name,
            "Max misorder time",
            "The maximum time (milliseconds) of misordered packets tolerated.",
            0,
            std::u32::MAX,
            DEFAULT_MAX_MISORDER_TIME,
            glib::ParamFlags::READWRITE,
        )
    }),
    // Read-only: assembled from the State counters in `get_property`.
    subclass::Property("stats", |name| {
        glib::ParamSpec::boxed(
            name,
            "Statistics",
            "Various statistics",
            gst::Structure::static_type(),
            glib::ParamFlags::READABLE,
        )
    }),
    subclass::Property("context", |name| {
        glib::ParamSpec::string(
            name,
            "Context",
            "Context name to share threads with",
            Some(DEFAULT_CONTEXT),
            glib::ParamFlags::READWRITE,
        )
    }),
    subclass::Property("context-wait", |name| {
        glib::ParamSpec::uint(
            name,
            "Context Wait",
            "Throttle poll loop to run at most once every this many ms",
            0,
            1000,
            DEFAULT_CONTEXT_WAIT,
            glib::ParamFlags::READWRITE,
        )
    }),
];
/// Stateless handler for the sink pad; the actual work happens on the
/// `JitterBuffer` instance resolved from the element.
#[derive(Clone, Debug)]
struct JitterBufferPadSinkHandler;
impl PadSinkHandler for JitterBufferPadSinkHandler {
    type ElementImpl = JitterBuffer;

    // Incoming buffers are handed to `enqueue_item`, which stores them in the
    // jitterbuffer and (re)schedules the wakeup.
    fn sink_chain(
        &self,
        pad: &PadSinkRef,
        _jitterbuffer: &JitterBuffer,
        element: &gst::Element,
        buffer: gst::Buffer,
    ) -> BoxFuture<'static, Result<gst::FlowSuccess, gst::FlowError>> {
        let pad_weak = pad.downgrade();
        let element = element.clone();
        async move {
            let pad = pad_weak.upgrade().expect("PadSink no longer exists");
            gst_debug!(CAT, obj: pad.gst_pad(), "Handling {:?}", buffer);
            let jitterbuffer = JitterBuffer::from_instance(&element);
            jitterbuffer
                .enqueue_item(pad.gst_pad(), &element, Some(buffer))
                .await
        }
        .boxed()
    }

    // Serialized events are handled asynchronously (they may need the state
    // lock); non-serialized ones are forwarded synchronously.
    fn sink_event(
        &self,
        pad: &PadSinkRef,
        jitterbuffer: &JitterBuffer,
        element: &gst::Element,
        event: gst::Event,
    ) -> Either<bool, BoxFuture<'static, bool>> {
        use gst::EventView;

        if event.is_serialized() {
            let pad_weak = pad.downgrade();
            let element = element.clone();
            Either::Right(
                async move {
                    let pad = pad_weak.upgrade().expect("PadSink no longer exists");
                    let mut forward = true;

                    gst_log!(CAT, obj: pad.gst_pad(), "Handling {:?}", event);

                    let jitterbuffer = JitterBuffer::from_instance(&element);
                    match event.view() {
                        EventView::FlushStop(..) => {
                            // Drop all buffered data and reset the state.
                            jitterbuffer.flush(&element).await;
                        }
                        EventView::Segment(e) => {
                            let mut state = jitterbuffer.state.lock().await;
                            state.segment = e
                                .get_segment()
                                .clone()
                                .downcast::<gst::format::Time>()
                                .unwrap();
                        }
                        EventView::Eos(..) => {
                            // Push out everything still buffered before EOS.
                            let mut state = jitterbuffer.state.lock().await;
                            jitterbuffer.drain(&mut state, &element).await;
                        }
                        EventView::CustomDownstreamSticky(e) => {
                            // Internal threadshare bookkeeping event; eat it.
                            if PadContext::is_pad_context_sticky_event(&e) {
                                forward = false;
                            }
                        }
                        _ => (),
                    };

                    if forward {
                        gst_log!(CAT, obj: pad.gst_pad(), "Forwarding serialized {:?}", event);
                        jitterbuffer.src_pad.push_event(event).await
                    } else {
                        true
                    }
                }
                .boxed(),
            )
        } else {
            gst_log!(CAT, obj: pad.gst_pad(), "Forwarding non-serialized {:?}", event);
            Either::Left(jitterbuffer.src_pad.gst_pad().push_event(event))
        }
    }

    // Drain queries flush the jitterbuffer synchronously; everything else is
    // forwarded to the src pad's peer.
    // NOTE(review): `block_on` here takes the state lock from within a query
    // handler — verify this cannot deadlock with the streaming thread.
    fn sink_query(
        &self,
        pad: &PadSinkRef,
        jitterbuffer: &JitterBuffer,
        element: &gst::Element,
        query: &mut gst::QueryRef,
    ) -> bool {
        use gst::QueryView;

        gst_log!(CAT, obj: pad.gst_pad(), "Forwarding {:?}", query);
        match query.view_mut() {
            QueryView::Drain(..) => {
                gst_info!(CAT, obj: pad.gst_pad(), "Draining");
                runtime::executor::block_on(jitterbuffer.enqueue_item(pad.gst_pad(), element, None))
                    .is_ok()
            }
            _ => jitterbuffer.src_pad.gst_pad().peer_query(query),
        }
    }
}
/// Data the src pad handler needs to answer queries without taking the
/// element's state lock (see "Fixes #93" in the commit message).
#[derive(Debug)]
struct JitterBufferPadSrcHandlerInner {
    // Configured latency, fixed at handler construction time.
    latency: gst::ClockTime,
    // Last pushed position in nanoseconds, updated from `pop_and_push`.
    // `u64::MAX` means "not set yet".
    position: AtomicU64,
}

/// Cheaply clonable src pad handler sharing its inner data via `Arc`.
#[derive(Clone, Debug)]
struct JitterBufferPadSrcHandler(Arc<JitterBufferPadSrcHandlerInner>);

impl JitterBufferPadSrcHandler {
    fn new(latency: gst::ClockTime) -> Self {
        JitterBufferPadSrcHandler(Arc::new(JitterBufferPadSrcHandlerInner {
            latency,
            // Sentinel for "no buffer pushed yet".
            position: AtomicU64::new(std::u64::MAX),
        }))
    }
}
impl PadSrcHandler for JitterBufferPadSrcHandler {
    type ElementImpl = JitterBuffer;

    // Answers latency and time-format position queries from the handler's own
    // cached data, so no state lock is needed; everything else is forwarded
    // upstream through the sink pad.
    fn src_query(
        &self,
        pad: &PadSrcRef,
        jitterbuffer: &JitterBuffer,
        _element: &gst::Element,
        query: &mut gst::QueryRef,
    ) -> bool {
        use gst::QueryView;

        gst_log!(CAT, obj: pad.gst_pad(), "Forwarding {:?}", query);
        match query.view_mut() {
            QueryView::Latency(ref mut q) => {
                // Query upstream, then add our own configured latency on top.
                let mut peer_query = gst::query::Query::new_latency();

                let ret = jitterbuffer.sink_pad.gst_pad().peer_query(&mut peer_query);

                if ret {
                    let (_, mut min_latency, _) = peer_query.get_result();
                    min_latency += self.0.latency;
                    let max_latency = gst::CLOCK_TIME_NONE;

                    q.set(true, min_latency, max_latency);
                }

                ret
            }
            QueryView::Position(ref mut q) => {
                if q.get_format() != gst::Format::Time {
                    jitterbuffer.sink_pad.gst_pad().peer_query(query)
                } else {
                    // NOTE(review): before the first push, `position` is still
                    // the u64::MAX sentinel and is returned wrapped in Some —
                    // confirm whether that should map to CLOCK_TIME_NONE.
                    let position = self.0.position.load(Relaxed);
                    q.set(gst::ClockTime(Some(position)));

                    true
                }
            }
            _ => jitterbuffer.sink_pad.gst_pad().peer_query(query),
        }
    }
}
/// A buffer received during a large sequence-number gap, kept aside until we
/// decide whether to reset the jitterbuffer (see `handle_big_gap_buffer`).
#[derive(Eq)]
struct GapPacket(gst::Buffer);
impl Ord for GapPacket {
    /// Orders gap packets by RTP sequence number, using the wrap-around-aware
    /// `compare_seqnum` comparison.
    fn cmp(&self, other: &Self) -> Ordering {
        // Map each buffer just long enough to read its sequence number; the
        // temporaries are dropped at the end of each statement.
        let seq = RTPBuffer::from_buffer_readable(&self.0).unwrap().get_seq();
        let other_seq = RTPBuffer::from_buffer_readable(&other.0).unwrap().get_seq();

        // `0.cmp(&x)` is equivalent to `x.cmp(&0).reverse()`.
        gst_rtp::compare_seqnum(seq, other_seq).cmp(&0).reverse()
    }
}
impl PartialOrd for GapPacket {
    // Delegate to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(Ord::cmp(self, other))
    }
}
impl PartialEq for GapPacket {
    // Two gap packets are equal when their sequence numbers compare equal.
    fn eq(&self, other: &Self) -> bool {
        Ordering::Equal == self.cmp(other)
    }
}
/// Mutable element state, guarded by the async mutex in `JitterBuffer`.
struct State {
    // The underlying C-backed jitterbuffer; `SendUniqueCell` allows moving it
    // between the threads of the shared context.
    jbuf: glib::SendUniqueCell<RTPJitterBuffer>,
    packet_rate_ctx: RTPPacketRateCtx,
    // Negative means "unknown"; resolved via caps or `request-pt-map`.
    clock_rate: i32,
    segment: gst::FormattedSegment<gst::ClockTime>,
    // rtptime/pts pair used for inter-packet spacing estimation.
    ips_rtptime: u32,
    ips_pts: gst::ClockTime,
    // u32::MAX sentinels below mean "not seen yet".
    last_pt: u32,
    last_in_seqnum: u32,
    // Smoothed spacing between consecutive packets.
    packet_spacing: gst::ClockTime,
    // Buffers collected across a big sequence gap; always `Some` outside of
    // `reset` (the Option allows taking ownership there).
    gap_packets: Option<BTreeSet<GapPacket>>,
    last_popped_seqnum: u32,
    // Counters exposed through the "stats" property.
    num_pushed: u64,
    num_lost: u64,
    num_late: u64,
    last_rtptime: u32,
    // Saturating score in [-7, 7] tracking whether packets are equidistant.
    equidistant: i32,
    // Cached earliest entry of the jitterbuffer, used for scheduling.
    earliest_pts: gst::ClockTime,
    earliest_seqnum: u16,
    last_popped_pts: gst::ClockTime,
    // Force a DISCONT flag on the next pushed buffer.
    discont: bool,
    // Result of the most recent pop-and-push, reported back from enqueue.
    last_res: Result<gst::FlowSuccess, gst::FlowError>,
    // Abort/join handles for the spawned wakeup and task-drain futures.
    task_queue_abort_handle: Option<AbortHandle>,
    wakeup_abort_handle: Option<AbortHandle>,
    wakeup_join_handle: Option<JoinHandle<Result<(), Aborted>>>,
    // Shared with the src pad so queries avoid taking this state lock.
    src_pad_handler: JitterBufferPadSrcHandler,
}
impl Default for State {
    // Initial state: empty jitterbuffer, unknown clock-rate/payload-type, all
    // sequence/time trackers set to their "unset" sentinels.
    fn default() -> State {
        State {
            jbuf: glib::SendUniqueCell::new(RTPJitterBuffer::new()).unwrap(),
            packet_rate_ctx: RTPPacketRateCtx::new(),
            clock_rate: -1,
            segment: gst::FormattedSegment::<gst::ClockTime>::new(),
            ips_rtptime: 0,
            ips_pts: gst::CLOCK_TIME_NONE,
            last_pt: std::u32::MAX,
            last_in_seqnum: std::u32::MAX,
            packet_spacing: gst::ClockTime(Some(0)),
            gap_packets: Some(BTreeSet::new()),
            last_popped_seqnum: std::u32::MAX,
            num_pushed: 0,
            num_lost: 0,
            num_late: 0,
            last_rtptime: std::u32::MAX,
            equidistant: 0,
            earliest_pts: gst::CLOCK_TIME_NONE,
            earliest_seqnum: 0,
            last_popped_pts: gst::CLOCK_TIME_NONE,
            discont: false,
            last_res: Ok(gst::FlowSuccess::Ok),
            task_queue_abort_handle: None,
            wakeup_abort_handle: None,
            wakeup_join_handle: None,
            // The handler starts with the default latency; presumably it is
            // replaced when the element is prepared — confirm against the
            // (not visible here) pad preparation code.
            src_pad_handler: JitterBufferPadSrcHandler::new(
                DEFAULT_LATENCY_MS as u64 * gst::MSECOND,
            ),
        }
    }
}
/// Instance data of the `ts-jitterbuffer` element.
struct JitterBuffer {
    sink_pad: PadSink,
    src_pad: PadSrc,
    // Runtime state and user settings, each behind its own async mutex.
    state: Mutex<State>,
    settings: Mutex<Settings>,
}
// Debug category used by all logging in this element.
lazy_static! {
    static ref CAT: gst::DebugCategory = gst::DebugCategory::new(
        "ts-jitterbuffer",
        gst::DebugColorFlags::empty(),
        Some("Thread-sharing jitterbuffer"),
    );
}
impl JitterBuffer {
/// Returns the element's current running time (clock time minus base time),
/// clamped at zero, or `CLOCK_TIME_NONE` when the element has no clock.
fn get_current_running_time(&self, element: &gst::Element) -> gst::ClockTime {
    if let Some(clock) = element.get_clock() {
        // Sample the clock once so the comparison and the subtraction see the
        // same instant (the original queried the clock twice, which performs
        // two clock reads and could in principle observe different times).
        let now = clock.get_time();
        let base_time = element.get_base_time();
        if now > base_time {
            now - base_time
        } else {
            gst::ClockTime(Some(0))
        }
    } else {
        gst::CLOCK_TIME_NONE
    }
}
fn parse_caps(
&self,
state: &mut MutexGuard<State>,
element: &gst::Element,
caps: &gst::Caps,
pt: u8,
) -> Result<gst::FlowSuccess, gst::FlowError> {
let s = caps.get_structure(0).ok_or(gst::FlowError::Error)?;
gst_info!(CAT, obj: element, "Parsing {:?}", caps);
let payload = s
.get_some::<i32>("payload")
.map_err(|_| gst::FlowError::Error)?;
if pt != 0 && payload as u8 != pt {
return Err(gst::FlowError::Error);
}
state.last_pt = pt as u32;
state.clock_rate = s
.get_some::<i32>("clock-rate")
.map_err(|_| gst::FlowError::Error)?;
if state.clock_rate <= 0 {
return Err(gst::FlowError::Error);
}
let clock_rate = state.clock_rate;
state.packet_rate_ctx.reset(clock_rate);
state.jbuf.borrow().set_clock_rate(clock_rate as u32);
Ok(gst::FlowSuccess::Ok)
}
/// Updates the smoothed inter-packet spacing estimate when the RTP timestamp
/// advances, weighting towards the smaller of the old and new spacing.
fn calculate_packet_spacing(
    &self,
    state: &mut MutexGuard<State>,
    rtptime: u32,
    pts: gst::ClockTime,
) {
    if state.ips_rtptime != rtptime {
        if state.ips_pts.is_some() && pts.is_some() {
            let new_packet_spacing = pts - state.ips_pts;
            let old_packet_spacing = state.packet_spacing;

            // Weighted average: favor whichever of the two spacings is
            // smaller (3:1), or adopt the new value outright if there was no
            // previous estimate.
            if old_packet_spacing > new_packet_spacing {
                state.packet_spacing = (new_packet_spacing + 3 * old_packet_spacing) / 4;
            } else if old_packet_spacing > gst::ClockTime(Some(0)) {
                state.packet_spacing = (3 * new_packet_spacing + old_packet_spacing) / 4;
            } else {
                state.packet_spacing = new_packet_spacing;
            }

            gst_debug!(
                CAT,
                "new packet spacing {}, old packet spacing {} combined to {}",
                new_packet_spacing,
                old_packet_spacing,
                state.packet_spacing
            );
        }
        // Remember this rtptime/pts pair as the base for the next estimate.
        state.ips_rtptime = rtptime;
        state.ips_pts = pts;
    }
}
/// Collects a buffer that arrived across a large sequence-number gap and
/// decides whether the stream should be reset.
///
/// Returns `true` (reset) once more than 3 gap packets with the same payload
/// type and strictly consecutive sequence numbers have been collected; clears
/// the collection as soon as it stops being consecutive.
fn handle_big_gap_buffer(
    &self,
    state: &mut MutexGuard<State>,
    element: &gst::Element,
    buffer: gst::Buffer,
    pt: u8,
) -> bool {
    let gap_packets = state.gap_packets.as_mut().unwrap();
    // Length *before* inserting the new packet.
    let gap_packets_length = gap_packets.len();
    let mut reset = false;

    gst_debug!(
        CAT,
        obj: element,
        "Handling big gap, gap packets length: {}",
        gap_packets_length
    );

    gap_packets.insert(GapPacket(buffer));

    if gap_packets_length > 0 {
        // u32::MAX sentinel: no previous packet inspected yet.
        let mut prev_gap_seq = std::u32::MAX;
        let mut all_consecutive = true;

        // The BTreeSet iterates in seqnum order (see `Ord for GapPacket`).
        for gap_packet in gap_packets.iter() {
            let mut rtp_buffer = RTPBuffer::from_buffer_readable(&gap_packet.0).unwrap();

            let gap_pt = rtp_buffer.get_payload_type();
            let gap_seq = rtp_buffer.get_seq();

            gst_log!(
                CAT,
                obj: element,
                "Looking at gap packet with seq {}",
                gap_seq
            );

            drop(rtp_buffer);

            // Consecutive run requires a matching payload type and each
            // seqnum exactly one behind the previous (compare_seqnum == -1).
            all_consecutive = gap_pt == pt;

            if prev_gap_seq == std::u32::MAX {
                prev_gap_seq = gap_seq as u32;
            } else if gst_rtp::compare_seqnum(gap_seq, prev_gap_seq as u16) != -1 {
                all_consecutive = false;
            } else {
                prev_gap_seq = gap_seq as u32;
            }

            if !all_consecutive {
                break;
            }
        }

        gst_debug!(CAT, obj: element, "all consecutive: {}", all_consecutive);

        if all_consecutive && gap_packets_length > 3 {
            reset = true;
        } else if !all_consecutive {
            gap_packets.clear();
        }
    }

    reset
}
/// Resets the jitterbuffer after a big gap was confirmed.
///
/// Flushes the underlying buffer, clears sequence tracking and returns the
/// collected gap packets so the caller can re-store them (done by the caller
/// to avoid recursion with `store`/`enqueue_item`).
fn reset(
    &self,
    state: &mut MutexGuard<'_, State>,
    element: &gst::Element,
) -> BTreeSet<GapPacket> {
    gst_info!(CAT, obj: element, "Resetting");

    state.jbuf.borrow().flush();
    state.jbuf.borrow().reset_skew();
    // Mark the next pushed buffer as discontinuous.
    state.discont = true;
    state.last_popped_seqnum = std::u32::MAX;
    state.last_in_seqnum = std::u32::MAX;
    state.ips_rtptime = 0;
    state.ips_pts = gst::CLOCK_TIME_NONE;

    // Take ownership of the gap packets and install a fresh empty set.
    let gap_packets = state.gap_packets.take();
    state.gap_packets = Some(BTreeSet::new());

    // Handle gap_packets in caller to avoid recursion
    gap_packets.unwrap()
}
/// Stores one incoming RTP buffer into the jitterbuffer.
///
/// Resolves timestamps (estimating the DTS from the clock when the buffer
/// carries none), resolves the clock-rate (from caps or the `request-pt-map`
/// signal), detects big gaps and late packets, and finally inserts the item
/// and updates the cached earliest pts/seqnum used for scheduling.
///
/// Returns `Err(FlowError::CustomError)` to request a reset (handled by
/// `enqueue_item` to avoid recursion), other errors on real failures.
async fn store(
    &self,
    state: &mut MutexGuard<'_, State>,
    pad: &gst::Pad,
    element: &gst::Element,
    buffer: gst::Buffer,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let (max_misorder_time, max_dropout_time) = {
        let settings = self.settings.lock().await;
        (settings.max_misorder_time, settings.max_dropout_time)
    };

    // Map the buffer just long enough to read the RTP header fields.
    let (seq, rtptime, pt) = {
        let mut rtp_buffer =
            RTPBuffer::from_buffer_readable(&buffer).map_err(|_| gst::FlowError::Error)?;
        (
            rtp_buffer.get_seq(),
            rtp_buffer.get_timestamp(),
            rtp_buffer.get_payload_type(),
        )
    };

    let mut pts = buffer.get_pts();
    let mut dts = buffer.get_dts();
    let mut estimated_dts = false;

    gst_log!(
        CAT,
        obj: element,
        "Storing buffer, seq: {}, rtptime: {}, pt: {}",
        seq,
        rtptime,
        pt
    );

    // Fill in whichever of pts/dts is missing from the other.
    if dts == gst::CLOCK_TIME_NONE {
        dts = pts;
    } else if pts == gst::CLOCK_TIME_NONE {
        pts = dts;
    }

    if dts == gst::CLOCK_TIME_NONE {
        // Neither timestamp present: estimate from the pipeline clock.
        dts = self.get_current_running_time(element);
        pts = dts;

        estimated_dts = state.clock_rate != -1;
    } else {
        dts = state.segment.to_running_time(dts);
    }

    if state.clock_rate == -1 {
        state.ips_rtptime = rtptime;
        state.ips_pts = pts;
    }

    // Payload type changed: invalidate the clock-rate and re-parse caps.
    if state.last_pt != pt as u32 {
        state.last_pt = pt as u32;
        state.clock_rate = -1;

        gst_debug!(CAT, obj: pad, "New payload type: {}", pt);

        if let Some(caps) = pad.get_current_caps() {
            self.parse_caps(state, element, &caps, pt)?;
        }
    }

    // Still unknown: ask the application via the request-pt-map signal.
    if state.clock_rate == -1 {
        let caps = element
            .emit("request-pt-map", &[&(pt as u32)])
            .map_err(|_| gst::FlowError::Error)?
            .ok_or(gst::FlowError::Error)?
            .get::<gst::Caps>()
            .map_err(|_| gst::FlowError::Error)?
            .ok_or(gst::FlowError::Error)?;
        self.parse_caps(state, element, &caps, pt)?;
    }

    state.packet_rate_ctx.update(seq, rtptime);

    let max_dropout = state
        .packet_rate_ctx
        .get_max_dropout(max_dropout_time as i32);
    // Fixed: the misorder bound was previously computed with
    // `get_max_dropout`, applying the wrong conversion to max_misorder_time.
    let max_misorder = state
        .packet_rate_ctx
        .get_max_misorder(max_misorder_time as i32);

    pts = state.jbuf.borrow().calculate_pts(
        dts,
        estimated_dts,
        rtptime,
        element.get_base_time(),
        0,
        false,
    );

    if pts.is_none() {
        gst_debug!(
            CAT,
            obj: element,
            "cannot calculate a valid pts for #{}, discard",
            seq
        );
        return Ok(gst::FlowSuccess::Ok);
    }

    if state.last_in_seqnum != std::u32::MAX {
        let gap = gst_rtp::compare_seqnum(state.last_in_seqnum as u16, seq);
        if gap == 1 {
            // Normal in-order packet: refine the spacing estimate.
            self.calculate_packet_spacing(state, rtptime, pts);
        } else {
            // Out of the tolerated window: collect as a gap packet and
            // possibly request a reset.
            if (gap != -1 && gap < -(max_misorder as i32)) || (gap >= max_dropout as i32) {
                let reset = self.handle_big_gap_buffer(state, element, buffer, pt);
                if reset {
                    // Handle reset in `enqueue_item` to avoid recursion
                    return Err(gst::FlowError::CustomError);
                } else {
                    return Ok(gst::FlowSuccess::Ok);
                }
            }
            state.ips_pts = gst::CLOCK_TIME_NONE;
            state.ips_rtptime = 0;
        }

        state.gap_packets.as_mut().unwrap().clear();
    }

    // Drop packets that arrive after their slot was already popped.
    if state.last_popped_seqnum != std::u32::MAX {
        let gap = gst_rtp::compare_seqnum(state.last_popped_seqnum as u16, seq);

        if gap <= 0 {
            state.num_late += 1;
            gst_debug!(CAT, obj: element, "Dropping late {}", seq);
            return Ok(gst::FlowSuccess::Ok);
        }
    }

    state.last_in_seqnum = seq as u32;

    let jb_item = if estimated_dts {
        RTPJitterBufferItem::new(buffer, gst::CLOCK_TIME_NONE, pts, seq as u32, rtptime)
    } else {
        RTPJitterBufferItem::new(buffer, dts, pts, seq as u32, rtptime)
    };

    let (success, _, _) = state.jbuf.borrow().insert(jb_item);

    if !success {
        /* duplicate */
        return Ok(gst::FlowSuccess::Ok);
    }

    // Track how equidistant the packets are (saturating score in [-7, 7]).
    if rtptime == state.last_rtptime {
        state.equidistant -= 2;
    } else {
        state.equidistant += 1;
    }
    state.equidistant = min(max(state.equidistant, -7), 7);

    state.last_rtptime = rtptime;

    // Update the cached earliest entry used by the wakeup scheduler.
    if state.earliest_pts.is_none()
        || (pts.is_some()
            && (pts < state.earliest_pts
                || (pts == state.earliest_pts && seq > state.earliest_seqnum)))
    {
        state.earliest_pts = pts;
        state.earliest_seqnum = seq;
    }

    gst_log!(CAT, obj: pad, "Stored buffer");

    Ok(gst::FlowSuccess::Ok)
}
/// Emits `GstRTPPacketLost` events for every sequence number between the last
/// popped packet and `seqnum`, interpolating timestamps across the gap.
///
/// When the gap exceeds the configured latency and packets look equidistant,
/// a single aggregated lost event covers the unrecoverable span first; the
/// remaining seqnums then each get an individual event.
async fn push_lost_events(
    &self,
    state: &mut MutexGuard<'_, State>,
    element: &gst::Element,
    seqnum: u32,
    pts: gst::ClockTime,
    discont: &mut bool,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let (latency_ns, do_lost) = {
        let settings = self.settings.lock().await;
        (
            settings.latency_ms as i64 * gst::MSECOND.nseconds().unwrap() as i64,
            settings.do_lost,
        )
    };

    let mut ret = true;

    gst_debug!(
        CAT,
        obj: element,
        "Pushing lost events seq: {}, last popped seq: {}",
        seqnum,
        state.last_popped_seqnum
    );

    if state.last_popped_seqnum != std::u32::MAX {
        // First seqnum after the last one we popped, with 16-bit wrap.
        let mut lost_seqnum = ((state.last_popped_seqnum + 1) & 0xffff) as i64;
        let gap = gst_rtp::compare_seqnum(lost_seqnum as u16, seqnum as u16) as i64;

        if gap > 0 {
            // Spread the elapsed time evenly over the missing packets.
            let interval = pts.nseconds().unwrap() as i64
                - state.last_popped_pts.nseconds().unwrap() as i64;
            let spacing = if interval >= 0 {
                interval / (gap as i64 + 1)
            } else {
                0
            };

            *discont = true;

            // Gap larger than the latency window: everything beyond it is
            // unrecoverable, report it as one aggregated lost event.
            // (spacing is non-zero here because `gap * spacing > latency_ns`.)
            if state.equidistant > 0 && gap > 1 && gap * spacing > latency_ns {
                let n_packets = gap - latency_ns / spacing;

                if do_lost {
                    let s = gst::Structure::new(
                        "GstRTPPacketLost",
                        &[
                            ("seqnum", &(lost_seqnum as u32)),
                            (
                                "timestamp",
                                &(state.last_popped_pts + gst::ClockTime(Some(spacing as u64))),
                            ),
                            ("duration", &((n_packets * spacing) as u64)),
                            ("retry", &0),
                        ],
                    );

                    let event = gst::Event::new_custom_downstream(s).build();
                    ret = self.src_pad.push_event(event).await;
                }

                lost_seqnum = (lost_seqnum + n_packets) & 0xffff;
                state.last_popped_pts += gst::ClockTime(Some((n_packets * spacing) as u64));
                state.num_lost += n_packets as u64;

                if !ret {
                    return Err(gst::FlowError::Error);
                }
            }

            // One event per remaining missing seqnum up to `seqnum`.
            while lost_seqnum != seqnum as i64 {
                let timestamp = state.last_popped_pts + gst::ClockTime(Some(spacing as u64));
                let duration = if state.equidistant > 0 { spacing } else { 0 };

                state.last_popped_pts = timestamp;

                if do_lost {
                    let s = gst::Structure::new(
                        "GstRTPPacketLost",
                        &[
                            ("seqnum", &(lost_seqnum as u32)),
                            ("timestamp", &timestamp),
                            ("duration", &(duration as u64)),
                            ("retry", &0),
                        ],
                    );

                    let event = gst::Event::new_custom_downstream(s).build();
                    ret = self.src_pad.push_event(event).await;
                }

                state.num_lost += 1;

                if !ret {
                    break;
                }

                lost_seqnum = (lost_seqnum + 1) & 0xffff;
            }
        }
    }

    if ret {
        Ok(gst::FlowSuccess::Ok)
    } else {
        Err(gst::FlowError::Error)
    }
}
/// Pops the head item from the jitterbuffer and pushes it downstream,
/// emitting lost events for any seqnums skipped on the way.
async fn pop_and_push(
    &self,
    state: &mut MutexGuard<'_, State>,
    element: &gst::Element,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let mut discont = false;
    let (jb_item, _) = state.jbuf.borrow().pop();

    let dts = jb_item.get_dts();
    let pts = jb_item.get_pts();
    let seq = jb_item.get_seqnum();
    let mut buffer = jb_item.get_buffer();

    let buffer = buffer.make_mut();
    buffer.set_dts(state.segment.to_running_time(dts));
    buffer.set_pts(state.segment.to_running_time(pts));

    // Never let timestamps go backwards relative to what we already pushed.
    if state.last_popped_pts.is_some() && buffer.get_pts() < state.last_popped_pts {
        buffer.set_pts(state.last_popped_pts)
    }

    self.push_lost_events(state, element, seq, pts, &mut discont)
        .await?;

    // A pending reset also forces a discont on this buffer.
    if state.discont {
        discont = true;
        state.discont = false;
    }

    state.last_popped_pts = buffer.get_pts();
    // Publish the position to the src pad handler so position queries can be
    // answered without the state lock.
    if let Some(pts) = state.last_popped_pts.nseconds() {
        state.src_pad_handler.0.position.store(pts, Relaxed);
    }
    state.last_popped_seqnum = seq;

    if discont {
        buffer.set_flags(gst::BufferFlags::DISCONT);
    }

    state.num_pushed += 1;

    gst_debug!(CAT, obj: self.src_pad.gst_pad(), "Pushing {:?} with seq {}", buffer, seq);

    self.src_pad.push(buffer.to_owned()).await
}
/// (Re)arms the wakeup that will pop the next due buffer.
///
/// Computes the deadline from the cached earliest PTS plus latency minus
/// packet spacing, aborts and joins any previously scheduled wakeup, then
/// spawns a fresh abortable `wakeup_fut` on the src pad's context.
async fn schedule(&self, state: &mut MutexGuard<'_, State>, element: &gst::Element) {
    let (latency_ns, context_wait_ns) = {
        let settings = self.settings.lock().await;
        (
            settings.latency_ms as u64 * gst::MSECOND,
            settings.context_wait as u64 * gst::MSECOND,
        )
    };

    let now = self.get_current_running_time(element);

    gst_debug!(
        CAT,
        obj: element,
        "now is {}, earliest pts is {}, packet_spacing {} and latency {}",
        now,
        state.earliest_pts,
        state.packet_spacing,
        latency_ns
    );

    // Nothing buffered: nothing to schedule.
    if state.earliest_pts.is_none() {
        return;
    }

    let next_wakeup = state.earliest_pts + latency_ns - state.packet_spacing;

    // Deadline already passed: fire immediately (zero delay).
    let delay = {
        if next_wakeup > now {
            (next_wakeup - now).nseconds().unwrap()
        } else {
            0
        }
    };

    // Cancel and wait out the previous wakeup so only one is ever in flight.
    if let Some(wakeup_abort_handle) = state.wakeup_abort_handle.take() {
        wakeup_abort_handle.abort();
    }

    if let Some(wakeup_join_handle) = state.wakeup_join_handle.take() {
        let _ = wakeup_join_handle.await;
    }

    gst_debug!(CAT, obj: element, "Scheduling wakeup in {}", delay);

    let (wakeup_fut, abort_handle) = abortable(Self::wakeup_fut(
        Duration::from_nanos(delay),
        latency_ns,
        context_wait_ns,
        &element,
        self.src_pad.downgrade(),
    ));

    state.wakeup_join_handle = Some(self.src_pad.spawn(wakeup_fut));
    state.wakeup_abort_handle = Some(abort_handle);
}
/// Builds the future run by a scheduled wakeup.
///
/// After sleeping for `delay` it re-acquires the state lock, re-checks
/// `earliest_pts` (a new packet may have arrived while we slept) and, if the
/// head of the jitterbuffer is due — accounting for latency, packet spacing
/// and half the context wait — pops and pushes buffers until it has caught
/// up. It finally re-arms the next wakeup via `schedule`.
fn wakeup_fut(
    delay: Duration,
    latency_ns: gst::ClockTime,
    context_wait_ns: gst::ClockTime,
    element: &gst::Element,
    pad_src_weak: PadSrcWeak,
) -> BoxFuture<'static, ()> {
    let element = element.clone();
    async move {
        runtime::time::delay_for(delay).await;
        let jb = Self::from_instance(&element);
        let mut state = jb.state.lock().await;
        // The pad may have been unprepared while we slept; bail out quietly.
        let pad_src = match pad_src_weak.upgrade() {
            Some(pad_src) => pad_src,
            None => return,
        };
        let pad_ctx = pad_src.pad_context();
        let pad_ctx = match pad_ctx.upgrade() {
            Some(pad_ctx) => pad_ctx,
            None => return,
        };
        let now = jb.get_current_running_time(&element);
        gst_debug!(
            CAT,
            obj: &element,
            "Woke back up, earliest_pts {}",
            state.earliest_pts
        );
        /* Check earliest PTS as we have just taken the lock */
        if state.earliest_pts.is_some()
            && state.earliest_pts + latency_ns - state.packet_spacing - context_wait_ns / 2
                < now
        {
            loop {
                // Remember the head before popping so we can tell whether the
                // cached earliest pts/seqnum needs recomputing afterwards.
                let (head_pts, head_seq) = state.jbuf.borrow().peek();
                state.last_res = jb.pop_and_push(&mut state, &element).await;
                // Hand any tasks queued during the push over to the pad's
                // context, keeping an abort handle so teardown can cancel them.
                if let Some(drain_fut) = pad_ctx.drain_pending_tasks() {
                    let (abortable_drain, abort_handle) = abortable(drain_fut);
                    state.task_queue_abort_handle = Some(abort_handle);
                    pad_src.spawn(abortable_drain.map(drop));
                } else {
                    state.task_queue_abort_handle = None;
                }
                let has_pending_tasks = state.task_queue_abort_handle.is_some();
                // Only recompute the earliest entry if we just popped it.
                if head_pts == state.earliest_pts && head_seq == state.earliest_seqnum as u32 {
                    let (earliest_pts, earliest_seqnum) = state.jbuf.borrow().find_earliest();
                    state.earliest_pts = earliest_pts;
                    state.earliest_seqnum = earliest_seqnum as u16;
                }
                // Stop once tasks are pending, the buffer is empty, or the
                // next buffer is not due yet.
                if has_pending_tasks
                    || state.earliest_pts.is_none()
                    || state.earliest_pts + latency_ns - state.packet_spacing >= now
                {
                    break;
                }
            }
        }
        jb.schedule(&mut state, &element).await;
    }
    .boxed()
}
/// Stores an incoming buffer (if any) into the jitterbuffer and schedules the
/// next wakeup.
///
/// A `CustomError` from `store` signals a reset: the gap packets returned by
/// `reset` are re-queued and stored iteratively, which avoids recursion
/// between `store`, `reset` and `enqueue_item`. Any other error is returned
/// to the caller. Returns the last flow result recorded in the state.
async fn enqueue_item(
    &self,
    pad: &gst::Pad,
    element: &gst::Element,
    buffer: Option<gst::Buffer>,
) -> Result<gst::FlowSuccess, gst::FlowError> {
    let mut state = self.state.lock().await;
    let mut buffers = VecDeque::new();
    if let Some(buf) = buffer {
        buffers.push_back(buf);
    }
    // This is to avoid recursion with `store`, `reset` and `enqueue_item`
    while let Some(buf) = buffers.pop_front() {
        if let Err(err) = self.store(&mut state, pad, element, buf).await {
            match err {
                gst::FlowError::CustomError => {
                    // Reset requested: re-enqueue the returned gap packets.
                    for gap_packet in &self.reset(&mut state, element) {
                        buffers.push_back(gap_packet.0.to_owned());
                    }
                }
                other => return Err(other),
            }
        }
    }
    self.schedule(&mut state, element).await;
    state.last_res
}
/// Pops and pushes every queued buffer until the jitterbuffer is empty.
///
/// Returns `false` as soon as a push fails, `true` once the buffer drains
/// completely.
async fn drain(&self, state: &mut MutexGuard<'_, State>, element: &gst::Element) -> bool {
    loop {
        let (head_pts, _) = state.jbuf.borrow().peek();
        if head_pts == gst::CLOCK_TIME_NONE {
            // Nothing left to drain.
            return true;
        }
        if self.pop_and_push(state, element).await.is_err() {
            return false;
        }
    }
}
/// Discards all queued data by resetting the element state to its defaults.
async fn flush(&self, element: &gst::Element) {
    let mut guard = self.state.lock().await;
    gst_info!(CAT, obj: element, "Flushing");
    *guard = State::default();
}
/// Forgets the negotiated clock rate and resets skew estimation, forcing the
/// payload-type map to be requested again.
async fn clear_pt_map(&self, element: &gst::Element) {
    gst_info!(CAT, obj: element, "Clearing PT map");
    let mut guard = self.state.lock().await;
    guard.clock_rate = -1;
    guard.jbuf.borrow().reset_skew();
}
}
impl ObjectSubclass for JitterBuffer {
    const NAME: &'static str = "RsTsJitterBuffer";
    type ParentType = gst::Element;
    type Instance = gst::subclass::ElementInstanceStruct<Self>;
    type Class = subclass::simple::ClassStruct<Self>;
    glib_object_subclass!();
    // One-time class setup: element metadata, pad templates, signals and
    // properties.
    fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
        klass.set_metadata(
            "Thread-sharing jitterbuffer",
            "Generic",
            "Simple jitterbuffer",
            "Mathieu Duponchelle <mathieu@centricular.com>",
        );
        // Both pads accept any caps.
        let caps = gst::Caps::new_any();
        let sink_pad_template = gst::PadTemplate::new(
            "sink",
            gst::PadDirection::Sink,
            gst::PadPresence::Always,
            &caps,
        )
        .unwrap();
        klass.add_pad_template(sink_pad_template);
        // "request-pt-map": emitted with a payload type (u32), expects caps back.
        klass.add_signal(
            "request-pt-map",
            glib::SignalFlags::RUN_LAST,
            &[u32::static_type()],
            gst::Caps::static_type(),
        );
        // "clear-pt-map": action signal; clears the PT map synchronously from
        // the emitter's thread via block_on.
        klass.add_signal_with_class_handler(
            "clear-pt-map",
            glib::SignalFlags::RUN_LAST | glib::SignalFlags::ACTION,
            &[],
            glib::types::Type::Unit,
            |_, args| {
                let element = args[0]
                    .get::<gst::Element>()
                    .expect("signal arg")
                    .expect("missing signal arg");
                let jitterbuffer = Self::from_instance(&element);
                runtime::executor::block_on(jitterbuffer.clear_pt_map(&element));
                None
            },
        );
        let src_pad_template = gst::PadTemplate::new(
            "src",
            gst::PadDirection::Src,
            gst::PadPresence::Always,
            &caps,
        )
        .unwrap();
        klass.add_pad_template(src_pad_template);
        klass.install_properties(&PROPERTIES);
    }
    // Per-instance construction: wrap the static pad templates in the
    // runtime's PadSink/PadSrc helpers and start with default state/settings.
    fn new_with_class(klass: &subclass::simple::ClassStruct<Self>) -> Self {
        let templ = klass.get_pad_template("sink").unwrap();
        let sink_pad = PadSink::new_from_template(&templ, Some("sink"));
        let templ = klass.get_pad_template("src").unwrap();
        let src_pad = PadSrc::new_from_template(&templ, Some("src"));
        Self {
            sink_pad,
            src_pad,
            state: Mutex::new(State::default()),
            settings: Mutex::new(Settings::default()),
        }
    }
}
impl ObjectImpl for JitterBuffer {
glib_object_impl!();
/// GObject property setter. Each arm blocks on the settings (or state) async
/// mutex from the caller's thread.
fn set_property(&self, _obj: &glib::Object, id: usize, value: &glib::Value) {
    let prop = &PROPERTIES[id];
    match *prop {
        subclass::Property("latency", ..) => {
            // The inner block drops the settings lock before the state lock
            // is taken, so the two locks are never held at the same time.
            let latency_ms = {
                let mut settings = runtime::executor::block_on(self.settings.lock());
                settings.latency_ms = value.get_some().expect("type checked upstream");
                settings.latency_ms as u64
            };
            // Propagate the new latency to the internal jitterbuffer.
            runtime::executor::block_on(self.state.lock())
                .jbuf
                .borrow()
                .set_delay(latency_ms * gst::MSECOND);
            /* TODO: post message */
        }
        subclass::Property("do-lost", ..) => {
            let mut settings = runtime::executor::block_on(self.settings.lock());
            settings.do_lost = value.get_some().expect("type checked upstream");
        }
        subclass::Property("max-dropout-time", ..) => {
            let mut settings = runtime::executor::block_on(self.settings.lock());
            settings.max_dropout_time = value.get_some().expect("type checked upstream");
        }
        subclass::Property("max-misorder-time", ..) => {
            let mut settings = runtime::executor::block_on(self.settings.lock());
            settings.max_misorder_time = value.get_some().expect("type checked upstream");
        }
        subclass::Property("context", ..) => {
            // A missing string value falls back to the empty context name.
            let mut settings = runtime::executor::block_on(self.settings.lock());
            settings.context = value
                .get()
                .expect("type checked upstream")
                .unwrap_or_else(|| "".into());
        }
        subclass::Property("context-wait", ..) => {
            let mut settings = runtime::executor::block_on(self.settings.lock());
            settings.context_wait = value.get_some().expect("type checked upstream");
        }
        _ => unimplemented!(),
    }
}
/// GObject property getter. Settings-backed properties read from the settings
/// mutex; "stats" builds a structure from counters in the state mutex.
fn get_property(&self, _obj: &glib::Object, id: usize) -> Result<glib::Value, ()> {
    let prop = &PROPERTIES[id];
    match *prop {
        subclass::Property("latency", ..) => {
            let settings = runtime::executor::block_on(self.settings.lock());
            Ok(settings.latency_ms.to_value())
        }
        subclass::Property("do-lost", ..) => {
            let settings = runtime::executor::block_on(self.settings.lock());
            Ok(settings.do_lost.to_value())
        }
        subclass::Property("max-dropout-time", ..) => {
            let settings = runtime::executor::block_on(self.settings.lock());
            Ok(settings.max_dropout_time.to_value())
        }
        subclass::Property("max-misorder-time", ..) => {
            let settings = runtime::executor::block_on(self.settings.lock());
            Ok(settings.max_misorder_time.to_value())
        }
        subclass::Property("stats", ..) => {
            // Snapshot the push/lost/late counters into a caps-like structure.
            let state = runtime::executor::block_on(self.state.lock());
            let s = gst::Structure::new(
                "application/x-rtp-jitterbuffer-stats",
                &[
                    ("num-pushed", &state.num_pushed),
                    ("num-lost", &state.num_lost),
                    ("num-late", &state.num_late),
                ],
            );
            Ok(s.to_value())
        }
        subclass::Property("context", ..) => {
            let settings = runtime::executor::block_on(self.settings.lock());
            Ok(settings.context.to_value())
        }
        subclass::Property("context-wait", ..) => {
            let settings = runtime::executor::block_on(self.settings.lock());
            Ok(settings.context_wait.to_value())
        }
        _ => unimplemented!(),
    }
}
/// Called once GObject construction completes; attaches both static pads to
/// the element.
fn constructed(&self, obj: &glib::Object) {
    self.parent_constructed(obj);
    let element = obj.downcast_ref::<gst::Element>().unwrap();
    for pad in &[self.sink_pad.gst_pad(), self.src_pad.gst_pad()] {
        element.add_pad(*pad).unwrap();
    }
}
}
impl ElementImpl for JitterBuffer {
/// Handles GStreamer state transitions: pad preparation on NullToReady,
/// cancellation of pending wakeups/tasks on PausedToReady, and teardown on
/// ReadyToNull. Each transition blocks on the element's async mutexes.
fn change_state(
    &self,
    element: &gst::Element,
    transition: gst::StateChange,
) -> Result<gst::StateChangeSuccess, gst::StateChangeError> {
    gst_trace!(CAT, obj: element, "Changing state {:?}", transition);
    match transition {
        gst::StateChange::NullToReady => runtime::executor::block_on(async {
            let mut state = self.state.lock().await;
            let (context, latency) = {
                let settings = self.settings.lock().await;
                let context =
                    Context::acquire(&settings.context, settings.context_wait).unwrap();
                let latency = settings.latency_ms as u64 * gst::MSECOND;
                (context, latency)
            };
            state.src_pad_handler = JitterBufferPadSrcHandler::new(latency);
            // NOTE(review): a prepare failure is mapped into an error message
            // that is built but never posted, and the resulting
            // StateChangeError is then discarded by `let _ =`, so the state
            // change still reports success — confirm this is intended.
            let _ = self
                .src_pad
                .prepare(context, &state.src_pad_handler)
                .await
                .map_err(|err| {
                    gst_error_msg!(
                        gst::ResourceError::OpenRead,
                        ["Error preparing src_pad: {:?}", err]
                    );
                    gst::StateChangeError
                });
            self.sink_pad.prepare(&JitterBufferPadSinkHandler).await;
        }),
        gst::StateChange::PausedToReady => runtime::executor::block_on(async {
            let mut state = self.state.lock().await;
            // Cancel any scheduled wakeup and any pending task-queue drain.
            if let Some(wakeup_abort_handle) = state.wakeup_abort_handle.take() {
                wakeup_abort_handle.abort();
            }
            if let Some(abort_handle) = state.task_queue_abort_handle.take() {
                abort_handle.abort();
            }
        }),
        gst::StateChange::ReadyToNull => runtime::executor::block_on(async {
            let mut state = self.state.lock().await;
            self.sink_pad.unprepare().await;
            let _ = self.src_pad.unprepare().await;
            state.jbuf.borrow().flush();
            if let Some(wakeup_abort_handle) = state.wakeup_abort_handle.take() {
                wakeup_abort_handle.abort();
            }
        }),
        _ => (),
    }
    self.parent_change_state(element, transition)
}
}
/// Registers the `ts-jitterbuffer` element with the given plugin.
pub fn register(plugin: &gst::Plugin) -> Result<(), glib::BoolError> {
    let element_type = JitterBuffer::get_type();
    gst::Element::register(Some(plugin), "ts-jitterbuffer", gst::Rank::None, element_type)
}
|
// Auto-generated hardware structs from vc_top.hjson
#![allow(unused)]
use modular_bitfield::prelude::*;
/// Interrupt state register: one pending flag per interrupt source
/// (layout generated from vc_top.hjson).
#[bitfield]
pub struct IntrState {
    pub host_req: bool,
    pub finish: bool,
    pub instruction_fault: bool,
    pub data_fault: bool,
    #[skip]
    _unused: B28,
}
/// Interrupt enable register: mirrors `IntrState` bit-for-bit.
#[bitfield]
pub struct IntrEnable {
    pub host_req: bool,
    pub finish: bool,
    pub instruction_fault: bool,
    pub data_fault: bool,
    #[skip]
    _unused: B28,
}
/// Interrupt test register: mirrors `IntrState` bit-for-bit.
#[bitfield]
pub struct IntrTest {
    pub host_req: bool,
    pub finish: bool,
    pub instruction_fault: bool,
    pub data_fault: bool,
    #[skip]
    _unused: B28,
}
/// Core control: freeze/reset flags and the 17-bit start PC.
#[bitfield]
pub struct Ctrl {
    pub freeze: bool,
    pub vc_reset: bool,
    pub pc_start: B17,
    #[skip]
    pub _unused0: B13,
}
/// Per-bank enables: 4 instruction-memory banks, 8 data-memory banks.
#[bitfield]
pub struct MemoryBankCtrl {
    pub i_mem_enable: B4,
    pub d_mem_enable: B8,
    #[skip]
    pub _unused0: B20,
}
/// Error status: out-of-range and per-bank disabled-access flags.
#[bitfield]
pub struct ErrorStatus {
    pub i_mem_out_of_range: bool,
    pub d_mem_out_of_range: bool,
    pub i_mem_disable_access: B4,
    pub d_mem_disable_access: B8,
    #[skip]
    pub _unused0: B18,
}
/// Init window start: 22-bit address plus IMEM/DMEM select.
#[bitfield]
pub struct InitStart {
    pub address: B22,
    pub imem_dmem_sel: bool,
    #[skip]
    pub _unused0: B9,
}
/// Init window end: 22-bit address plus a valid flag.
#[bitfield]
pub struct InitEnd {
    pub address: B22,
    pub valid: bool,
    #[skip]
    pub _unused0: B9,
}
/// Init progress: pending/done flags.
#[bitfield]
pub struct InitStatus {
    pub init_pending: bool,
    pub init_done: bool,
    #[skip]
    pub _unused0: B30,
}
extern "C" {
    // Base of the vector-core CSR block: 9 consecutive 32-bit registers.
    // NOTE(review): presumably resolved to the MMIO base address by the
    // linker script / startup code — confirm.
    static csr: *mut [u32; 9];
}
pub fn get_intr_state() -> IntrState {
unsafe { IntrState::from_bytes((*csr)[0].to_ne_bytes()) }
}
pub fn set_intr_state(intr_state: IntrState) {
unsafe {
(*csr)[0] = u32::from_ne_bytes(intr_state.into_bytes());
}
}
pub fn get_intr_enable() -> IntrEnable {
unsafe { IntrEnable::from_bytes((*csr)[1].to_ne_bytes()) }
}
pub fn set_intr_enable(intr_enable: IntrEnable) {
unsafe {
(*csr)[1] = u32::from_ne_bytes(intr_enable.into_bytes());
}
}
pub fn get_intr_test() -> IntrTest {
unsafe { IntrTest::from_bytes((*csr)[2].to_ne_bytes()) }
}
pub fn set_intr_test(intr_test: IntrTest) {
unsafe {
(*csr)[2] = u32::from_ne_bytes(intr_test.into_bytes());
}
}
pub fn get_ctrl() -> Ctrl {
unsafe { Ctrl::from_bytes((*csr)[3].to_ne_bytes()) }
}
pub fn set_ctrl(ctrl: Ctrl) {
unsafe {
(*csr)[3] = u32::from_ne_bytes(ctrl.into_bytes());
}
}
pub fn get_memory_bank_ctrl() -> MemoryBankCtrl {
unsafe { MemoryBankCtrl::from_bytes((*csr)[4].to_ne_bytes()) }
}
pub fn set_memory_bank_ctrl(memory_bank_ctrl: MemoryBankCtrl) {
unsafe {
(*csr)[4] = u32::from_ne_bytes(memory_bank_ctrl.into_bytes());
}
}
pub fn get_error_status() -> ErrorStatus {
unsafe { ErrorStatus::from_bytes((*csr)[5].to_ne_bytes()) }
}
pub fn set_error_status(error_status: ErrorStatus) {
unsafe {
(*csr)[5] = u32::from_ne_bytes(error_status.into_bytes());
}
}
pub fn get_init_start() -> InitStart {
unsafe { InitStart::from_bytes((*csr)[6].to_ne_bytes()) }
}
pub fn set_init_start(init_start: InitStart) {
unsafe {
(*csr)[6] = u32::from_ne_bytes(init_start.into_bytes());
}
}
pub fn get_init_end() -> InitEnd {
unsafe { InitEnd::from_bytes((*csr)[7].to_ne_bytes()) }
}
pub fn set_init_end(init_end: InitEnd) {
unsafe {
(*csr)[7] = u32::from_ne_bytes(init_end.into_bytes());
}
}
pub fn get_init_status() -> InitStatus {
unsafe { InitStatus::from_bytes((*csr)[8].to_ne_bytes()) }
}
pub fn set_init_status(init_status: InitStatus) {
unsafe {
(*csr)[8] = u32::from_ne_bytes(init_status.into_bytes());
}
}
Use read_volatile for reading from vc control
Without read_volatile the compiler may optimize away reads, which
causes issues with Renode simulation.
Change-Id: I8877798420cdecc2dece97d168db753adfe89163
GitOrigin-RevId: c7b3e2ee5500c993b48d9793f575758cb0c3a0ac
// Auto-generated hardware structs from vc_top.hjson
#![allow(unused)]
use modular_bitfield::prelude::*;
use core::ptr;
/// Interrupt state register: one pending flag per interrupt source
/// (layout generated from vc_top.hjson).
#[bitfield]
pub struct IntrState {
    pub host_req: bool,
    pub finish: bool,
    pub instruction_fault: bool,
    pub data_fault: bool,
    #[skip]
    _unused: B28,
}
/// Interrupt enable register: mirrors `IntrState` bit-for-bit.
#[bitfield]
pub struct IntrEnable {
    pub host_req: bool,
    pub finish: bool,
    pub instruction_fault: bool,
    pub data_fault: bool,
    #[skip]
    _unused: B28,
}
/// Interrupt test register: mirrors `IntrState` bit-for-bit.
#[bitfield]
pub struct IntrTest {
    pub host_req: bool,
    pub finish: bool,
    pub instruction_fault: bool,
    pub data_fault: bool,
    #[skip]
    _unused: B28,
}
/// Core control: freeze/reset flags and the 17-bit start PC.
#[bitfield]
pub struct Ctrl {
    pub freeze: bool,
    pub vc_reset: bool,
    pub pc_start: B17,
    #[skip]
    pub _unused0: B13,
}
/// Per-bank enables: 4 instruction-memory banks, 8 data-memory banks.
#[bitfield]
pub struct MemoryBankCtrl {
    pub i_mem_enable: B4,
    pub d_mem_enable: B8,
    #[skip]
    pub _unused0: B20,
}
/// Error status: out-of-range and per-bank disabled-access flags.
#[bitfield]
pub struct ErrorStatus {
    pub i_mem_out_of_range: bool,
    pub d_mem_out_of_range: bool,
    pub i_mem_disable_access: B4,
    pub d_mem_disable_access: B8,
    #[skip]
    pub _unused0: B18,
}
/// Init window start: 22-bit address plus IMEM/DMEM select.
#[bitfield]
pub struct InitStart {
    pub address: B22,
    pub imem_dmem_sel: bool,
    #[skip]
    pub _unused0: B9,
}
/// Init window end: 22-bit address plus a valid flag.
#[bitfield]
pub struct InitEnd {
    pub address: B22,
    pub valid: bool,
    #[skip]
    pub _unused0: B9,
}
/// Init progress: pending/done flags.
#[bitfield]
pub struct InitStatus {
    pub init_pending: bool,
    pub init_done: bool,
    #[skip]
    pub _unused0: B30,
}
extern "C" {
    // Base of the vector-core CSR block: 9 consecutive 32-bit registers.
    // NOTE(review): presumably resolved to the MMIO base address by the
    // linker script / startup code — confirm.
    static csr: *mut [u32; 9];
}
pub fn get_intr_state() -> IntrState {
unsafe { IntrState::from_bytes(ptr::read_volatile(csr)[0].to_ne_bytes()) }
}
pub fn set_intr_state(intr_state: IntrState) {
unsafe {
(*csr)[0] = u32::from_ne_bytes(intr_state.into_bytes());
}
}
pub fn get_intr_enable() -> IntrEnable {
unsafe { IntrEnable::from_bytes(ptr::read_volatile(csr)[1].to_ne_bytes()) }
}
pub fn set_intr_enable(intr_enable: IntrEnable) {
unsafe {
(*csr)[1] = u32::from_ne_bytes(intr_enable.into_bytes());
}
}
pub fn get_intr_test() -> IntrTest {
unsafe { IntrTest::from_bytes(ptr::read_volatile(csr)[2].to_ne_bytes()) }
}
pub fn set_intr_test(intr_test: IntrTest) {
unsafe {
(*csr)[2] = u32::from_ne_bytes(intr_test.into_bytes());
}
}
pub fn get_ctrl() -> Ctrl {
unsafe { Ctrl::from_bytes(ptr::read_volatile(csr)[3].to_ne_bytes()) }
}
pub fn set_ctrl(ctrl: Ctrl) {
unsafe {
(*csr)[3] = u32::from_ne_bytes(ctrl.into_bytes());
}
}
pub fn get_memory_bank_ctrl() -> MemoryBankCtrl {
unsafe { MemoryBankCtrl::from_bytes(ptr::read_volatile(csr)[4].to_ne_bytes()) }
}
pub fn set_memory_bank_ctrl(memory_bank_ctrl: MemoryBankCtrl) {
unsafe {
(*csr)[4] = u32::from_ne_bytes(memory_bank_ctrl.into_bytes());
}
}
pub fn get_error_status() -> ErrorStatus {
unsafe { ErrorStatus::from_bytes(ptr::read_volatile(csr)[5].to_ne_bytes()) }
}
pub fn set_error_status(error_status: ErrorStatus) {
unsafe {
(*csr)[5] = u32::from_ne_bytes(error_status.into_bytes());
}
}
pub fn get_init_start() -> InitStart {
unsafe { InitStart::from_bytes(ptr::read_volatile(csr)[6].to_ne_bytes()) }
}
pub fn set_init_start(init_start: InitStart) {
unsafe {
(*csr)[6] = u32::from_ne_bytes(init_start.into_bytes());
}
}
pub fn get_init_end() -> InitEnd {
unsafe { InitEnd::from_bytes(ptr::read_volatile(csr)[7].to_ne_bytes()) }
}
pub fn set_init_end(init_end: InitEnd) {
unsafe {
(*csr)[7] = u32::from_ne_bytes(init_end.into_bytes());
}
}
pub fn get_init_status() -> InitStatus {
unsafe { InitStatus::from_bytes(ptr::read_volatile(csr)[8].to_ne_bytes()) }
}
pub fn set_init_status(init_status: InitStatus) {
unsafe {
(*csr)[8] = u32::from_ne_bytes(init_status.into_bytes());
}
}
|
use int_to_bytes::int_to_bytes8;
use itertools::Itertools;
use ssz::ssz_encode;
use state_processing::per_block_processing::{
validate_attestation, verify_deposit_merkle_proof, verify_exit, verify_proposer_slashing,
verify_transfer, verify_transfer_partial,
};
use std::collections::{btree_map::Entry, hash_map, BTreeMap, HashMap, HashSet};
use types::chain_spec::Domain;
use types::{
Attestation, AttestationData, AttesterSlashing, BeaconState, ChainSpec, Deposit, Epoch,
ProposerSlashing, Transfer, VoluntaryExit,
};
// Merkle-proof verification is skipped under `cfg(test)`: the unit tests
// below build deposits with `random_for_test`, which presumably do not carry
// valid proofs (see `get_deposits`, which consults this flag) — confirm.
#[cfg(test)]
const VERIFY_DEPOSIT_PROOFS: bool = false;
#[cfg(not(test))]
const VERIFY_DEPOSIT_PROOFS: bool = true;
/// Pool of pending beacon-chain operations, bucketed per operation type so
/// each can be validated, selected for blocks, and pruned independently.
#[derive(Default)]
pub struct OperationPool {
    /// Map from attestation ID (see below) to vectors of attestations.
    attestations: HashMap<AttestationId, Vec<Attestation>>,
    /// Map from deposit index to deposit data.
    // NOTE: We assume that there is only one deposit per index
    // because the Eth1 data is updated (at most) once per epoch,
    // and the spec doesn't seem to accommodate for re-orgs on a time-frame
    // longer than an epoch
    deposits: BTreeMap<u64, Deposit>,
    /// Map from attester index to slashing.
    attester_slashings: BTreeMap<u64, AttesterSlashing>,
    /// Map from proposer index to slashing.
    proposer_slashings: BTreeMap<u64, ProposerSlashing>,
    /// Map from exiting validator to their exit data.
    voluntary_exits: BTreeMap<u64, VoluntaryExit>,
    /// Set of transfers.
    transfers: HashSet<Transfer>,
}
/// Serialized `AttestationData` augmented with a domain to encode the fork info.
///
/// Attestations with identical data but from different forks therefore hash
/// to different pool buckets.
#[derive(PartialEq, Eq, Clone, Hash, Debug)]
struct AttestationId(Vec<u8>);
/// Number of domain bytes that the end of an attestation ID is padded with.
const DOMAIN_BYTES_LEN: usize = 8;
impl AttestationId {
    /// Builds an ID: the SSZ-encoded attestation data with the fork-dependent
    /// domain bytes appended.
    fn from_data(attestation: &AttestationData, state: &BeaconState, spec: &ChainSpec) -> Self {
        let epoch = attestation.slot.epoch(spec.slots_per_epoch);
        let domain = AttestationId::compute_domain_bytes(epoch, state, spec);
        let mut encoded = ssz_encode(attestation);
        encoded.extend_from_slice(&domain);
        AttestationId(encoded)
    }

    /// Serializes the attestation domain for `epoch` under the state's fork.
    fn compute_domain_bytes(epoch: Epoch, state: &BeaconState, spec: &ChainSpec) -> Vec<u8> {
        int_to_bytes8(spec.get_domain(epoch, Domain::Attestation, &state.fork))
    }

    /// True if the ID's trailing `DOMAIN_BYTES_LEN` bytes equal `domain_bytes`.
    fn domain_bytes_match(&self, domain_bytes: &[u8]) -> bool {
        let tail_start = self.0.len() - DOMAIN_BYTES_LEN;
        &self.0[tail_start..] == domain_bytes
    }
}
/// Compute a fitness score for an attestation.
///
/// The score is calculated by determining the number of *new* attestations that
/// the aggregate attestation introduces, and is proportional to the size of the reward we will
/// receive for including it in a block.
// TODO: this could be optimised with a map from validator index to whether that validator has
// attested in the *current* epoch. Alternatively, we could cache an index that allows us to
// quickly look up the attestations in the current epoch for a given shard.
fn attestation_score(attestation: &Attestation, state: &BeaconState) -> usize {
// Bitfield of validators whose attestations are new/fresh.
let mut new_validators = attestation.aggregation_bitfield.clone();
state
.current_epoch_attestations
.iter()
.filter(|current_attestation| current_attestation.data.shard == attestation.data.shard)
.for_each(|current_attestation| {
// Remove the validators who have signed the existing attestation (they are not new)
new_validators.difference_inplace(¤t_attestation.aggregation_bitfield);
});
new_validators.num_set_bits()
}
/// Outcome of `OperationPool::insert_deposit`, keyed by the deposit index.
#[derive(Debug, PartialEq, Clone)]
pub enum DepositInsertStatus {
    /// The deposit was not already in the pool.
    Fresh,
    /// The deposit already existed in the pool.
    Duplicate,
    /// The deposit conflicted with an existing deposit, which was replaced.
    Replaced(Deposit),
}
impl OperationPool {
/// Create a new operation pool.
pub fn new() -> Self {
Self::default()
}
/// Insert an attestation into the pool, aggregating it with existing attestations if possible.
///
/// Returns `Err(())` if the attestation fails validation.
pub fn insert_attestation(
    &mut self,
    attestation: Attestation,
    state: &BeaconState,
    spec: &ChainSpec,
) -> Result<(), ()> {
    // Check that attestation signatures are valid.
    // FIXME: should disable the time-dependent checks.
    validate_attestation(state, &attestation, spec).map_err(|_| ())?;
    let id = AttestationId::from_data(&attestation.data, state, spec);
    // First attestation for this (data, domain) bucket: store and finish.
    let existing_attestations = match self.attestations.entry(id) {
        hash_map::Entry::Vacant(entry) => {
            entry.insert(vec![attestation]);
            return Ok(());
        }
        hash_map::Entry::Occupied(entry) => entry.into_mut(),
    };
    let mut aggregated = false;
    for existing_attestation in existing_attestations.iter_mut() {
        if existing_attestation.signers_disjoint_from(&attestation) {
            // No overlapping signers: fold into the existing aggregate.
            existing_attestation.aggregate(&attestation);
            aggregated = true;
        } else if *existing_attestation == attestation {
            // Exact duplicate: nothing to add.
            aggregated = true;
        }
    }
    if !aggregated {
        // Overlapping but not identical: keep as a separate aggregate.
        existing_attestations.push(attestation);
    }
    Ok(())
}
/// Get a list of attestations for inclusion in a block.
///
/// Selects the highest-scoring valid attestations for the current fork,
/// limited to `spec.max_attestations`.
pub fn get_attestations(&self, state: &BeaconState, spec: &ChainSpec) -> Vec<Attestation> {
    // Attestations for the current fork...
    // TODO: should we also check domain bytes for the previous epoch?
    let current_epoch = state.slot.epoch(spec.slots_per_epoch);
    let domain_bytes = AttestationId::compute_domain_bytes(current_epoch, state, spec);
    self.attestations
        .iter()
        .filter(|(key, _)| key.domain_bytes_match(&domain_bytes))
        .flat_map(|(_, attestations)| attestations)
        // That are valid...
        .filter(|attestation| validate_attestation(state, attestation, spec).is_ok())
        // Scored by the number of new attestations they introduce (descending)
        .map(|att| (att, attestation_score(att, state)))
        .sorted_by_key(|&(_, score)| std::cmp::Reverse(score))
        // Limited to the maximum number of attestations per block
        .take(spec.max_attestations as usize)
        .map(|(att, _)| att)
        .cloned()
        .collect()
}
/// Remove attestations which are too old to be included in a block.
// TODO: we could probably prune other attestations here:
// - ones that are completely covered by attestations included in the state
// - maybe ones invalidated by the confirmation of one fork over another
pub fn prune_attestations(&mut self, finalized_state: &BeaconState, spec: &ChainSpec) {
    let finalized_slot = finalized_state.slot;
    self.attestations.retain(|_, bucket| match bucket.first() {
        // Every attestation in a bucket shares the same data, so checking
        // the first one is sufficient.
        Some(att) => finalized_slot < att.data.slot + spec.slots_per_epoch,
        None => false,
    });
}
/// Add a deposit to the pool.
///
/// No two distinct deposits should be added with the same index.
pub fn insert_deposit(&mut self, deposit: Deposit) -> DepositInsertStatus {
    use DepositInsertStatus::*;
    match self.deposits.entry(deposit.index) {
        Entry::Vacant(slot) => {
            slot.insert(deposit);
            Fresh
        }
        Entry::Occupied(mut slot) => {
            if slot.get() == &deposit {
                return Duplicate;
            }
            // Different deposit at the same index: swap it in, hand back the old one.
            Replaced(slot.insert(deposit))
        }
    }
}
/// Get an ordered list of deposits for inclusion in a block.
///
/// Take at most the maximum number of deposits, beginning from the current deposit index.
/// Stops at the first missing index or (outside tests) failed Merkle proof,
/// so the returned batch has contiguous, increasing indices.
pub fn get_deposits(&self, state: &BeaconState, spec: &ChainSpec) -> Vec<Deposit> {
    let start_idx = state.deposit_index;
    (start_idx..start_idx + spec.max_deposits)
        .map(|idx| self.deposits.get(&idx))
        .take_while(|deposit| {
            // NOTE: we don't use verify_deposit, because it requires the
            // deposit's index to match the state's, and we would like to return
            // a batch with increasing indices
            deposit.map_or(false, |deposit| {
                !VERIFY_DEPOSIT_PROOFS || verify_deposit_merkle_proof(state, deposit, spec)
            })
        })
        .flatten()
        .cloned()
        .collect()
}
/// Remove all deposits with index less than the deposit index of the latest finalised block.
/// Returns the pruned deposits.
pub fn prune_deposits(&mut self, state: &BeaconState) -> BTreeMap<u64, Deposit> {
    // Keep deposits at or above the state's deposit index; hand back the rest.
    let retained = self.deposits.split_off(&state.deposit_index);
    std::mem::replace(&mut self.deposits, retained)
}
/// Insert a proposer slashing into the pool, keyed by proposer index.
// TODO: should maybe insert anyway if the proposer is unknown in the validator index,
// because they could *become* known later
// FIXME: error handling
pub fn insert_proposer_slashing(
    &mut self,
    slashing: ProposerSlashing,
    state: &BeaconState,
    spec: &ChainSpec,
) -> Result<(), ()> {
    verify_proposer_slashing(&slashing, state, spec).map_err(|_| ())?;
    let index = slashing.proposer_index;
    self.proposer_slashings.insert(index, slashing);
    Ok(())
}
/// Only check whether the implicated validator has already been slashed, because
/// all slashings in the pool were validated upon insertion.
// TODO: we need a mechanism to avoid including a proposer slashing and an attester
// slashing for the same validator in the same block
pub fn get_proposer_slashings(
    &self,
    state: &BeaconState,
    spec: &ChainSpec,
) -> Vec<ProposerSlashing> {
    // We sort by validator index, which is safe, because a validator can only supply
    // so many valid slashings for lower-indexed validators (and even that is unlikely)
    filter_limit_operations(
        self.proposer_slashings.values(),
        // Keep a slashing only if the proposer is known and not yet slashed.
        |slashing| {
            state
                .validator_registry
                .get(slashing.proposer_index as usize)
                .map_or(false, |validator| !validator.slashed)
        },
        spec.max_proposer_slashings,
    )
}
/// Prune slashings for all slashed or withdrawn validators.
pub fn prune_proposer_slashings(&mut self, finalized_state: &BeaconState, spec: &ChainSpec) {
    // Collect the doomed keys first, then remove, to avoid mutating the map
    // while iterating over it.
    let to_prune = self
        .proposer_slashings
        .keys()
        .flat_map(|&validator_index| {
            finalized_state
                .validator_registry
                .get(validator_index as usize)
                // Keep the index only when the validator is slashed or
                // withdrawable; indices absent from the registry yield None
                // here and are therefore retained in the pool.
                .filter(|validator| {
                    validator.slashed
                        || validator.is_withdrawable_at(finalized_state.current_epoch(spec))
                })
                .map(|_| validator_index)
        })
        .collect::<Vec<_>>();
    for validator_index in to_prune {
        self.proposer_slashings.remove(&validator_index);
    }
}
// TODO: copy ProposerSlashing code for AttesterSlashing
/// Insert a voluntary exit, validating it almost-entirely (future exits are permitted).
pub fn insert_voluntary_exit(
    &mut self,
    exit: VoluntaryExit,
    state: &BeaconState,
    spec: &ChainSpec,
) -> Result<(), ()> {
    // `false`: skip the time check so exits scheduled for the future are accepted.
    verify_exit(state, &exit, spec, false).map_err(|_| ())?;
    let validator = exit.validator_index;
    self.voluntary_exits.insert(validator, exit);
    Ok(())
}
/// Get a list of voluntary exits for inclusion in a block.
// TODO: could optimise this by eliding the checks that have already been done on insert
pub fn get_voluntary_exits(&self, state: &BeaconState, spec: &ChainSpec) -> Vec<VoluntaryExit> {
    // `true`: perform the full (time-dependent) exit verification here.
    let is_includable = |exit: &VoluntaryExit| verify_exit(state, exit, spec, true).is_ok();
    filter_limit_operations(
        self.voluntary_exits.values(),
        is_includable,
        spec.max_voluntary_exits,
    )
}
/// Prune if validator has already exited at the last finalized state.
pub fn prune_voluntary_exits(&mut self, finalized_state: &BeaconState, spec: &ChainSpec) {
    // Collect the doomed keys first, then remove, to avoid mutating the map
    // while iterating over it.
    let to_prune = self
        .voluntary_exits
        .keys()
        .flat_map(|&validator_index| {
            finalized_state
                .validator_registry
                .get(validator_index as usize)
                // Indices absent from the registry yield None and are retained.
                .filter(|validator| validator.is_exited_at(finalized_state.current_epoch(spec)))
                .map(|_| validator_index)
        })
        .collect::<Vec<_>>();
    for validator_index in to_prune {
        self.voluntary_exits.remove(&validator_index);
    }
}
/// Insert a transfer into the pool, checking it for validity in the process.
pub fn insert_transfer(
    &mut self,
    transfer: Transfer,
    state: &BeaconState,
    spec: &ChainSpec,
) -> Result<(), ()> {
    // The transfer's signature is not part of its hash, but since it is
    // verified before insertion into the HashSet, duplicates cannot occur.
    verify_transfer_partial(state, &transfer, spec, true).map_err(|_| ())?;
    self.transfers.insert(transfer);
    Ok(())
}
/// Get a list of transfers for inclusion in a block.
// TODO: improve the economic optimality of this function by taking the transfer
// fees into account, and dependencies between transfers in the same block
// e.g. A pays B, B pays C
pub fn get_transfers(&self, state: &BeaconState, spec: &ChainSpec) -> Vec<Transfer> {
    let is_includable = |transfer: &Transfer| verify_transfer(state, transfer, spec).is_ok();
    filter_limit_operations(&self.transfers, is_includable, spec.max_transfers)
}
/// Prune the set of transfers by removing all those whose slot has already passed.
pub fn prune_transfers(&mut self, finalized_state: &BeaconState) {
    // `HashSet::retain` filters in place, avoiding the drain/collect round
    // trip (and its reallocation) of the previous implementation.
    self.transfers
        .retain(|transfer| transfer.slot > finalized_state.slot);
}
/// Prune all types of transactions given the latest finalized state.
pub fn prune_all(&mut self, finalized_state: &BeaconState, spec: &ChainSpec) {
    self.prune_attestations(finalized_state, spec);
    // Pruned deposits are discarded here; prune_deposits returns them for
    // callers that want them.
    self.prune_deposits(finalized_state);
    self.prune_proposer_slashings(finalized_state, spec);
    // FIXME: add attester slashings
    self.prune_voluntary_exits(finalized_state, spec);
    self.prune_transfers(finalized_state);
}
}
/// Filter up to `limit` operations out of an iterator of borrowed items,
/// cloning only the ones that pass the predicate.
fn filter_limit_operations<'a, T: 'a, I, F>(operations: I, filter: F, limit: u64) -> Vec<T>
where
    I: IntoIterator<Item = &'a T>,
    F: Fn(&T) -> bool,
    T: Clone,
{
    let mut selected = Vec::new();
    for op in operations {
        if selected.len() as u64 == limit {
            break;
        }
        if filter(op) {
            selected.push(op.clone());
        }
    }
    selected
}
#[cfg(test)]
mod tests {
    use super::DepositInsertStatus::*;
    use super::*;
    use types::test_utils::{SeedableRng, TestRandom, XorShiftRng};
    // Fresh / Duplicate / Replaced transitions for deposits sharing an index.
    #[test]
    fn insert_deposit() {
        let mut rng = XorShiftRng::from_seed([42; 16]);
        let mut op_pool = OperationPool::new();
        let deposit1 = Deposit::random_for_test(&mut rng);
        let mut deposit2 = Deposit::random_for_test(&mut rng);
        deposit2.index = deposit1.index;
        assert_eq!(op_pool.insert_deposit(deposit1.clone()), Fresh);
        assert_eq!(op_pool.insert_deposit(deposit1.clone()), Duplicate);
        assert_eq!(op_pool.insert_deposit(deposit2), Replaced(deposit1));
    }
    // get_deposits must return exactly max_deposits entries, starting at the
    // state's deposit index even when the pool starts earlier/ends later.
    #[test]
    fn get_deposits_max() {
        let mut rng = XorShiftRng::from_seed([42; 16]);
        let mut op_pool = OperationPool::new();
        let spec = ChainSpec::foundation();
        let start = 10000;
        let max_deposits = spec.max_deposits;
        let extra = 5;
        let offset = 1;
        assert!(offset <= extra);
        // Clone one random deposit across a contiguous index range.
        let proto_deposit = Deposit::random_for_test(&mut rng);
        let deposits = (start..start + max_deposits + extra)
            .map(|index| {
                let mut deposit = proto_deposit.clone();
                deposit.index = index;
                deposit
            })
            .collect::<Vec<_>>();
        for deposit in &deposits {
            assert_eq!(op_pool.insert_deposit(deposit.clone()), Fresh);
        }
        let mut state = BeaconState::random_for_test(&mut rng);
        state.deposit_index = start + offset;
        let deposits_for_block = op_pool.get_deposits(&state, &spec);
        assert_eq!(deposits_for_block.len() as u64, max_deposits);
        assert_eq!(
            deposits_for_block[..],
            deposits[offset as usize..(offset + max_deposits) as usize]
        );
    }
    // TODO: more tests
}
Operation pool: deposit pruning tests
use int_to_bytes::int_to_bytes8;
use itertools::Itertools;
use ssz::ssz_encode;
use state_processing::per_block_processing::errors::ProposerSlashingValidationError;
use state_processing::per_block_processing::{
validate_attestation, verify_deposit_merkle_proof, verify_exit, verify_proposer_slashing,
verify_transfer, verify_transfer_partial,
};
use std::collections::{btree_map::Entry, hash_map, BTreeMap, HashMap, HashSet};
use types::chain_spec::Domain;
use types::{
Attestation, AttestationData, AttesterSlashing, BeaconState, ChainSpec, Deposit, Epoch,
ProposerSlashing, Transfer, VoluntaryExit,
};
// Merkle-proof verification is skipped under `cfg(test)`: unit-test deposits
// are randomly generated and presumably do not carry valid proofs — confirm.
#[cfg(test)]
const VERIFY_DEPOSIT_PROOFS: bool = false;
#[cfg(not(test))]
const VERIFY_DEPOSIT_PROOFS: bool = true;
/// Pool of pending beacon-chain operations, bucketed per operation type so
/// each can be validated, selected for blocks, and pruned independently.
#[derive(Default)]
pub struct OperationPool {
    /// Map from attestation ID (see below) to vectors of attestations.
    attestations: HashMap<AttestationId, Vec<Attestation>>,
    /// Map from deposit index to deposit data.
    // NOTE: We assume that there is only one deposit per index
    // because the Eth1 data is updated (at most) once per epoch,
    // and the spec doesn't seem to accommodate for re-orgs on a time-frame
    // longer than an epoch
    deposits: BTreeMap<u64, Deposit>,
    /// Map from attester index to slashing.
    attester_slashings: BTreeMap<u64, AttesterSlashing>,
    /// Map from proposer index to slashing.
    proposer_slashings: BTreeMap<u64, ProposerSlashing>,
    /// Map from exiting validator to their exit data.
    voluntary_exits: BTreeMap<u64, VoluntaryExit>,
    /// Set of transfers.
    transfers: HashSet<Transfer>,
}
/// Serialized `AttestationData` augmented with a domain to encode the fork info.
#[derive(PartialEq, Eq, Clone, Hash, Debug)]
struct AttestationId(Vec<u8>);
/// Number of domain bytes that the end of an attestation ID is padded with.
const DOMAIN_BYTES_LEN: usize = 8;
impl AttestationId {
    /// Builds an ID: the SSZ-encoded attestation data with the fork-dependent
    /// domain bytes appended.
    fn from_data(attestation: &AttestationData, state: &BeaconState, spec: &ChainSpec) -> Self {
        let mut bytes = ssz_encode(attestation);
        let epoch = attestation.slot.epoch(spec.slots_per_epoch);
        bytes.extend_from_slice(&AttestationId::compute_domain_bytes(epoch, state, spec));
        AttestationId(bytes)
    }
    /// Serializes the attestation domain for `epoch` under the state's fork.
    fn compute_domain_bytes(epoch: Epoch, state: &BeaconState, spec: &ChainSpec) -> Vec<u8> {
        int_to_bytes8(spec.get_domain(epoch, Domain::Attestation, &state.fork))
    }
    /// True if the ID's trailing `DOMAIN_BYTES_LEN` bytes equal `domain_bytes`.
    /// Panics if the ID is shorter than `DOMAIN_BYTES_LEN` bytes.
    fn domain_bytes_match(&self, domain_bytes: &[u8]) -> bool {
        &self.0[self.0.len() - DOMAIN_BYTES_LEN..] == domain_bytes
    }
}
/// Compute a fitness score for an attestation.
///
/// The score is calculated by determining the number of *new* attestations that
/// the aggregate attestation introduces, and is proportional to the size of the reward we will
/// receive for including it in a block.
// TODO: this could be optimised with a map from validator index to whether that validator has
// attested in the *current* epoch. Alternatively, we could cache an index that allows us to
// quickly look up the attestations in the current epoch for a given shard.
fn attestation_score(attestation: &Attestation, state: &BeaconState) -> usize {
// Bitfield of validators whose attestations are new/fresh.
let mut new_validators = attestation.aggregation_bitfield.clone();
state
.current_epoch_attestations
.iter()
.filter(|current_attestation| current_attestation.data.shard == attestation.data.shard)
.for_each(|current_attestation| {
// Remove the validators who have signed the existing attestation (they are not new)
new_validators.difference_inplace(¤t_attestation.aggregation_bitfield);
});
new_validators.num_set_bits()
}
/// Outcome of inserting a deposit into the pool (see `OperationPool::insert_deposit`).
#[derive(Debug, PartialEq, Clone)]
pub enum DepositInsertStatus {
/// The deposit was not already in the pool.
Fresh,
/// The deposit already existed in the pool.
Duplicate,
/// The deposit conflicted with an existing deposit, which was replaced.
// Boxed to keep the enum small; the payload is only materialized on conflict.
Replaced(Box<Deposit>),
}
impl OperationPool {
/// Create a new operation pool.
pub fn new() -> Self {
Self::default()
}
/// Insert an attestation into the pool, aggregating it with existing attestations if possible.
///
/// Returns `Err(())` if the attestation fails validation against `state`.
pub fn insert_attestation(
&mut self,
attestation: Attestation,
state: &BeaconState,
spec: &ChainSpec,
) -> Result<(), ()> {
// Check that attestation signatures are valid.
// FIXME: should disable the time-dependent checks.
validate_attestation(state, &attestation, spec).map_err(|_| ())?;
let id = AttestationId::from_data(&attestation.data, state, spec);
// First attestation for this ID: start a new bucket and return early.
let existing_attestations = match self.attestations.entry(id) {
hash_map::Entry::Vacant(entry) => {
entry.insert(vec![attestation]);
return Ok(());
}
hash_map::Entry::Occupied(entry) => entry.into_mut(),
};
// Try to merge into an existing aggregate with a disjoint signer set; an exact
// duplicate also counts as "aggregated" so it is not stored twice.
let mut aggregated = false;
for existing_attestation in existing_attestations.iter_mut() {
if existing_attestation.signers_disjoint_from(&attestation) {
existing_attestation.aggregate(&attestation);
aggregated = true;
} else if *existing_attestation == attestation {
aggregated = true;
}
}
if !aggregated {
existing_attestations.push(attestation);
}
Ok(())
}
/// Get a list of attestations for inclusion in a block.
///
/// Returns at most `spec.max_attestations` valid attestations for the current fork,
/// ordered by descending score (number of new validator attestations introduced).
pub fn get_attestations(&self, state: &BeaconState, spec: &ChainSpec) -> Vec<Attestation> {
// Attestations for the current fork...
// TODO: should we also check domain bytes for the previous epoch?
let current_epoch = state.slot.epoch(spec.slots_per_epoch);
let domain_bytes = AttestationId::compute_domain_bytes(current_epoch, state, spec);
self.attestations
.iter()
.filter(|(key, _)| key.domain_bytes_match(&domain_bytes))
.flat_map(|(_, attestations)| attestations)
// That are valid...
.filter(|attestation| validate_attestation(state, attestation, spec).is_ok())
// Scored by the number of new attestations they introduce (descending)
.map(|att| (att, attestation_score(att, state)))
.sorted_by_key(|&(_, score)| std::cmp::Reverse(score))
// Limited to the maximum number of attestations per block
.take(spec.max_attestations as usize)
.map(|(att, _)| att)
.cloned()
.collect()
}
/// Remove attestations which are too old to be included in a block.
// TODO: we could probably prune other attestations here:
// - ones that are completely covered by attestations included in the state
// - maybe ones invalidated by the confirmation of one fork over another
pub fn prune_attestations(&mut self, finalized_state: &BeaconState, spec: &ChainSpec) {
self.attestations.retain(|_, attestations| {
// All the attestations in this bucket have the same data, so we only need to
// check the first one.
// An empty bucket yields `map_or(false, ...)` and is dropped as well.
attestations.first().map_or(false, |att| {
finalized_state.slot < att.data.slot + spec.slots_per_epoch
})
});
}
/// Add a deposit to the pool.
///
/// No two distinct deposits should be added with the same index.
pub fn insert_deposit(&mut self, deposit: Deposit) -> DepositInsertStatus {
use DepositInsertStatus::*;
match self.deposits.entry(deposit.index) {
Entry::Vacant(entry) => {
entry.insert(deposit);
Fresh
}
Entry::Occupied(mut entry) => {
if entry.get() == &deposit {
Duplicate
} else {
// `entry.insert` returns the previous value, which is handed back to the caller.
Replaced(Box::new(entry.insert(deposit)))
}
}
}
}
/// Get an ordered list of deposits for inclusion in a block.
///
/// Take at most the maximum number of deposits, beginning from the current deposit index.
/// The batch stops at the first missing or unverifiable index, keeping indices contiguous.
pub fn get_deposits(&self, state: &BeaconState, spec: &ChainSpec) -> Vec<Deposit> {
let start_idx = state.deposit_index;
(start_idx..start_idx + spec.max_deposits)
.map(|idx| self.deposits.get(&idx))
.take_while(|deposit| {
// NOTE: we don't use verify_deposit, because it requires the
// deposit's index to match the state's, and we would like to return
// a batch with increasing indices
deposit.map_or(false, |deposit| {
!VERIFY_DEPOSIT_PROOFS || verify_deposit_merkle_proof(state, deposit, spec)
})
})
.flatten()
.cloned()
.collect()
}
/// Remove all deposits with index less than the deposit index of the latest finalised block.
///
/// Returns the pruned (index, deposit) entries.
pub fn prune_deposits(&mut self, state: &BeaconState) -> BTreeMap<u64, Deposit> {
// `split_off` keeps entries >= state.deposit_index; swap them back into the pool and
// hand the older (pruned) entries to the caller.
let deposits_keep = self.deposits.split_off(&state.deposit_index);
std::mem::replace(&mut self.deposits, deposits_keep)
}
/// The number of deposits stored in the pool.
pub fn num_deposits(&self) -> usize {
self.deposits.len()
}
/// Insert a proposer slashing into the pool.
///
/// The slashing is fully validated before insertion; later retrieval only re-checks
/// the slashed flag (see `get_proposer_slashings`).
pub fn insert_proposer_slashing(
&mut self,
slashing: ProposerSlashing,
state: &BeaconState,
spec: &ChainSpec,
) -> Result<(), ProposerSlashingValidationError> {
// TODO: should maybe insert anyway if the proposer is unknown in the validator index,
// because they could *become* known later
verify_proposer_slashing(&slashing, state, spec)?;
self.proposer_slashings
.insert(slashing.proposer_index, slashing);
Ok(())
}
/// Only check whether the implicated validator has already been slashed, because
/// all slashings in the pool were validated upon insertion.
// TODO: we need a mechanism to avoid including a proposer slashing and an attester
// slashing for the same validator in the same block
pub fn get_proposer_slashings(
&self,
state: &BeaconState,
spec: &ChainSpec,
) -> Vec<ProposerSlashing> {
// We sort by validator index, which is safe, because a validator can only supply
// so many valid slashings for lower-indexed validators (and even that is unlikely)
filter_limit_operations(
self.proposer_slashings.values(),
|slashing| {
state
.validator_registry
.get(slashing.proposer_index as usize)
.map_or(false, |validator| !validator.slashed)
},
spec.max_proposer_slashings,
)
}
/// Prune slashings for all slashed or withdrawn validators.
pub fn prune_proposer_slashings(&mut self, finalized_state: &BeaconState, spec: &ChainSpec) {
// Collect keys first so we can mutate the map afterwards without holding a borrow.
let to_prune = self
.proposer_slashings
.keys()
.flat_map(|&validator_index| {
finalized_state
.validator_registry
.get(validator_index as usize)
.filter(|validator| {
validator.slashed
|| validator.is_withdrawable_at(finalized_state.current_epoch(spec))
})
.map(|_| validator_index)
})
.collect::<Vec<_>>();
for validator_index in to_prune {
self.proposer_slashings.remove(&validator_index);
}
}
// TODO: copy ProposerSlashing code for AttesterSlashing
/// Insert a voluntary exit, validating it almost-entirely (future exits are permitted).
pub fn insert_voluntary_exit(
&mut self,
exit: VoluntaryExit,
state: &BeaconState,
spec: &ChainSpec,
) -> Result<(), ()> {
// `false` disables the time-dependent check so exits scheduled for the future are kept.
verify_exit(state, &exit, spec, false).map_err(|_| ())?;
self.voluntary_exits.insert(exit.validator_index, exit);
Ok(())
}
/// Get a list of voluntary exits for inclusion in a block.
// TODO: could optimise this by eliding the checks that have already been done on insert
pub fn get_voluntary_exits(&self, state: &BeaconState, spec: &ChainSpec) -> Vec<VoluntaryExit> {
filter_limit_operations(
self.voluntary_exits.values(),
|exit| verify_exit(state, exit, spec, true).is_ok(),
spec.max_voluntary_exits,
)
}
/// Prune if validator has already exited at the last finalized state.
pub fn prune_voluntary_exits(&mut self, finalized_state: &BeaconState, spec: &ChainSpec) {
// Same two-phase collect-then-remove shape as `prune_proposer_slashings`.
let to_prune = self
.voluntary_exits
.keys()
.flat_map(|&validator_index| {
finalized_state
.validator_registry
.get(validator_index as usize)
.filter(|validator| validator.is_exited_at(finalized_state.current_epoch(spec)))
.map(|_| validator_index)
})
.collect::<Vec<_>>();
for validator_index in to_prune {
self.voluntary_exits.remove(&validator_index);
}
}
/// Insert a transfer into the pool, checking it for validity in the process.
pub fn insert_transfer(
&mut self,
transfer: Transfer,
state: &BeaconState,
spec: &ChainSpec,
) -> Result<(), ()> {
// The signature of the transfer isn't hashed, but because we check
// it before we insert into the HashSet, we can't end up with duplicate
// transactions.
verify_transfer_partial(state, &transfer, spec, true).map_err(|_| ())?;
self.transfers.insert(transfer);
Ok(())
}
/// Get a list of transfers for inclusion in a block.
// TODO: improve the economic optimality of this function by taking the transfer
// fees into account, and dependencies between transfers in the same block
// e.g. A pays B, B pays C
pub fn get_transfers(&self, state: &BeaconState, spec: &ChainSpec) -> Vec<Transfer> {
filter_limit_operations(
&self.transfers,
|transfer| verify_transfer(state, transfer, spec).is_ok(),
spec.max_transfers,
)
}
/// Prune the set of transfers by removing all those whose slot has already passed.
pub fn prune_transfers(&mut self, finalized_state: &BeaconState) {
self.transfers = self
.transfers
.drain()
.filter(|transfer| transfer.slot > finalized_state.slot)
.collect();
}
/// Prune all types of transactions given the latest finalized state.
pub fn prune_all(&mut self, finalized_state: &BeaconState, spec: &ChainSpec) {
self.prune_attestations(finalized_state, spec);
self.prune_deposits(finalized_state);
self.prune_proposer_slashings(finalized_state, spec);
// FIXME: add attester slashings
self.prune_voluntary_exits(finalized_state, spec);
self.prune_transfers(finalized_state);
}
}
/// Filter up to a maximum number of operations out of a slice.
///
/// Clones and returns the first `limit` items of `operations` for which `filter` is true,
/// preserving iteration order.
fn filter_limit_operations<'a, T: 'a, I, F>(operations: I, filter: F, limit: u64) -> Vec<T>
where
    I: IntoIterator<Item = &'a T>,
    F: Fn(&T) -> bool,
    T: Clone,
{
    let mut selected = Vec::new();
    for op in operations {
        // Stop as soon as the cap is reached; remaining items are never examined.
        if selected.len() as u64 >= limit {
            break;
        }
        if filter(op) {
            selected.push(op.clone());
        }
    }
    selected
}
#[cfg(test)]
mod tests {
use super::DepositInsertStatus::*;
use super::*;
use types::test_utils::{SeedableRng, TestRandom, XorShiftRng};
// Fresh/Duplicate/Replaced outcomes of `insert_deposit` for the same index.
#[test]
fn insert_deposit() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let mut op_pool = OperationPool::new();
let deposit1 = Deposit::random_for_test(&mut rng);
let mut deposit2 = Deposit::random_for_test(&mut rng);
deposit2.index = deposit1.index;
assert_eq!(op_pool.insert_deposit(deposit1.clone()), Fresh);
assert_eq!(op_pool.insert_deposit(deposit1.clone()), Duplicate);
assert_eq!(
op_pool.insert_deposit(deposit2),
Replaced(Box::new(deposit1))
);
}
// Create `count` dummy deposits with sequential deposit IDs beginning from `start`.
fn dummy_deposits(rng: &mut XorShiftRng, start: u64, count: u64) -> Vec<Deposit> {
let proto_deposit = Deposit::random_for_test(rng);
(start..start + count)
.map(|index| {
let mut deposit = proto_deposit.clone();
deposit.index = index;
deposit
})
.collect()
}
// `get_deposits` returns exactly `max_deposits` entries starting from the state's index.
#[test]
fn get_deposits_max() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let mut op_pool = OperationPool::new();
let spec = ChainSpec::foundation();
let start = 10000;
let max_deposits = spec.max_deposits;
let extra = 5;
let offset = 1;
assert!(offset <= extra);
let deposits = dummy_deposits(&mut rng, start, max_deposits + extra);
for deposit in &deposits {
assert_eq!(op_pool.insert_deposit(deposit.clone()), Fresh);
}
let mut state = BeaconState::random_for_test(&mut rng);
state.deposit_index = start + offset;
let deposits_for_block = op_pool.get_deposits(&state, &spec);
assert_eq!(deposits_for_block.len() as u64, max_deposits);
assert_eq!(
deposits_for_block[..],
deposits[offset as usize..(offset + max_deposits) as usize]
);
}
// Step the state's deposit index through two batches (with a gap) and check
// `prune_deposits` removes exactly the entries below the index each time.
#[test]
fn prune_deposits() {
let rng = &mut XorShiftRng::from_seed([42; 16]);
let mut op_pool = OperationPool::new();
let spec = ChainSpec::foundation();
let start1 = 100;
let count = 100;
let gap = 25;
let start2 = start1 + count + gap;
let deposits1 = dummy_deposits(rng, start1, count);
let deposits2 = dummy_deposits(rng, start2, count);
for d in deposits1.into_iter().chain(deposits2) {
op_pool.insert_deposit(d);
}
assert_eq!(op_pool.num_deposits(), 2 * count as usize);
let mut state = BeaconState::random_for_test(rng);
state.deposit_index = start1;
// Pruning the first bunch of deposits in batches of 5 should work.
let step = 5;
let mut pool_size = step + 2 * count as usize;
for i in (start1..=(start1 + count)).step_by(step) {
state.deposit_index = i;
op_pool.prune_deposits(&state);
pool_size -= step;
assert_eq!(op_pool.num_deposits(), pool_size);
}
assert_eq!(pool_size, count as usize);
// Pruning in the gap should do nothing.
for i in (start1 + count..start2).step_by(step) {
state.deposit_index = i;
op_pool.prune_deposits(&state);
assert_eq!(op_pool.num_deposits(), count as usize);
}
// Same again for the later deposits.
pool_size += step;
for i in (start2..=(start2 + count)).step_by(step) {
state.deposit_index = i;
op_pool.prune_deposits(&state);
pool_size -= step;
assert_eq!(op_pool.num_deposits(), pool_size);
}
assert_eq!(op_pool.num_deposits(), 0);
}
// TODO: more tests
}
// ============================================================================
use crate::checkpoint::CheckPoint;
use crate::errors::{BeaconChainError as Error, BlockProductionError};
use crate::fork_choice::{Error as ForkChoiceError, ForkChoice};
use crate::metrics::Metrics;
use crate::persisted_beacon_chain::{PersistedBeaconChain, BEACON_CHAIN_DB_KEY};
use lmd_ghost::LmdGhost;
use log::trace;
use operation_pool::DepositInsertStatus;
use operation_pool::{OperationPool, PersistedOperationPool};
use parking_lot::{RwLock, RwLockReadGuard};
use slot_clock::SlotClock;
use state_processing::per_block_processing::errors::{
AttestationValidationError, AttesterSlashingValidationError, DepositValidationError,
ExitValidationError, ProposerSlashingValidationError, TransferValidationError,
};
use state_processing::{
per_block_processing, per_block_processing_without_verifying_block_signature,
per_slot_processing, BlockProcessingError,
};
use std::sync::Arc;
use store::iter::{BlockIterator, BlockRootsIterator, StateRootsIterator};
use store::{Error as DBError, Store};
use tree_hash::TreeHash;
use types::*;
// Text included in blocks.
// Must be 32-bytes or panic.
//
// |-------must be this long------|
// NOTE(review): the ruler above shows the exact required width; presumably a consumer
// converts this into a fixed 32-byte field and panics on mismatch — confirm at the call site.
pub const GRAFFITI: &str = "sigp/lighthouse-0.0.0-prerelease";
/// Outcome of attempting to import a block via `BeaconChain::process_block`.
#[derive(Debug, PartialEq)]
pub enum BlockProcessingOutcome {
/// Block was valid and imported into the block graph.
Processed { block_root: Hash256 },
/// The block's parent_root is unknown.
ParentUnknown { parent: Hash256 },
/// The block slot is greater than the present slot.
FutureSlot {
present_slot: Slot,
block_slot: Slot,
},
/// The block state_root does not match the generated state.
StateRootMismatch,
/// The block was a genesis block, these blocks cannot be re-imported.
GenesisBlock,
/// The slot is finalized, no need to import.
FinalizedSlot,
/// Block is already known, no need to re-import.
BlockIsAlreadyKnown,
/// The block could not be applied to the state, it is invalid.
PerBlockProcessingError(BlockProcessingError),
}
/// Associated-type bundle that wires a concrete storage backend, slot clock, fork-choice
/// algorithm and Eth2 spec preset into a `BeaconChain` instantiation.
pub trait BeaconChainTypes {
type Store: store::Store;
type SlotClock: slot_clock::SlotClock;
type LmdGhost: LmdGhost<Self::Store, Self::EthSpec>;
type EthSpec: types::EthSpec;
}
/// Represents the "Beacon Chain" component of Ethereum 2.0. Allows import of blocks and block
/// operations and chooses a canonical head.
pub struct BeaconChain<T: BeaconChainTypes> {
pub spec: ChainSpec,
/// Persistent storage for blocks, states, etc. Typically an on-disk store, such as LevelDB.
pub store: Arc<T::Store>,
/// Reports the current slot, typically based upon the system clock.
pub slot_clock: T::SlotClock,
/// Stores all operations (e.g., `Attestation`, `Deposit`, etc) that are candidates for
/// inclusion in a block.
pub op_pool: OperationPool<T::EthSpec>,
/// Stores a "snapshot" of the chain at the time the head-of-the-chain block was received.
canonical_head: RwLock<CheckPoint<T::EthSpec>>,
/// The same state from `self.canonical_head`, but updated at the start of each slot with a
/// skip slot if no block is received. This is effectively a cache that avoids repeating calls
/// to `per_slot_processing`.
state: RwLock<BeaconState<T::EthSpec>>,
/// The root of the genesis block.
genesis_block_root: Hash256,
/// A state-machine that is updated with information from the network and chooses a canonical
/// head block.
pub fork_choice: ForkChoice<T>,
/// Stores metrics about this `BeaconChain`.
pub metrics: Metrics,
}
impl<T: BeaconChainTypes> BeaconChain<T> {
/// Instantiate a new Beacon Chain, from genesis.
///
/// Builds all state caches, persists the genesis state and block to `store` (under both the
/// block root and the `ZERO_HASH` key), and seeds fork choice with the genesis block.
pub fn from_genesis(
    store: Arc<T::Store>,
    slot_clock: T::SlotClock,
    mut genesis_state: BeaconState<T::EthSpec>,
    genesis_block: BeaconBlock,
    spec: ChainSpec,
) -> Result<Self, Error> {
    genesis_state.build_all_caches(&spec)?;
    let state_root = genesis_state.canonical_root();
    store.put(&state_root, &genesis_state)?;
    let genesis_block_root = genesis_block.block_header().canonical_root();
    store.put(&genesis_block_root, &genesis_block)?;
    // Also store the genesis block under the `ZERO_HASH` key.
    // (The root computed above is reused; the previous code redundantly recomputed the
    // identical `genesis_block_root` here.)
    store.put(&spec.zero_hash, &genesis_block)?;
    let canonical_head = RwLock::new(CheckPoint::new(
        genesis_block.clone(),
        genesis_block_root,
        genesis_state.clone(),
        state_root,
    ));
    Ok(Self {
        spec,
        slot_clock,
        op_pool: OperationPool::new(),
        state: RwLock::new(genesis_state),
        canonical_head,
        genesis_block_root,
        fork_choice: ForkChoice::new(store.clone(), &genesis_block, genesis_block_root),
        metrics: Metrics::new()?,
        store,
    })
}
/// Attempt to load an existing instance from the given `store`.
///
/// Returns `Ok(None)` when no persisted chain exists under `BEACON_CHAIN_DB_KEY`.
pub fn from_store(
store: Arc<T::Store>,
spec: ChainSpec,
) -> Result<Option<BeaconChain<T>>, Error> {
let key = Hash256::from_slice(&BEACON_CHAIN_DB_KEY.as_bytes());
let p: PersistedBeaconChain<T> = match store.get(&key) {
Err(e) => return Err(e.into()),
Ok(None) => return Ok(None),
Ok(Some(p)) => p,
};
// Rebuild the slot clock from the persisted genesis time.
let slot_clock = T::SlotClock::new(
spec.genesis_slot,
p.state.genesis_time,
spec.seconds_per_slot,
);
// Re-seed fork choice from the last finalized block rather than genesis.
let last_finalized_root = p.canonical_head.beacon_state.finalized_root;
let last_finalized_block = &p.canonical_head.beacon_block;
let op_pool = p.op_pool.into_operation_pool(&p.state, &spec);
Ok(Some(BeaconChain {
spec,
slot_clock,
fork_choice: ForkChoice::new(store.clone(), last_finalized_block, last_finalized_root),
op_pool,
canonical_head: RwLock::new(p.canonical_head),
state: RwLock::new(p.state),
genesis_block_root: p.genesis_block_root,
metrics: Metrics::new()?,
store,
}))
}
/// Attempt to save this instance to `self.store`.
///
/// Serializes the canonical head, operation pool, genesis root and current state under the
/// well-known `BEACON_CHAIN_DB_KEY` so `from_store` can restore them.
pub fn persist(&self) -> Result<(), Error> {
let p: PersistedBeaconChain<T> = PersistedBeaconChain {
canonical_head: self.canonical_head.read().clone(),
op_pool: PersistedOperationPool::from_operation_pool(&self.op_pool),
genesis_block_root: self.genesis_block_root,
state: self.state.read().clone(),
};
let key = Hash256::from_slice(&BEACON_CHAIN_DB_KEY.as_bytes());
self.store.put(&key, &p)?;
Ok(())
}
/// Returns the beacon block body for each beacon block root in `roots`.
///
/// Fails if any root in `roots` does not have a corresponding block.
pub fn get_block_bodies(&self, roots: &[Hash256]) -> Result<Vec<BeaconBlockBody>, Error> {
// `collect` into Result short-circuits on the first missing block.
let bodies: Result<Vec<BeaconBlockBody>, _> = roots
.iter()
.map(|root| match self.get_block(root)? {
Some(block) => Ok(block.body),
None => Err(Error::DBInconsistent(format!("Missing block: {}", root))),
})
.collect();
Ok(bodies?)
}
/// Returns the beacon block header for each beacon block root in `roots`.
///
/// Fails if any root in `roots` does not have a corresponding block.
pub fn get_block_headers(&self, roots: &[Hash256]) -> Result<Vec<BeaconBlockHeader>, Error> {
let headers: Result<Vec<BeaconBlockHeader>, _> = roots
.iter()
.map(|root| match self.get_block(root)? {
Some(block) => Ok(block.block_header()),
None => Err(Error::DBInconsistent("Missing block".into())),
})
.collect();
Ok(headers?)
}
/// Iterate in reverse (highest to lowest slot) through all blocks from the block at `slot`
/// through to the genesis block.
///
/// Returns `None` for headers prior to genesis or when there is an error reading from `Store`.
///
/// Contains duplicate headers when skip slots are encountered.
pub fn rev_iter_blocks(&self, slot: Slot) -> BlockIterator<T::EthSpec, T::Store> {
// Clones the current state so the iterator owns its starting point.
BlockIterator::owned(self.store.clone(), self.state.read().clone(), slot)
}
/// Iterates in reverse (highest to lowest slot) through all block roots from `slot` through to
/// genesis.
///
/// Returns `None` for roots prior to genesis or when there is an error reading from `Store`.
///
/// Contains duplicate roots when skip slots are encountered.
pub fn rev_iter_block_roots(&self, slot: Slot) -> BlockRootsIterator<T::EthSpec, T::Store> {
// Clones the current state so the iterator owns its starting point.
BlockRootsIterator::owned(self.store.clone(), self.state.read().clone(), slot)
}
/// Iterates in reverse (highest to lowest slot) through all state roots from `slot` through to
/// genesis.
///
/// Returns `None` for roots prior to genesis or when there is an error reading from `Store`.
pub fn rev_iter_state_roots(&self, slot: Slot) -> StateRootsIterator<T::EthSpec, T::Store> {
// Clones the current state so the iterator owns its starting point.
StateRootsIterator::owned(self.store.clone(), self.state.read().clone(), slot)
}
/// Returns the block at the given root, if any.
///
/// ## Errors
///
/// May return a database error.
pub fn get_block(&self, block_root: &Hash256) -> Result<Option<BeaconBlock>, Error> {
Ok(self.store.get(block_root)?)
}
/// Returns a read-lock guarded `BeaconState` which is the `canonical_head` that has been
/// updated to match the current slot clock.
// NOTE: the guard holds the state read-lock until it is dropped by the caller.
pub fn current_state(&self) -> RwLockReadGuard<BeaconState<T::EthSpec>> {
self.state.read()
}
/// Returns a read-lock guarded `CheckPoint` struct for reading the head (as chosen by the
/// fork-choice rule).
///
/// It is important to note that the `beacon_state` returned may not match the present slot. It
/// is the state as it was when the head block was received, which could be some slots prior to
/// now.
pub fn head(&self) -> RwLockReadGuard<CheckPoint<T::EthSpec>> {
self.canonical_head.read()
}
/// Returns the slot of the highest block in the canonical chain.
pub fn best_slot(&self) -> Slot {
self.canonical_head.read().beacon_block.slot
}
/// Ensures the current canonical `BeaconState` has been transitioned to match the `slot_clock`.
///
/// Performs one `per_slot_processing` per missed slot while holding the state write-lock.
pub fn catchup_state(&self) -> Result<(), Error> {
let spec = &self.spec;
let present_slot = match self.slot_clock.present_slot() {
Ok(Some(slot)) => slot,
_ => return Err(Error::UnableToReadSlot),
};
if self.state.read().slot < present_slot {
let mut state = self.state.write();
// If required, transition the new state to the present slot.
for _ in state.slot.as_u64()..present_slot.as_u64() {
// Ensure the next epoch state caches are built in case of an epoch transition.
state.build_committee_cache(RelativeEpoch::Next, spec)?;
per_slot_processing(&mut *state, spec)?;
}
state.build_all_caches(spec)?;
}
Ok(())
}
/// Build all of the caches on the current state.
///
/// Ideally this shouldn't be required, however we leave it here for testing.
pub fn ensure_state_caches_are_built(&self) -> Result<(), Error> {
self.state.write().build_all_caches(&self.spec)?;
Ok(())
}
/// Returns the validator index (if any) for the given public key.
///
/// Information is retrieved from the present `beacon_state.validator_registry`.
/// This is a linear scan of the registry; returns the first (lowest) matching index.
pub fn validator_index(&self, pubkey: &PublicKey) -> Option<usize> {
    self.head()
        .beacon_state
        .validator_registry
        .iter()
        .position(|validator| validator.pubkey == *pubkey)
}
/// Reads the slot clock, returns `None` if the slot is unavailable.
///
/// The slot might be unavailable due to an error with the system clock, or if the present time
/// is before genesis (i.e., a negative slot).
///
/// This is distinct to `present_slot`, which simply reads the latest state. If a
/// call to `read_slot_clock` results in a higher slot than a call to `present_slot`,
/// `self.state` should undergo per slot processing.
pub fn read_slot_clock(&self) -> Option<Slot> {
    // A clock error (`Err(_)`) and a pre-genesis reading (`Ok(None)`) both map to `None`.
    self.slot_clock.present_slot().unwrap_or(None)
}
/// Reads the slot clock (see `self.read_slot_clock()` and returns the number of slots since
/// genesis.
pub fn slots_since_genesis(&self) -> Option<SlotHeight> {
    let now = self.read_slot_clock()?;
    let genesis_slot = self.spec.genesis_slot;
    // Guard against a pre-genesis clock reading; subtraction below would underflow.
    if now < genesis_slot {
        return None;
    }
    Some(SlotHeight::from(now.as_u64() - genesis_slot.as_u64()))
}
/// Returns slot of the present state.
///
/// This is distinct to `read_slot_clock`, which reads from the actual system clock. If
/// `self.state` has not been transitioned it is possible for the system clock to be on a
/// different slot to what is returned from this call.
pub fn present_slot(&self) -> Slot {
self.state.read().slot
}
/// Returns the block proposer for a given slot.
///
/// Information is read from the present `beacon_state` shuffling, only information from the
/// present epoch is available.
pub fn block_proposer(&self, slot: Slot) -> Result<usize, Error> {
// Ensures that the present state has been advanced to the present slot, skipping slots if
// blocks are not present.
self.catchup_state()?;
// TODO: permit lookups of the proposer at any slot.
let index = self.state.read().get_beacon_proposer_index(
slot,
RelativeEpoch::Current,
&self.spec,
)?;
Ok(index)
}
/// Returns the attestation slot and shard for a given validator index.
///
/// Information is read from the current state, so only information from the present and prior
/// epoch is available.
pub fn validator_attestion_slot_and_shard(
    &self,
    validator_index: usize,
) -> Result<Option<(Slot, u64)>, BeaconStateError> {
    trace!(
        "BeaconChain::validator_attestion_slot_and_shard: validator_index: {}",
        validator_index
    );
    // Map the optional duty straight into its (slot, shard) pair.
    Ok(self
        .state
        .read()
        .get_attestation_duties(validator_index, RelativeEpoch::Current)?
        .map(|duty| (duty.slot, duty.shard)))
}
/// Produce an `AttestationData` that is valid for the present `slot` and given `shard`.
///
/// Attests to the canonical chain.
pub fn produce_attestation_data(&self, shard: u64) -> Result<AttestationData, Error> {
    let state = self.state.read();
    // Read the head root and slot under a single lock acquisition so they form a
    // consistent snapshot (two separate `self.head()` calls could observe different
    // heads if fork choice updates between them).
    let (head_block_root, head_block_slot) = {
        let head = self.head();
        (head.beacon_block_root, head.beacon_block.slot)
    };
    self.produce_attestation_data_for_block(shard, head_block_root, head_block_slot, &*state)
}
/// Produce an `AttestationData` that attests to the chain denoted by `block_root` and `state`.
///
/// Permits attesting to any arbitrary chain. Generally, the `produce_attestation_data`
/// function should be used as it attests to the canonical chain.
pub fn produce_attestation_data_for_block(
&self,
shard: u64,
head_block_root: Hash256,
head_block_slot: Slot,
state: &BeaconState<T::EthSpec>,
) -> Result<AttestationData, Error> {
// Collect some metrics.
self.metrics.attestation_production_requests.inc();
let timer = self.metrics.attestation_production_times.start_timer();
let slots_per_epoch = T::EthSpec::slots_per_epoch();
let current_epoch_start_slot = state.current_epoch().start_slot(slots_per_epoch);
// The `target_root` is the root of the first block of the current epoch.
//
// The `state` does not know the root of the block for it's current slot (it only knows
// about blocks from prior slots). This creates an edge-case when the state is on the first
// slot of the epoch -- we're unable to obtain the `target_root` because it is not a prior
// root.
//
// This edge case is handled in two ways:
//
// - If the head block is on the same slot as the state, we use it's root.
// - Otherwise, assume the current slot has been skipped and use the block root from the
// prior slot.
//
// For all other cases, we simply read the `target_root` from `state.latest_block_roots`.
let target_root = if state.slot == current_epoch_start_slot {
if head_block_slot == current_epoch_start_slot {
head_block_root
} else {
*state.get_block_root(current_epoch_start_slot - 1)?
}
} else {
*state.get_block_root(current_epoch_start_slot)?
};
// Hash the shard's current crosslink to link this attestation to its predecessor.
let previous_crosslink_root =
Hash256::from_slice(&state.get_current_crosslink(shard)?.tree_hash_root());
// Collect some metrics.
self.metrics.attestation_production_successes.inc();
timer.observe_duration();
Ok(AttestationData {
beacon_block_root: head_block_root,
source_epoch: state.current_justified_epoch,
source_root: state.current_justified_root,
target_epoch: state.current_epoch(),
target_root,
shard,
previous_crosslink_root,
// The data root is filled in by the shard chain; zero at the beacon layer.
crosslink_data_root: Hash256::zero(),
})
}
/// Accept a new attestation from the network.
///
/// If valid, the attestation is added to the `op_pool` and aggregated with another attestation
/// if possible.
pub fn process_attestation(
&self,
attestation: Attestation,
) -> Result<(), AttestationValidationError> {
self.metrics.attestation_processing_requests.inc();
let timer = self.metrics.attestation_processing_times.start_timer();
let result = self
.op_pool
.insert_attestation(attestation, &*self.state.read(), &self.spec);
timer.observe_duration();
if result.is_ok() {
self.metrics.attestation_processing_successes.inc();
}
// TODO: process attestation. Please consider:
//
// - Because a block was not added to the op pool does not mean it's invalid (it might
// just be old).
// - The attestation should be rejected if we don't know the block (ideally it should be
// queued, but this may be overkill).
// - The attestation _must_ be validated against it's state before being added to fork
// choice.
// - You can avoid verifying some attestations by first checking if they're a latest
// message. This would involve expanding the `LmdGhost` API.
result
}
/// Accept some deposit and queue it for inclusion in an appropriate block.
// Thin delegation to the op pool; validation happens there.
pub fn process_deposit(
&self,
deposit: Deposit,
) -> Result<DepositInsertStatus, DepositValidationError> {
self.op_pool.insert_deposit(deposit)
}
/// Accept some exit and queue it for inclusion in an appropriate block.
// Validated against the current state inside the op pool.
pub fn process_voluntary_exit(&self, exit: VoluntaryExit) -> Result<(), ExitValidationError> {
self.op_pool
.insert_voluntary_exit(exit, &*self.state.read(), &self.spec)
}
/// Accept some transfer and queue it for inclusion in an appropriate block.
// Validated against the current state inside the op pool.
pub fn process_transfer(&self, transfer: Transfer) -> Result<(), TransferValidationError> {
self.op_pool
.insert_transfer(transfer, &*self.state.read(), &self.spec)
}
/// Accept some proposer slashing and queue it for inclusion in an appropriate block.
// Validated against the current state inside the op pool.
pub fn process_proposer_slashing(
&self,
proposer_slashing: ProposerSlashing,
) -> Result<(), ProposerSlashingValidationError> {
self.op_pool
.insert_proposer_slashing(proposer_slashing, &*self.state.read(), &self.spec)
}
/// Accept some attester slashing and queue it for inclusion in an appropriate block.
// Validated against the current state inside the op pool.
pub fn process_attester_slashing(
&self,
attester_slashing: AttesterSlashing,
) -> Result<(), AttesterSlashingValidationError> {
self.op_pool
.insert_attester_slashing(attester_slashing, &*self.state.read(), &self.spec)
}
/// Accept some block and attempt to add it to block DAG.
///
/// Will accept blocks from prior slots, however it will reject any block from a future slot.
pub fn process_block(&self, block: BeaconBlock) -> Result<BlockProcessingOutcome, Error> {
self.metrics.block_processing_requests.inc();
let timer = self.metrics.block_processing_times.start_timer();
let finalized_slot = self
.state
.read()
.finalized_epoch
.start_slot(T::EthSpec::slots_per_epoch());
if block.slot <= finalized_slot {
return Ok(BlockProcessingOutcome::FinalizedSlot);
}
if block.slot == 0 {
return Ok(BlockProcessingOutcome::GenesisBlock);
}
let block_root = block.block_header().canonical_root();
if block_root == self.genesis_block_root {
return Ok(BlockProcessingOutcome::GenesisBlock);
}
let present_slot = self
.read_slot_clock()
.ok_or_else(|| Error::UnableToReadSlot)?;
if block.slot > present_slot {
return Ok(BlockProcessingOutcome::FutureSlot {
present_slot,
block_slot: block.slot,
});
}
if self.store.exists::<BeaconBlock>(&block_root)? {
return Ok(BlockProcessingOutcome::BlockIsAlreadyKnown);
}
// Load the blocks parent block from the database, returning invalid if that block is not
// found.
let parent_block_root = block.previous_block_root;
let parent_block: BeaconBlock = match self.store.get(&parent_block_root)? {
Some(previous_block_root) => previous_block_root,
None => {
return Ok(BlockProcessingOutcome::ParentUnknown {
parent: parent_block_root,
});
}
};
// Load the parent blocks state from the database, returning an error if it is not found.
// It is an error because if know the parent block we should also know the parent state.
let parent_state_root = parent_block.state_root;
let parent_state = self
.store
.get(&parent_state_root)?
.ok_or_else(|| Error::DBInconsistent(format!("Missing state {}", parent_state_root)))?;
// Transition the parent state to the block slot.
let mut state: BeaconState<T::EthSpec> = parent_state;
for _ in state.slot.as_u64()..block.slot.as_u64() {
per_slot_processing(&mut state, &self.spec)?;
}
state.build_committee_cache(RelativeEpoch::Current, &self.spec)?;
// Apply the received block to its parent state (which has been transitioned into this
// slot).
match per_block_processing(&mut state, &block, &self.spec) {
Err(BlockProcessingError::BeaconStateError(e)) => {
return Err(Error::BeaconStateError(e))
}
Err(e) => return Ok(BlockProcessingOutcome::PerBlockProcessingError(e)),
_ => {}
}
let state_root = state.canonical_root();
if block.state_root != state_root {
return Ok(BlockProcessingOutcome::StateRootMismatch);
}
// Store the block and state.
self.store.put(&block_root, &block)?;
self.store.put(&state_root, &state)?;
// Register the new block with the fork choice service.
self.fork_choice.process_block(&state, &block, block_root)?;
// Execute the fork choice algorithm, enthroning a new head if discovered.
//
// Note: in the future we may choose to run fork-choice less often, potentially based upon
// some heuristic around number of attestations seen for the block.
self.fork_choice()?;
self.metrics.block_processing_successes.inc();
self.metrics
.operations_per_block_attestation
.observe(block.body.attestations.len() as f64);
timer.observe_duration();
Ok(BlockProcessingOutcome::Processed { block_root })
}
/// Produce a new block at the present slot.
///
/// The produced block will not be inherently valid, it must be signed by a block producer.
/// Block signing is out of the scope of this function and should be done by a separate program.
pub fn produce_block(
&self,
randao_reveal: Signature,
) -> Result<(BeaconBlock, BeaconState<T::EthSpec>), BlockProductionError> {
let state = self.state.read().clone();
let slot = self
.read_slot_clock()
.ok_or_else(|| BlockProductionError::UnableToReadSlot)?;
self.produce_block_on_state(state, slot, randao_reveal)
}
    /// Produce a block for some `slot` upon the given `state`.
    ///
    /// Typically the `self.produce_block()` function should be used, instead of calling this
    /// function directly. This function is useful for purposefully creating forks or blocks at
    /// non-current slots.
    ///
    /// The given state will be advanced to the given `produce_at_slot`, then a block will be
    /// produced at that slot height.
    pub fn produce_block_on_state(
        &self,
        mut state: BeaconState<T::EthSpec>,
        produce_at_slot: Slot,
        randao_reveal: Signature,
    ) -> Result<(BeaconBlock, BeaconState<T::EthSpec>), BlockProductionError> {
        self.metrics.block_production_requests.inc();
        let timer = self.metrics.block_production_times.start_timer();
        // If required, transition the new state to the present slot (applying skip slots).
        while state.slot < produce_at_slot {
            per_slot_processing(&mut state, &self.spec)?;
        }
        state.build_committee_cache(RelativeEpoch::Current, &self.spec)?;
        // At slot 0 there is no prior block root in the state; fall back to the latest
        // block header's root.
        let previous_block_root = if state.slot > 0 {
            *state
                .get_block_root(state.slot - 1)
                .map_err(|_| BlockProductionError::UnableToGetBlockRootFromState)?
        } else {
            state.latest_block_header.canonical_root()
        };
        // `copy_from_slice` panics unless `GRAFFITI` is exactly 32 bytes.
        let mut graffiti: [u8; 32] = [0; 32];
        graffiti.copy_from_slice(GRAFFITI.as_bytes());
        let (proposer_slashings, attester_slashings) =
            self.op_pool.get_slashings(&state, &self.spec);
        let mut block = BeaconBlock {
            slot: state.slot,
            previous_block_root,
            state_root: Hash256::zero(), // Updated after the state is calculated.
            signature: Signature::empty_signature(), // To be completed by a validator.
            body: BeaconBlockBody {
                randao_reveal,
                // TODO: replace with real data.
                eth1_data: Eth1Data {
                    deposit_count: 0,
                    deposit_root: Hash256::zero(),
                    block_hash: Hash256::zero(),
                },
                graffiti,
                proposer_slashings,
                attester_slashings,
                attestations: self.op_pool.get_attestations(&state, &self.spec),
                deposits: self.op_pool.get_deposits(&state, &self.spec),
                voluntary_exits: self.op_pool.get_voluntary_exits(&state, &self.spec),
                transfers: self.op_pool.get_transfers(&state, &self.spec),
            },
        };
        // Run the new block against the state to compute the post-state root; the
        // signature is still empty so it cannot (and must not) be verified here.
        per_block_processing_without_verifying_block_signature(&mut state, &block, &self.spec)?;
        let state_root = state.canonical_root();
        block.state_root = state_root;
        self.metrics.block_production_successes.inc();
        timer.observe_duration();
        Ok((block, state))
    }
    /// Execute the fork choice algorithm and enthrone the result as the canonical head.
    ///
    /// Errors if the chosen head would revert finality, or if the head block/state is
    /// missing from the store.
    pub fn fork_choice(&self) -> Result<(), Error> {
        self.metrics.fork_choice_requests.inc();
        // Start fork choice metrics timer.
        let timer = self.metrics.fork_choice_times.start_timer();
        // Determine the root of the block that is the head of the chain.
        let beacon_block_root = self.fork_choice.find_head(&self)?;
        // End fork choice metrics timer.
        timer.observe_duration();
        // If a new head was chosen.
        if beacon_block_root != self.head().beacon_block_root {
            self.metrics.fork_choice_changed_head.inc();
            let beacon_block: BeaconBlock = self
                .store
                .get(&beacon_block_root)?
                .ok_or_else(|| Error::MissingBeaconBlock(beacon_block_root))?;
            let beacon_state_root = beacon_block.state_root;
            let beacon_state: BeaconState<T::EthSpec> = self
                .store
                .get(&beacon_state_root)?
                .ok_or_else(|| Error::MissingBeaconState(beacon_state_root))?;
            // If we switched to a new chain (instead of building atop the present chain).
            if self.head().beacon_block_root != beacon_block.previous_block_root {
                self.metrics.fork_choice_reorg_count.inc();
            };
            let old_finalized_epoch = self.head().beacon_state.finalized_epoch;
            let new_finalized_epoch = beacon_state.finalized_epoch;
            let finalized_root = beacon_state.finalized_root;
            // Never revert back past a finalized epoch.
            if new_finalized_epoch < old_finalized_epoch {
                Err(Error::RevertedFinalizedEpoch {
                    previous_epoch: old_finalized_epoch,
                    new_epoch: new_finalized_epoch,
                })
            } else {
                self.update_canonical_head(CheckPoint {
                    beacon_block: beacon_block,
                    beacon_block_root,
                    beacon_state,
                    beacon_state_root,
                })?;
                // Only run finalization handling when the finalized epoch actually moved.
                if new_finalized_epoch != old_finalized_epoch {
                    self.after_finalization(old_finalized_epoch, finalized_root)?;
                }
                Ok(())
            }
        } else {
            Ok(())
        }
    }
    /// Update the canonical head to `new_head`.
    ///
    /// Also rebuilds the cached present-slot state and persists the chain to the store.
    fn update_canonical_head(&self, new_head: CheckPoint<T::EthSpec>) -> Result<(), Error> {
        // Update the checkpoint that stores the head of the chain at the time it received the
        // block.
        *self.canonical_head.write() = new_head;
        // Update the always-at-the-present-slot state we keep around for performance gains.
        *self.state.write() = {
            let mut state = self.canonical_head.read().beacon_state.clone();
            // NOTE(review): if the slot clock fails here the canonical head has already
            // been swapped while `self.state` is left stale — confirm this is acceptable.
            let present_slot = match self.slot_clock.present_slot() {
                Ok(Some(slot)) => slot,
                _ => return Err(Error::UnableToReadSlot),
            };
            // If required, transition the new state to the present slot.
            for _ in state.slot.as_u64()..present_slot.as_u64() {
                per_slot_processing(&mut state, &self.spec)?;
            }
            state.build_all_caches(&self.spec)?;
            state
        };
        // Save `self` to `self.store`.
        self.persist()?;
        Ok(())
    }
    /// Called after `self` has had a new block finalized.
    ///
    /// Performs pruning and finality-based optimizations.
    fn after_finalization(
        &self,
        old_finalized_epoch: Epoch,
        finalized_block_root: Hash256,
    ) -> Result<(), Error> {
        let finalized_block = self
            .store
            .get::<BeaconBlock>(&finalized_block_root)?
            .ok_or_else(|| Error::MissingBeaconBlock(finalized_block_root))?;
        // Derive the finalized epoch from the finalized block's own slot.
        let new_finalized_epoch = finalized_block.slot.epoch(T::EthSpec::slots_per_epoch());
        // Sanity check: finality must never move backwards.
        if new_finalized_epoch < old_finalized_epoch {
            Err(Error::RevertedFinalizedEpoch {
                previous_epoch: old_finalized_epoch,
                new_epoch: new_finalized_epoch,
            })
        } else {
            // Inform fork choice of the new finalized block so it can update its
            // internal state accordingly.
            self.fork_choice
                .process_finalization(&finalized_block, finalized_block_root)?;
            Ok(())
        }
    }
/// Returns `true` if the given block root has not been processed.
pub fn is_new_block_root(&self, beacon_block_root: &Hash256) -> Result<bool, Error> {
Ok(!self.store.exists::<BeaconBlock>(beacon_block_root)?)
}
    /// Dumps the entire canonical chain, from the head to genesis to a vector for analysis.
    ///
    /// This could be a very expensive operation and should only be done in testing/analysis
    /// activities.
    pub fn chain_dump(&self) -> Result<Vec<CheckPoint<T::EthSpec>>, Error> {
        let mut dump = vec![];
        // Start from the present head checkpoint and walk parent links backwards.
        let mut last_slot = CheckPoint {
            beacon_block: self.head().beacon_block.clone(),
            beacon_block_root: self.head().beacon_block_root,
            beacon_state: self.head().beacon_state.clone(),
            beacon_state_root: self.head().beacon_state_root,
        };
        dump.push(last_slot.clone());
        loop {
            let beacon_block_root = last_slot.beacon_block.previous_block_root;
            // A `zero_hash` parent root marks the genesis block.
            if beacon_block_root == self.spec.zero_hash {
                break; // Genesis has been reached.
            }
            // A missing block/state for a referenced root is a database inconsistency,
            // not a normal miss.
            let beacon_block: BeaconBlock =
                self.store.get(&beacon_block_root)?.ok_or_else(|| {
                    Error::DBInconsistent(format!("Missing block {}", beacon_block_root))
                })?;
            let beacon_state_root = beacon_block.state_root;
            let beacon_state = self.store.get(&beacon_state_root)?.ok_or_else(|| {
                Error::DBInconsistent(format!("Missing state {}", beacon_state_root))
            })?;
            let slot = CheckPoint {
                beacon_block,
                beacon_block_root,
                beacon_state,
                beacon_state_root,
            };
            dump.push(slot.clone());
            last_slot = slot;
        }
        // The walk collected head-to-genesis; present it genesis-to-head.
        dump.reverse();
        Ok(dump)
    }
}
// Allows `?` to convert store-level `DBError` into the chain-level `Error`.
impl From<DBError> for Error {
    fn from(e: DBError) -> Error {
        Error::DBError(e)
    }
}
// Allows `?` to convert fork-choice errors into the chain-level `Error`.
impl From<ForkChoiceError> for Error {
    fn from(e: ForkChoiceError) -> Error {
        Error::ForkChoiceError(e)
    }
}
// Allows `?` to convert state-processing errors into the chain-level `Error`.
impl From<BeaconStateError> for Error {
    fn from(e: BeaconStateError) -> Error {
        Error::BeaconStateError(e)
    }
}
// Processing attestation in fork choice
use crate::checkpoint::CheckPoint;
use crate::errors::{BeaconChainError as Error, BlockProductionError};
use crate::fork_choice::{Error as ForkChoiceError, ForkChoice};
use crate::metrics::Metrics;
use crate::persisted_beacon_chain::{PersistedBeaconChain, BEACON_CHAIN_DB_KEY};
use lmd_ghost::LmdGhost;
use log::trace;
use operation_pool::DepositInsertStatus;
use operation_pool::{OperationPool, PersistedOperationPool};
use parking_lot::{RwLock, RwLockReadGuard};
use slot_clock::SlotClock;
use state_processing::per_block_processing::errors::{
AttestationValidationError, AttesterSlashingValidationError, DepositValidationError,
ExitValidationError, ProposerSlashingValidationError, TransferValidationError,
};
use state_processing::{
per_block_processing, per_block_processing_without_verifying_block_signature,
per_slot_processing, BlockProcessingError,
};
use std::sync::Arc;
use store::iter::{BlockIterator, BlockRootsIterator, StateRootsIterator};
use store::{Error as DBError, Store};
use tree_hash::TreeHash;
use types::*;
// Text included in blocks.
// Must be 32-bytes or panic.
//
// |-------must be this long------|
// NOTE: `produce_block_on_state` copies these bytes into a `[u8; 32]` with
// `copy_from_slice`, which panics if the length is not exactly 32.
pub const GRAFFITI: &str = "sigp/lighthouse-0.0.0-prerelease";
/// The outcome of attempting to import a block via `BeaconChain::process_block`.
///
/// Only `Processed` indicates the block was added to the DAG; all other variants are
/// non-fatal reasons the block was not imported.
#[derive(Debug, PartialEq)]
pub enum BlockProcessingOutcome {
    /// Block was valid and imported into the block graph.
    Processed { block_root: Hash256 },
    /// The blocks parent_root is unknown.
    ParentUnknown { parent: Hash256 },
    /// The block slot is greater than the present slot.
    FutureSlot {
        present_slot: Slot,
        block_slot: Slot,
    },
    /// The block state_root does not match the generated state.
    StateRootMismatch,
    /// The block was a genesis block, these blocks cannot be re-imported.
    GenesisBlock,
    /// The slot is finalized, no need to import.
    FinalizedSlot,
    /// Block is already known, no need to re-import.
    BlockIsAlreadyKnown,
    /// The block could not be applied to the state, it is invalid.
    PerBlockProcessingError(BlockProcessingError),
}
/// The set of concrete types used to instantiate a `BeaconChain`.
///
/// Lets the storage backend, slot clock, fork-choice backend and eth2 spec preset be
/// swapped without changing `BeaconChain` itself.
pub trait BeaconChainTypes {
    /// Persistent storage for blocks, states, etc.
    type Store: store::Store;
    /// Source of the present slot.
    type SlotClock: slot_clock::SlotClock;
    /// Fork-choice backend.
    type LmdGhost: LmdGhost<Self::Store, Self::EthSpec>;
    /// Compile-time chain constants (e.g. slots per epoch).
    type EthSpec: types::EthSpec;
}
/// Represents the "Beacon Chain" component of Ethereum 2.0. Allows import of blocks and block
/// operations and chooses a canonical head.
pub struct BeaconChain<T: BeaconChainTypes> {
    pub spec: ChainSpec,
    /// Persistent storage for blocks, states, etc. Typically an on-disk store, such as LevelDB.
    pub store: Arc<T::Store>,
    /// Reports the current slot, typically based upon the system clock.
    pub slot_clock: T::SlotClock,
    /// Stores all operations (e.g., `Attestation`, `Deposit`, etc) that are candidates for
    /// inclusion in a block.
    pub op_pool: OperationPool<T::EthSpec>,
    /// Stores a "snapshot" of the chain at the time the head-of-the-chain block was received.
    canonical_head: RwLock<CheckPoint<T::EthSpec>>,
    /// The same state from `self.canonical_head`, but updated at the start of each slot with a
    /// skip slot if no block is received. This is effectively a cache that avoids repeating calls
    /// to `per_slot_processing`.
    state: RwLock<BeaconState<T::EthSpec>>,
    /// The root of the genesis block.
    genesis_block_root: Hash256,
    /// A state-machine that is updated with information from the network and chooses a canonical
    /// head block.
    pub fork_choice: ForkChoice<T>,
    /// Stores metrics about this `BeaconChain`.
    pub metrics: Metrics,
}
impl<T: BeaconChainTypes> BeaconChain<T> {
/// Instantiate a new Beacon Chain, from genesis.
pub fn from_genesis(
store: Arc<T::Store>,
slot_clock: T::SlotClock,
mut genesis_state: BeaconState<T::EthSpec>,
genesis_block: BeaconBlock,
spec: ChainSpec,
) -> Result<Self, Error> {
genesis_state.build_all_caches(&spec)?;
let state_root = genesis_state.canonical_root();
store.put(&state_root, &genesis_state)?;
let genesis_block_root = genesis_block.block_header().canonical_root();
store.put(&genesis_block_root, &genesis_block)?;
// Also store the genesis block under the `ZERO_HASH` key.
let genesis_block_root = genesis_block.block_header().canonical_root();
store.put(&spec.zero_hash, &genesis_block)?;
let canonical_head = RwLock::new(CheckPoint::new(
genesis_block.clone(),
genesis_block_root,
genesis_state.clone(),
state_root,
));
Ok(Self {
spec,
slot_clock,
op_pool: OperationPool::new(),
state: RwLock::new(genesis_state),
canonical_head,
genesis_block_root,
fork_choice: ForkChoice::new(store.clone(), &genesis_block, genesis_block_root),
metrics: Metrics::new()?,
store,
})
}
    /// Attempt to load an existing instance from the given `store`.
    ///
    /// Returns `Ok(None)` if no persisted chain exists (distinct from a DB failure).
    pub fn from_store(
        store: Arc<T::Store>,
        spec: ChainSpec,
    ) -> Result<Option<BeaconChain<T>>, Error> {
        let key = Hash256::from_slice(&BEACON_CHAIN_DB_KEY.as_bytes());
        let p: PersistedBeaconChain<T> = match store.get(&key) {
            Err(e) => return Err(e.into()),
            Ok(None) => return Ok(None),
            Ok(Some(p)) => p,
        };
        // The slot clock is rebuilt from the spec and the persisted genesis time; it is
        // not itself persisted.
        let slot_clock = T::SlotClock::new(
            spec.genesis_slot,
            p.state.genesis_time,
            spec.seconds_per_slot,
        );
        // Fork choice is re-seeded from the last finalized block/root of the persisted head.
        let last_finalized_root = p.canonical_head.beacon_state.finalized_root;
        let last_finalized_block = &p.canonical_head.beacon_block;
        let op_pool = p.op_pool.into_operation_pool(&p.state, &spec);
        Ok(Some(BeaconChain {
            spec,
            slot_clock,
            fork_choice: ForkChoice::new(store.clone(), last_finalized_block, last_finalized_root),
            op_pool,
            canonical_head: RwLock::new(p.canonical_head),
            state: RwLock::new(p.state),
            genesis_block_root: p.genesis_block_root,
            metrics: Metrics::new()?,
            store,
        }))
    }
/// Attempt to save this instance to `self.store`.
pub fn persist(&self) -> Result<(), Error> {
let p: PersistedBeaconChain<T> = PersistedBeaconChain {
canonical_head: self.canonical_head.read().clone(),
op_pool: PersistedOperationPool::from_operation_pool(&self.op_pool),
genesis_block_root: self.genesis_block_root,
state: self.state.read().clone(),
};
let key = Hash256::from_slice(&BEACON_CHAIN_DB_KEY.as_bytes());
self.store.put(&key, &p)?;
Ok(())
}
/// Returns the beacon block body for each beacon block root in `roots`.
///
/// Fails if any root in `roots` does not have a corresponding block.
pub fn get_block_bodies(&self, roots: &[Hash256]) -> Result<Vec<BeaconBlockBody>, Error> {
let bodies: Result<Vec<BeaconBlockBody>, _> = roots
.iter()
.map(|root| match self.get_block(root)? {
Some(block) => Ok(block.body),
None => Err(Error::DBInconsistent(format!("Missing block: {}", root))),
})
.collect();
Ok(bodies?)
}
/// Returns the beacon block header for each beacon block root in `roots`.
///
/// Fails if any root in `roots` does not have a corresponding block.
pub fn get_block_headers(&self, roots: &[Hash256]) -> Result<Vec<BeaconBlockHeader>, Error> {
let headers: Result<Vec<BeaconBlockHeader>, _> = roots
.iter()
.map(|root| match self.get_block(root)? {
Some(block) => Ok(block.block_header()),
None => Err(Error::DBInconsistent("Missing block".into())),
})
.collect();
Ok(headers?)
}
    /// Iterate in reverse (highest to lowest slot) through all blocks from the block at `slot`
    /// through to the genesis block.
    ///
    /// Returns `None` for headers prior to genesis or when there is an error reading from `Store`.
    ///
    /// Contains duplicate headers when skip slots are encountered.
    pub fn rev_iter_blocks(&self, slot: Slot) -> BlockIterator<T::EthSpec, T::Store> {
        // The iterator owns a clone of the present state, detaching it from later updates.
        BlockIterator::owned(self.store.clone(), self.state.read().clone(), slot)
    }
    /// Iterates in reverse (highest to lowest slot) through all block roots from `slot` through to
    /// genesis.
    ///
    /// Returns `None` for roots prior to genesis or when there is an error reading from `Store`.
    ///
    /// Contains duplicate roots when skip slots are encountered.
    pub fn rev_iter_block_roots(&self, slot: Slot) -> BlockRootsIterator<T::EthSpec, T::Store> {
        // The iterator owns a clone of the present state, detaching it from later updates.
        BlockRootsIterator::owned(self.store.clone(), self.state.read().clone(), slot)
    }
    /// Iterates in reverse (highest to lowest slot) through all state roots from `slot` through to
    /// genesis.
    ///
    /// Returns `None` for roots prior to genesis or when there is an error reading from `Store`.
    pub fn rev_iter_state_roots(&self, slot: Slot) -> StateRootsIterator<T::EthSpec, T::Store> {
        // The iterator owns a clone of the present state, detaching it from later updates.
        StateRootsIterator::owned(self.store.clone(), self.state.read().clone(), slot)
    }
/// Returns the block at the given root, if any.
///
/// ## Errors
///
/// May return a database error.
pub fn get_block(&self, block_root: &Hash256) -> Result<Option<BeaconBlock>, Error> {
Ok(self.store.get(block_root)?)
}
    /// Returns a read-lock guarded `BeaconState` which is the `canonical_head` that has been
    /// updated to match the current slot clock.
    ///
    /// The returned guard holds the read lock until dropped.
    pub fn current_state(&self) -> RwLockReadGuard<BeaconState<T::EthSpec>> {
        self.state.read()
    }
    /// Returns a read-lock guarded `CheckPoint` struct for reading the head (as chosen by the
    /// fork-choice rule).
    ///
    /// It is important to note that the `beacon_state` returned may not match the present slot. It
    /// is the state as it was when the head block was received, which could be some slots prior to
    /// now.
    ///
    /// The guard holds a read lock on `canonical_head`; drop it before calling methods
    /// that take the write lock (e.g. via `update_canonical_head`).
    pub fn head(&self) -> RwLockReadGuard<CheckPoint<T::EthSpec>> {
        self.canonical_head.read()
    }
/// Returns the slot of the highest block in the canonical chain.
pub fn best_slot(&self) -> Slot {
self.canonical_head.read().beacon_block.slot
}
    /// Ensures the current canonical `BeaconState` has been transitioned to match the `slot_clock`.
    pub fn catchup_state(&self) -> Result<(), Error> {
        let spec = &self.spec;
        let present_slot = match self.slot_clock.present_slot() {
            Ok(Some(slot)) => slot,
            _ => return Err(Error::UnableToReadSlot),
        };
        // Only take the write lock if the cached state is actually behind.
        if self.state.read().slot < present_slot {
            let mut state = self.state.write();
            // If required, transition the new state to the present slot.
            // (The loop re-reads `state.slot` under the write lock, so a concurrent
            // catch-up between the read check and here is harmless.)
            for _ in state.slot.as_u64()..present_slot.as_u64() {
                // Ensure the next epoch state caches are built in case of an epoch transition.
                state.build_committee_cache(RelativeEpoch::Next, spec)?;
                per_slot_processing(&mut *state, spec)?;
            }
            state.build_all_caches(spec)?;
        }
        Ok(())
    }
/// Build all of the caches on the current state.
///
/// Ideally this shouldn't be required, however we leave it here for testing.
pub fn ensure_state_caches_are_built(&self) -> Result<(), Error> {
self.state.write().build_all_caches(&self.spec)?;
Ok(())
}
/// Returns the validator index (if any) for the given public key.
///
/// Information is retrieved from the present `beacon_state.validator_registry`.
pub fn validator_index(&self, pubkey: &PublicKey) -> Option<usize> {
for (i, validator) in self
.head()
.beacon_state
.validator_registry
.iter()
.enumerate()
{
if validator.pubkey == *pubkey {
return Some(i);
}
}
None
}
/// Reads the slot clock, returns `None` if the slot is unavailable.
///
/// The slot might be unavailable due to an error with the system clock, or if the present time
/// is before genesis (i.e., a negative slot).
///
/// This is distinct to `present_slot`, which simply reads the latest state. If a
/// call to `read_slot_clock` results in a higher slot than a call to `present_slot`,
/// `self.state` should undergo per slot processing.
pub fn read_slot_clock(&self) -> Option<Slot> {
match self.slot_clock.present_slot() {
Ok(Some(some_slot)) => Some(some_slot),
Ok(None) => None,
_ => None,
}
}
/// Reads the slot clock (see `self.read_slot_clock()` and returns the number of slots since
/// genesis.
pub fn slots_since_genesis(&self) -> Option<SlotHeight> {
let now = self.read_slot_clock()?;
let genesis_slot = self.spec.genesis_slot;
if now < genesis_slot {
None
} else {
Some(SlotHeight::from(now.as_u64() - genesis_slot.as_u64()))
}
}
    /// Returns slot of the present state.
    ///
    /// This is distinct to `read_slot_clock`, which reads from the actual system clock. If
    /// `self.state` has not been transitioned it is possible for the system clock to be on a
    /// different slot to what is returned from this call.
    pub fn present_slot(&self) -> Slot {
        // Reads the cached state's slot; does not consult the system clock.
        self.state.read().slot
    }
    /// Returns the block proposer for a given slot.
    ///
    /// Information is read from the present `beacon_state` shuffling, only information from the
    /// present epoch is available.
    pub fn block_proposer(&self, slot: Slot) -> Result<usize, Error> {
        // Ensures that the present state has been advanced to the present slot, skipping slots if
        // blocks are not present.
        self.catchup_state()?;
        // TODO: permit lookups of the proposer at any slot.
        // `RelativeEpoch::Current` restricts lookups to the state's own epoch.
        let index = self.state.read().get_beacon_proposer_index(
            slot,
            RelativeEpoch::Current,
            &self.spec,
        )?;
        Ok(index)
    }
    /// Returns the attestation slot and shard for a given validator index.
    ///
    /// Information is read from the current state, so only information from the present and prior
    /// epoch is available.
    ///
    /// NOTE(review): the method name misspells "attestation"; renaming would break
    /// callers, so it is left as-is.
    pub fn validator_attestion_slot_and_shard(
        &self,
        validator_index: usize,
    ) -> Result<Option<(Slot, u64)>, BeaconStateError> {
        trace!(
            "BeaconChain::validator_attestion_slot_and_shard: validator_index: {}",
            validator_index
        );
        if let Some(attestation_duty) = self
            .state
            .read()
            .get_attestation_duties(validator_index, RelativeEpoch::Current)?
        {
            Ok(Some((attestation_duty.slot, attestation_duty.shard)))
        } else {
            Ok(None)
        }
    }
/// Produce an `AttestationData` that is valid for the present `slot` and given `shard`.
///
/// Attests to the canonical chain.
pub fn produce_attestation_data(&self, shard: u64) -> Result<AttestationData, Error> {
let state = self.state.read();
let head_block_root = self.head().beacon_block_root;
let head_block_slot = self.head().beacon_block.slot;
self.produce_attestation_data_for_block(shard, head_block_root, head_block_slot, &*state)
}
    /// Produce an `AttestationData` that attests to the chain denoted by `block_root` and `state`.
    ///
    /// Permits attesting to any arbitrary chain. Generally, the `produce_attestation_data`
    /// function should be used as it attests to the canonical chain.
    pub fn produce_attestation_data_for_block(
        &self,
        shard: u64,
        head_block_root: Hash256,
        head_block_slot: Slot,
        state: &BeaconState<T::EthSpec>,
    ) -> Result<AttestationData, Error> {
        // Collect some metrics.
        self.metrics.attestation_production_requests.inc();
        let timer = self.metrics.attestation_production_times.start_timer();
        let slots_per_epoch = T::EthSpec::slots_per_epoch();
        let current_epoch_start_slot = state.current_epoch().start_slot(slots_per_epoch);
        // The `target_root` is the root of the first block of the current epoch.
        //
        // The `state` does not know the root of the block for it's current slot (it only knows
        // about blocks from prior slots). This creates an edge-case when the state is on the first
        // slot of the epoch -- we're unable to obtain the `target_root` because it is not a prior
        // root.
        //
        // This edge case is handled in two ways:
        //
        // - If the head block is on the same slot as the state, we use it's root.
        // - Otherwise, assume the current slot has been skipped and use the block root from the
        //   prior slot.
        //
        // For all other cases, we simply read the `target_root` from `state.latest_block_roots`.
        let target_root = if state.slot == current_epoch_start_slot {
            if head_block_slot == current_epoch_start_slot {
                head_block_root
            } else {
                *state.get_block_root(current_epoch_start_slot - 1)?
            }
        } else {
            *state.get_block_root(current_epoch_start_slot)?
        };
        // Hash the current crosslink for this shard; the attestation must reference it.
        let previous_crosslink_root =
            Hash256::from_slice(&state.get_current_crosslink(shard)?.tree_hash_root());
        // Collect some metrics.
        self.metrics.attestation_production_successes.inc();
        timer.observe_duration();
        // `crosslink_data_root` is zeroed pending phase 1 (shard data) — see spec.
        Ok(AttestationData {
            beacon_block_root: head_block_root,
            source_epoch: state.current_justified_epoch,
            source_root: state.current_justified_root,
            target_epoch: state.current_epoch(),
            target_root,
            shard,
            previous_crosslink_root,
            crosslink_data_root: Hash256::zero(),
        })
    }
    /// Accept a new attestation from the network.
    ///
    /// If valid, the attestation is added to the `op_pool` and aggregated with another attestation
    /// if possible.
    pub fn process_attestation(
        &self,
        attestation: Attestation,
    ) -> Result<(), AttestationValidationError> {
        self.metrics.attestation_processing_requests.inc();
        let timer = self.metrics.attestation_processing_times.start_timer();
        // Only feed fork choice when the attestation's target block is known.
        //
        // NOTE(review): the `_ => {}` arm silently skips both "block unknown" AND store
        // read errors — a DB failure cannot be reported through
        // `AttestationValidationError`; confirm this is intended.
        match self.store.exists::<BeaconBlock>(&attestation.data.target_root) {
            Ok(true) => {
                per_block_processing::validate_attestation_time_independent_only(&*self.state.read(), &attestation, &self.spec)?;
                self.fork_choice.process_attestation(&*self.state.read(), &attestation);
            },
            _ => {}
        };
        // Regardless of fork choice, queue the attestation for block inclusion.
        let result = self
            .op_pool
            .insert_attestation(attestation, &*self.state.read(), &self.spec);
        timer.observe_duration();
        if result.is_ok() {
            self.metrics.attestation_processing_successes.inc();
        }
        // TODO: process attestation. Please consider:
        //
        //  - Because a block was not added to the op pool does not mean it's invalid (it might
        //  just be old).
        //  - The attestation should be rejected if we don't know the block (ideally it should be
        //  queued, but this may be overkill).
        //  - The attestation _must_ be validated against it's state before being added to fork
        //  choice.
        //  - You can avoid verifying some attestations by first checking if they're a latest
        //  message. This would involve expanding the `LmdGhost` API.
        result
    }
    /// Accept some deposit and queue it for inclusion in an appropriate block.
    ///
    /// Returns the op pool's insertion status on success.
    pub fn process_deposit(
        &self,
        deposit: Deposit,
    ) -> Result<DepositInsertStatus, DepositValidationError> {
        // Note: unlike the other operation handlers, no chain state is passed here —
        // the op pool inserts deposits without state context.
        self.op_pool.insert_deposit(deposit)
    }
/// Accept some exit and queue it for inclusion in an appropriate block.
pub fn process_voluntary_exit(&self, exit: VoluntaryExit) -> Result<(), ExitValidationError> {
self.op_pool
.insert_voluntary_exit(exit, &*self.state.read(), &self.spec)
}
/// Accept some transfer and queue it for inclusion in an appropriate block.
pub fn process_transfer(&self, transfer: Transfer) -> Result<(), TransferValidationError> {
self.op_pool
.insert_transfer(transfer, &*self.state.read(), &self.spec)
}
/// Accept some proposer slashing and queue it for inclusion in an appropriate block.
pub fn process_proposer_slashing(
&self,
proposer_slashing: ProposerSlashing,
) -> Result<(), ProposerSlashingValidationError> {
self.op_pool
.insert_proposer_slashing(proposer_slashing, &*self.state.read(), &self.spec)
}
/// Accept some attester slashing and queue it for inclusion in an appropriate block.
pub fn process_attester_slashing(
&self,
attester_slashing: AttesterSlashing,
) -> Result<(), AttesterSlashingValidationError> {
self.op_pool
.insert_attester_slashing(attester_slashing, &*self.state.read(), &self.spec)
}
/// Accept some block and attempt to add it to block DAG.
///
/// Will accept blocks from prior slots, however it will reject any block from a future slot.
pub fn process_block(&self, block: BeaconBlock) -> Result<BlockProcessingOutcome, Error> {
self.metrics.block_processing_requests.inc();
let timer = self.metrics.block_processing_times.start_timer();
let finalized_slot = self
.state
.read()
.finalized_epoch
.start_slot(T::EthSpec::slots_per_epoch());
if block.slot <= finalized_slot {
return Ok(BlockProcessingOutcome::FinalizedSlot);
}
if block.slot == 0 {
return Ok(BlockProcessingOutcome::GenesisBlock);
}
let block_root = block.block_header().canonical_root();
if block_root == self.genesis_block_root {
return Ok(BlockProcessingOutcome::GenesisBlock);
}
let present_slot = self
.read_slot_clock()
.ok_or_else(|| Error::UnableToReadSlot)?;
if block.slot > present_slot {
return Ok(BlockProcessingOutcome::FutureSlot {
present_slot,
block_slot: block.slot,
});
}
if self.store.exists::<BeaconBlock>(&block_root)? {
return Ok(BlockProcessingOutcome::BlockIsAlreadyKnown);
}
// Load the blocks parent block from the database, returning invalid if that block is not
// found.
let parent_block_root = block.previous_block_root;
let parent_block: BeaconBlock = match self.store.get(&parent_block_root)? {
Some(previous_block_root) => previous_block_root,
None => {
return Ok(BlockProcessingOutcome::ParentUnknown {
parent: parent_block_root,
});
}
};
// Load the parent blocks state from the database, returning an error if it is not found.
// It is an error because if know the parent block we should also know the parent state.
let parent_state_root = parent_block.state_root;
let parent_state = self
.store
.get(&parent_state_root)?
.ok_or_else(|| Error::DBInconsistent(format!("Missing state {}", parent_state_root)))?;
// Transition the parent state to the block slot.
let mut state: BeaconState<T::EthSpec> = parent_state;
for _ in state.slot.as_u64()..block.slot.as_u64() {
per_slot_processing(&mut state, &self.spec)?;
}
state.build_committee_cache(RelativeEpoch::Current, &self.spec)?;
// Apply the received block to its parent state (which has been transitioned into this
// slot).
match per_block_processing(&mut state, &block, &self.spec) {
Err(BlockProcessingError::BeaconStateError(e)) => {
return Err(Error::BeaconStateError(e))
}
Err(e) => return Ok(BlockProcessingOutcome::PerBlockProcessingError(e)),
_ => {}
}
let state_root = state.canonical_root();
if block.state_root != state_root {
return Ok(BlockProcessingOutcome::StateRootMismatch);
}
// Store the block and state.
self.store.put(&block_root, &block)?;
self.store.put(&state_root, &state)?;
// Register the new block with the fork choice service.
self.fork_choice.process_block(&state, &block, block_root)?;
// Execute the fork choice algorithm, enthroning a new head if discovered.
//
// Note: in the future we may choose to run fork-choice less often, potentially based upon
// some heuristic around number of attestations seen for the block.
self.fork_choice()?;
self.metrics.block_processing_successes.inc();
self.metrics
.operations_per_block_attestation
.observe(block.body.attestations.len() as f64);
timer.observe_duration();
Ok(BlockProcessingOutcome::Processed { block_root })
}
/// Produce a new block at the present slot.
///
/// The produced block will not be inherently valid, it must be signed by a block producer.
/// Block signing is out of the scope of this function and should be done by a separate program.
pub fn produce_block(
    &self,
    randao_reveal: Signature,
) -> Result<(BeaconBlock, BeaconState<T::EthSpec>), BlockProductionError> {
    // Resolve the current slot first; bail out early if the clock is unreadable.
    let slot = self
        .read_slot_clock()
        .ok_or(BlockProductionError::UnableToReadSlot)?;
    // Work on a clone of the canonical state so production cannot mutate it.
    let state = self.state.read().clone();
    self.produce_block_on_state(state, slot, randao_reveal)
}
/// Produce a block for some `slot` upon the given `state`.
///
/// Typically the `self.produce_block()` function should be used, instead of calling this
/// function directly. This function is useful for purposefully creating forks or blocks at
/// non-current slots.
///
/// The given state will be advanced to the given `produce_at_slot`, then a block will be
/// produced at that slot height.
///
/// The returned block embeds the true post-state root but carries an empty signature;
/// signing is the caller's responsibility.
pub fn produce_block_on_state(
    &self,
    mut state: BeaconState<T::EthSpec>,
    produce_at_slot: Slot,
    randao_reveal: Signature,
) -> Result<(BeaconBlock, BeaconState<T::EthSpec>), BlockProductionError> {
    self.metrics.block_production_requests.inc();
    let timer = self.metrics.block_production_times.start_timer();
    // If required, transition the new state to the present slot.
    while state.slot < produce_at_slot {
        per_slot_processing(&mut state, &self.spec)?;
    }
    // The committee cache is required by `per_block_processing` below.
    state.build_committee_cache(RelativeEpoch::Current, &self.spec)?;
    // Parent root: taken from the state's block-root history, except at slot 0 where
    // the latest block header's root is used instead.
    let previous_block_root = if state.slot > 0 {
        *state
            .get_block_root(state.slot - 1)
            .map_err(|_| BlockProductionError::UnableToGetBlockRootFromState)?
    } else {
        state.latest_block_header.canonical_root()
    };
    let mut graffiti: [u8; 32] = [0; 32];
    // NOTE(review): assumes `GRAFFITI` is exactly 32 bytes; `copy_from_slice` panics
    // on a length mismatch — confirm the constant's length.
    graffiti.copy_from_slice(GRAFFITI.as_bytes());
    // Pull pending operations from the operation pool.
    let (proposer_slashings, attester_slashings) =
        self.op_pool.get_slashings(&state, &self.spec);
    let mut block = BeaconBlock {
        slot: state.slot,
        previous_block_root,
        state_root: Hash256::zero(), // Updated after the state is calculated.
        signature: Signature::empty_signature(), // To be completed by a validator.
        body: BeaconBlockBody {
            randao_reveal,
            // TODO: replace with real data.
            eth1_data: Eth1Data {
                deposit_count: 0,
                deposit_root: Hash256::zero(),
                block_hash: Hash256::zero(),
            },
            graffiti,
            proposer_slashings,
            attester_slashings,
            attestations: self.op_pool.get_attestations(&state, &self.spec),
            deposits: self.op_pool.get_deposits(&state, &self.spec),
            voluntary_exits: self.op_pool.get_voluntary_exits(&state, &self.spec),
            transfers: self.op_pool.get_transfers(&state, &self.spec),
        },
    };
    // Apply the block to the state so the true post-state root can be embedded in it.
    per_block_processing_without_verifying_block_signature(&mut state, &block, &self.spec)?;
    let state_root = state.canonical_root();
    block.state_root = state_root;
    self.metrics.block_production_successes.inc();
    timer.observe_duration();
    Ok((block, state))
}
/// Execute the fork choice algorithm and enthrone the result as the canonical head.
///
/// Finds the head via the fork choice service, loads the corresponding block and state
/// from the store, and updates the canonical head when it changed. Returns
/// `Error::RevertedFinalizedEpoch` rather than ever moving finality backwards.
pub fn fork_choice(&self) -> Result<(), Error> {
    self.metrics.fork_choice_requests.inc();
    // Start fork choice metrics timer.
    let timer = self.metrics.fork_choice_times.start_timer();
    // Determine the root of the block that is the head of the chain.
    let beacon_block_root = self.fork_choice.find_head(&self)?;
    // End fork choice metrics timer.
    timer.observe_duration();
    // If a new head was chosen.
    if beacon_block_root != self.head().beacon_block_root {
        self.metrics.fork_choice_changed_head.inc();
        let beacon_block: BeaconBlock = self
            .store
            .get(&beacon_block_root)?
            .ok_or_else(|| Error::MissingBeaconBlock(beacon_block_root))?;
        let beacon_state_root = beacon_block.state_root;
        let beacon_state: BeaconState<T::EthSpec> = self
            .store
            .get(&beacon_state_root)?
            .ok_or_else(|| Error::MissingBeaconState(beacon_state_root))?;
        // If we switched to a new chain (instead of building atop the present chain).
        if self.head().beacon_block_root != beacon_block.previous_block_root {
            self.metrics.fork_choice_reorg_count.inc();
        }
        let old_finalized_epoch = self.head().beacon_state.finalized_epoch;
        let new_finalized_epoch = beacon_state.finalized_epoch;
        let finalized_root = beacon_state.finalized_root;
        // Never revert back past a finalized epoch.
        if new_finalized_epoch < old_finalized_epoch {
            Err(Error::RevertedFinalizedEpoch {
                previous_epoch: old_finalized_epoch,
                new_epoch: new_finalized_epoch,
            })
        } else {
            self.update_canonical_head(CheckPoint {
                // Field-init shorthand; `beacon_block: beacon_block` was redundant.
                beacon_block,
                beacon_block_root,
                beacon_state,
                beacon_state_root,
            })?;
            // Only run finalization housekeeping when the finalized epoch advanced.
            if new_finalized_epoch != old_finalized_epoch {
                self.after_finalization(old_finalized_epoch, finalized_root)?;
            }
            Ok(())
        }
    } else {
        Ok(())
    }
}
/// Update the canonical head to `new_head`.
///
/// Also refreshes the cached "present-slot" state by advancing a clone of the new
/// head's state to the wall-clock slot, then persists `self` to the store.
fn update_canonical_head(&self, new_head: CheckPoint<T::EthSpec>) -> Result<(), Error> {
    // Update the checkpoint that stores the head of the chain at the time it received the
    // block.
    *self.canonical_head.write() = new_head;
    // Update the always-at-the-present-slot state we keep around for performance gains.
    // NOTE(review): the write lock above is released before the read lock below is
    // taken — confirm no writer can interleave between the two.
    *self.state.write() = {
        let mut state = self.canonical_head.read().beacon_state.clone();
        let present_slot = match self.slot_clock.present_slot() {
            Ok(Some(slot)) => slot,
            _ => return Err(Error::UnableToReadSlot),
        };
        // If required, transition the new state to the present slot.
        for _ in state.slot.as_u64()..present_slot.as_u64() {
            per_slot_processing(&mut state, &self.spec)?;
        }
        state.build_all_caches(&self.spec)?;
        state
    };
    // Save `self` to `self.store`.
    self.persist()?;
    Ok(())
}
/// Called after `self` has had a new block finalized.
///
/// Performs pruning and finality-based optimizations.
fn after_finalization(
&self,
old_finalized_epoch: Epoch,
finalized_block_root: Hash256,
) -> Result<(), Error> {
let finalized_block = self
.store
.get::<BeaconBlock>(&finalized_block_root)?
.ok_or_else(|| Error::MissingBeaconBlock(finalized_block_root))?;
let new_finalized_epoch = finalized_block.slot.epoch(T::EthSpec::slots_per_epoch());
if new_finalized_epoch < old_finalized_epoch {
Err(Error::RevertedFinalizedEpoch {
previous_epoch: old_finalized_epoch,
new_epoch: new_finalized_epoch,
})
} else {
self.fork_choice
.process_finalization(&finalized_block, finalized_block_root)?;
Ok(())
}
}
/// Returns `true` if the given block root has not been processed.
pub fn is_new_block_root(&self, beacon_block_root: &Hash256) -> Result<bool, Error> {
    // A block is "new" precisely when the store does not already contain it.
    let already_known = self.store.exists::<BeaconBlock>(beacon_block_root)?;
    Ok(!already_known)
}
/// Dumps the entire canonical chain, from the head to genesis to a vector for analysis.
///
/// This could be a very expensive operation and should only be done in testing/analysis
/// activities.
///
/// The returned vector is ordered genesis-first (it is built head-first, then reversed).
pub fn chain_dump(&self) -> Result<Vec<CheckPoint<T::EthSpec>>, Error> {
    let mut dump = vec![];
    // Start from the current head checkpoint.
    let mut last_slot = CheckPoint {
        beacon_block: self.head().beacon_block.clone(),
        beacon_block_root: self.head().beacon_block_root,
        beacon_state: self.head().beacon_state.clone(),
        beacon_state_root: self.head().beacon_state_root,
    };
    dump.push(last_slot.clone());
    // Walk parent links backwards until the zero hash (genesis parent) is reached.
    loop {
        let beacon_block_root = last_slot.beacon_block.previous_block_root;
        if beacon_block_root == self.spec.zero_hash {
            break; // Genesis has been reached.
        }
        // A missing block or state here indicates an inconsistent database.
        let beacon_block: BeaconBlock =
            self.store.get(&beacon_block_root)?.ok_or_else(|| {
                Error::DBInconsistent(format!("Missing block {}", beacon_block_root))
            })?;
        let beacon_state_root = beacon_block.state_root;
        let beacon_state = self.store.get(&beacon_state_root)?.ok_or_else(|| {
            Error::DBInconsistent(format!("Missing state {}", beacon_state_root))
        })?;
        let slot = CheckPoint {
            beacon_block,
            beacon_block_root,
            beacon_state,
            beacon_state_root,
        };
        dump.push(slot.clone());
        last_slot = slot;
    }
    dump.reverse();
    Ok(dump)
}
}
impl From<DBError> for Error {
fn from(e: DBError) -> Error {
Error::DBError(e)
}
}
impl From<ForkChoiceError> for Error {
fn from(e: ForkChoiceError) -> Error {
Error::ForkChoiceError(e)
}
}
impl From<BeaconStateError> for Error {
fn from(e: BeaconStateError) -> Error {
Error::BeaconStateError(e)
}
}
|
//! Log all requests in a format similar to Heroku's router, but with additional
//! information that we care about like User-Agent
use super::prelude::*;
use crate::util::request_header;
use conduit::{header, RequestExt, StatusCode};
use conduit_cookie::RequestSession;
use std::fmt::{self, Display, Formatter};
use std::time::{SystemTime, UNIX_EPOCH};
/// Requests slower than this many milliseconds get a `SLOW REQUEST` marker in the log line.
const SLOW_REQUEST_THRESHOLD_MS: u64 = 1000;
/// Middleware that logs every request in a Heroku-router-like format.
#[derive(Default)]
pub(super) struct LogRequests();
/// The request path captured in `before()`, stored in the request extensions so the
/// log line can show it even if later middleware rewrites `req.path()`.
struct OriginalPath(String);
impl Middleware for LogRequests {
    /// Stash the original request path so `after()` can log it unchanged.
    fn before(&self, req: &mut dyn RequestExt) -> BeforeResult {
        let path = OriginalPath(req.path().to_string());
        req.mut_extensions().insert(path);
        Ok(())
    }
    /// Print the log line and report request context to Sentry once the response exists.
    fn after(&self, req: &mut dyn RequestExt, res: AfterResult) -> AfterResult {
        // Prefer the time since the proxy received the request (`X-Request-Start`,
        // ms since the Unix epoch) over in-app elapsed time.
        let response_time =
            if let Ok(start_ms) = request_header(req, "x-request-start").parse::<u128>() {
                let current_ms = SystemTime::now()
                    .duration_since(UNIX_EPOCH)
                    .expect("Time went way backwards")
                    .as_millis();
                if current_ms > start_ms {
                    // The result cannot be negative
                    current_ms - start_ms
                } else {
                    // Because our nginx proxy and app run on the same dyno in production, we
                    // shouldn't have to worry about clock drift. But if something goes wrong,
                    // calculate the response time based on when the request reached this app.
                    fallback_response_time(req)
                }
            } else {
                // X-Request-Start header couldn't be parsed.
                // We are probably running locally and not behind nginx.
                fallback_response_time(req)
            };
        // This will only truncate for requests lasting > 500 million years
        let response_time = response_time as u64;
        println!(
            "{}",
            RequestLine {
                req,
                res: &res,
                response_time,
            }
        );
        report_to_sentry(req, &res, response_time);
        res
    }
}
/// Calculate the response time based on when the request reached the in-app web server.
///
/// This serves as a fallback in case the `X-Request-Start` header is missing or invalid.
fn fallback_response_time(req: &mut dyn RequestExt) -> u128 {
    let elapsed = req.elapsed();
    elapsed.as_millis()
}
/// Extra key/value pairs attached to a request via `add_custom_metadata()`; they are
/// appended to the log line and forwarded to Sentry.
struct CustomMetadata {
    // Keys are static so handlers register fixed field names; values are pre-rendered.
    entries: Vec<(&'static str, String)>,
}
/// Record a `key=value` pair on the request; it will appear in the final log line.
pub fn add_custom_metadata<V: Display>(req: &mut dyn RequestExt, key: &'static str, value: V) {
    let entry = (key, value.to_string());
    match req.mut_extensions().find_mut::<CustomMetadata>() {
        Some(metadata) => metadata.entries.push(entry),
        None => {
            // First metadata entry for this request: create the container.
            let metadata = CustomMetadata {
                entries: vec![entry],
            };
            req.mut_extensions().insert(metadata);
        }
    }
}
/// Attach request context (user id, request id, response status, response time and any
/// custom metadata) to the current Sentry scope so error reports carry it.
fn report_to_sentry(req: &dyn RequestExt, res: &AfterResult, response_time: u64) {
    sentry::configure_scope(|scope| {
        {
            // Identify the user from the session, when logged in.
            let id = req.session().get("user_id").map(|str| str.to_string());
            let user = sentry::User {
                id,
                ..Default::default()
            };
            scope.set_user(Some(user));
        }
        // Tag with the upstream request id header when present and valid UTF-8.
        if let Some(request_id) = req
            .headers()
            .get("x-request-id")
            .and_then(|value| value.to_str().ok())
        {
            scope.set_tag("request.id", request_id);
        }
        {
            // An `Err` response is reported as a 500.
            let status = res
                .as_ref()
                .map(|resp| resp.status())
                .unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
            scope.set_tag("response.status", status.as_str());
        }
        scope.set_extra("Response time [ms]", response_time.into());
        // Forward every custom metadata entry as a Sentry extra.
        if let Some(metadata) = req.extensions().find::<CustomMetadata>() {
            for (key, value) in &metadata.entries {
                scope.set_extra(key, value.to_string().into());
            }
        }
    });
}
#[cfg(test)]
/// Test helper: fetch the metadata value recorded under `key`, panicking when absent.
pub(crate) fn get_log_message(req: &dyn RequestExt, key: &'static str) -> String {
    // Unwrap shouldn't panic as no other code has access to the private struct to remove it
    let metadata = req.extensions().find::<CustomMetadata>().unwrap();
    match metadata.entries.iter().find(|(k, _)| *k == key) {
        Some((_, v)) => v.clone(),
        None => panic!("expected log message for {} not found", key),
    }
}
/// `Display`-able bundle of everything that goes into a single log line.
struct RequestLine<'r> {
    req: &'r dyn RequestExt,
    res: &'r AfterResult,
    // Milliseconds, already clamped/converted by the middleware.
    response_time: u64,
}
impl Display for RequestLine<'_> {
    /// Render the Heroku-router-style `key=value` log line; field order is part of
    /// the log format and must not change.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut line = LogLine::new(f);
        // An `Err` response is logged as a 500.
        let status = self.res.as_ref().map(|res| res.status());
        let status = status.unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
        let at = if status.is_server_error() {
            "error"
        } else {
            "info"
        };
        line.add_field("at", at)?;
        line.add_field("method", self.req.method())?;
        line.add_quoted_field("path", FullPath(self.req))?;
        // The request_id is not logged for successful download requests
        if !(self.req.path().ends_with("/download")
            && self
                .res
                .as_ref()
                .ok()
                .map(|ok| ok.status().is_redirection())
                == Some(true))
        {
            line.add_field("request_id", request_header(self.req, "x-request-id"))?;
        }
        line.add_quoted_field("fwd", request_header(self.req, "x-real-ip"))?;
        line.add_field("service", TimeMs(self.response_time))?;
        line.add_field("status", status.as_str())?;
        line.add_quoted_field("user_agent", request_header(self.req, header::USER_AGENT))?;
        // Append any handler-supplied metadata entries.
        if let Some(metadata) = self.req.extensions().find::<CustomMetadata>() {
            for (key, value) in &metadata.entries {
                line.add_quoted_field(key, value)?;
            }
        }
        if let Err(err) = self.res {
            line.add_quoted_field("error", err)?;
        }
        // Flag slow requests so they are easy to grep for.
        if self.response_time > SLOW_REQUEST_THRESHOLD_MS {
            line.add_marker("SLOW REQUEST")?;
        }
        Ok(())
    }
}
/// Formats the original request path plus its query string, when one exists.
struct FullPath<'a>(&'a dyn RequestExt);

impl<'a> Display for FullPath<'a> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Unwrap shouldn't panic as no other code has access to the private struct to remove it
        let path = &self.0.extensions().find::<OriginalPath>().unwrap().0;
        write!(f, "{}", path)?;
        match self.0.query_string() {
            Some(query) => write!(f, "?{}", query),
            None => Ok(()),
        }
    }
}
/// Millisecond duration that renders as e.g. `123ms`.
struct TimeMs(u64);

impl Display for TimeMs {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}ms", self.0)
    }
}
/// Helper that writes space-separated `key=value` items into a formatter.
struct LogLine<'f, 'g> {
    f: &'f mut Formatter<'g>,
    // True until the first item is written; controls the separating space.
    first: bool,
}

impl<'f, 'g> LogLine<'f, 'g> {
    fn new(f: &'f mut Formatter<'g>) -> Self {
        LogLine { f, first: true }
    }

    /// Write `key=value`.
    fn add_field<K: Display, V: Display>(&mut self, key: K, value: V) -> fmt::Result {
        self.start_item()?;
        write!(self.f, "{}={}", key, value)
    }

    /// Write `key="value"` (the value is not escaped).
    fn add_quoted_field<K: Display, V: Display>(&mut self, key: K, value: V) -> fmt::Result {
        self.start_item()?;
        write!(self.f, "{}=\"{}\"", key, value)
    }

    /// Write a bare marker token.
    fn add_marker<M: Display>(&mut self, marker: M) -> fmt::Result {
        self.start_item()?;
        write!(self.f, "{}", marker)
    }

    /// Emit the separating space before every item except the first.
    fn start_item(&mut self) -> fmt::Result {
        let sep = if self.first { "" } else { " " };
        self.first = false;
        self.f.write_str(sep)
    }
}
middleware::log_request: Move custom metadata handling for Sentry into `add_custom_metadata()`
This allows us to add the metadata to issue reports as soon as possible, instead of only doing it in the `after()` method of the middleware.
//! Log all requests in a format similar to Heroku's router, but with additional
//! information that we care about like User-Agent
use super::prelude::*;
use crate::util::request_header;
use conduit::{header, RequestExt, StatusCode};
use conduit_cookie::RequestSession;
use std::fmt::{self, Display, Formatter};
use std::time::{SystemTime, UNIX_EPOCH};
/// Requests slower than this many milliseconds get a `SLOW REQUEST` marker in the log line.
const SLOW_REQUEST_THRESHOLD_MS: u64 = 1000;
/// Middleware that logs every request in a Heroku-router-like format.
#[derive(Default)]
pub(super) struct LogRequests();
/// The request path captured in `before()`, stored in the request extensions so the
/// log line can show it even if later middleware rewrites `req.path()`.
struct OriginalPath(String);
impl Middleware for LogRequests {
    /// Stash the original request path so `after()` can log it unchanged.
    fn before(&self, req: &mut dyn RequestExt) -> BeforeResult {
        let path = OriginalPath(req.path().to_string());
        req.mut_extensions().insert(path);
        Ok(())
    }
    /// Print the log line and report request context to Sentry once the response exists.
    fn after(&self, req: &mut dyn RequestExt, res: AfterResult) -> AfterResult {
        // Prefer the time since the proxy received the request (`X-Request-Start`,
        // ms since the Unix epoch) over in-app elapsed time.
        let response_time =
            if let Ok(start_ms) = request_header(req, "x-request-start").parse::<u128>() {
                let current_ms = SystemTime::now()
                    .duration_since(UNIX_EPOCH)
                    .expect("Time went way backwards")
                    .as_millis();
                if current_ms > start_ms {
                    // The result cannot be negative
                    current_ms - start_ms
                } else {
                    // Because our nginx proxy and app run on the same dyno in production, we
                    // shouldn't have to worry about clock drift. But if something goes wrong,
                    // calculate the response time based on when the request reached this app.
                    fallback_response_time(req)
                }
            } else {
                // X-Request-Start header couldn't be parsed.
                // We are probably running locally and not behind nginx.
                fallback_response_time(req)
            };
        // This will only truncate for requests lasting > 500 million years
        let response_time = response_time as u64;
        println!(
            "{}",
            RequestLine {
                req,
                res: &res,
                response_time,
            }
        );
        report_to_sentry(req, &res, response_time);
        res
    }
}
/// Calculate the response time based on when the request reached the in-app web server.
///
/// This serves as a fallback in case the `X-Request-Start` header is missing or invalid.
fn fallback_response_time(req: &mut dyn RequestExt) -> u128 {
    let elapsed = req.elapsed();
    elapsed.as_millis()
}
/// Extra key/value pairs attached to a request via `add_custom_metadata()`; they are
/// appended to the log line and forwarded to Sentry.
struct CustomMetadata {
    // Keys are static so handlers register fixed field names; values are pre-rendered.
    entries: Vec<(&'static str, String)>,
}
/// Record a `key=value` pair on the request (shown in the final log line) and attach it
/// to the current Sentry scope immediately, so it is present in reports filed before
/// the middleware's `after()` runs.
pub fn add_custom_metadata<V: Display>(req: &mut dyn RequestExt, key: &'static str, value: V) {
    // Render the value once; the original called `value.to_string()` (i.e. `Display::fmt`)
    // twice — once for the log entries and once for Sentry.
    let value = value.to_string();
    if let Some(metadata) = req.mut_extensions().find_mut::<CustomMetadata>() {
        metadata.entries.push((key, value.clone()));
    } else {
        let metadata = CustomMetadata {
            entries: vec![(key, value.clone())],
        };
        req.mut_extensions().insert(metadata);
    }
    sentry::configure_scope(|scope| scope.set_extra(key, value.into()));
}
/// Attach request context (user id, request id, response status and response time)
/// to the current Sentry scope so error reports carry it.
fn report_to_sentry(req: &dyn RequestExt, res: &AfterResult, response_time: u64) {
    sentry::configure_scope(|scope| {
        {
            // Identify the user from the session, when logged in.
            let id = req.session().get("user_id").map(|str| str.to_string());
            let user = sentry::User {
                id,
                ..Default::default()
            };
            scope.set_user(Some(user));
        }
        // Tag with the upstream request id header when present and valid UTF-8.
        if let Some(request_id) = req
            .headers()
            .get("x-request-id")
            .and_then(|value| value.to_str().ok())
        {
            scope.set_tag("request.id", request_id);
        }
        {
            // An `Err` response is reported as a 500.
            let status = res
                .as_ref()
                .map(|resp| resp.status())
                .unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
            scope.set_tag("response.status", status.as_str());
        }
        scope.set_extra("Response time [ms]", response_time.into());
    });
}
#[cfg(test)]
/// Test helper: fetch the metadata value recorded under `key`, panicking when absent.
pub(crate) fn get_log_message(req: &dyn RequestExt, key: &'static str) -> String {
    // Unwrap shouldn't panic as no other code has access to the private struct to remove it
    let metadata = req.extensions().find::<CustomMetadata>().unwrap();
    match metadata.entries.iter().find(|(k, _)| *k == key) {
        Some((_, v)) => v.clone(),
        None => panic!("expected log message for {} not found", key),
    }
}
/// `Display`-able bundle of everything that goes into a single log line.
struct RequestLine<'r> {
    req: &'r dyn RequestExt,
    res: &'r AfterResult,
    // Milliseconds, already clamped/converted by the middleware.
    response_time: u64,
}
impl Display for RequestLine<'_> {
    /// Render the Heroku-router-style `key=value` log line; field order is part of
    /// the log format and must not change.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let mut line = LogLine::new(f);
        // An `Err` response is logged as a 500.
        let status = self.res.as_ref().map(|res| res.status());
        let status = status.unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
        let at = if status.is_server_error() {
            "error"
        } else {
            "info"
        };
        line.add_field("at", at)?;
        line.add_field("method", self.req.method())?;
        line.add_quoted_field("path", FullPath(self.req))?;
        // The request_id is not logged for successful download requests
        if !(self.req.path().ends_with("/download")
            && self
                .res
                .as_ref()
                .ok()
                .map(|ok| ok.status().is_redirection())
                == Some(true))
        {
            line.add_field("request_id", request_header(self.req, "x-request-id"))?;
        }
        line.add_quoted_field("fwd", request_header(self.req, "x-real-ip"))?;
        line.add_field("service", TimeMs(self.response_time))?;
        line.add_field("status", status.as_str())?;
        line.add_quoted_field("user_agent", request_header(self.req, header::USER_AGENT))?;
        // Append any handler-supplied metadata entries.
        if let Some(metadata) = self.req.extensions().find::<CustomMetadata>() {
            for (key, value) in &metadata.entries {
                line.add_quoted_field(key, value)?;
            }
        }
        if let Err(err) = self.res {
            line.add_quoted_field("error", err)?;
        }
        // Flag slow requests so they are easy to grep for.
        if self.response_time > SLOW_REQUEST_THRESHOLD_MS {
            line.add_marker("SLOW REQUEST")?;
        }
        Ok(())
    }
}
/// Formats the original request path plus its query string, when one exists.
struct FullPath<'a>(&'a dyn RequestExt);

impl<'a> Display for FullPath<'a> {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Unwrap shouldn't panic as no other code has access to the private struct to remove it
        let path = &self.0.extensions().find::<OriginalPath>().unwrap().0;
        write!(f, "{}", path)?;
        match self.0.query_string() {
            Some(query) => write!(f, "?{}", query),
            None => Ok(()),
        }
    }
}
/// Millisecond duration that renders as e.g. `123ms`.
struct TimeMs(u64);

impl Display for TimeMs {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}ms", self.0)
    }
}
/// Helper that writes space-separated `key=value` items into a formatter.
struct LogLine<'f, 'g> {
    f: &'f mut Formatter<'g>,
    // True until the first item is written; controls the separating space.
    first: bool,
}

impl<'f, 'g> LogLine<'f, 'g> {
    fn new(f: &'f mut Formatter<'g>) -> Self {
        LogLine { f, first: true }
    }

    /// Write `key=value`.
    fn add_field<K: Display, V: Display>(&mut self, key: K, value: V) -> fmt::Result {
        self.start_item()?;
        write!(self.f, "{}={}", key, value)
    }

    /// Write `key="value"` (the value is not escaped).
    fn add_quoted_field<K: Display, V: Display>(&mut self, key: K, value: V) -> fmt::Result {
        self.start_item()?;
        write!(self.f, "{}=\"{}\"", key, value)
    }

    /// Write a bare marker token.
    fn add_marker<M: Display>(&mut self, marker: M) -> fmt::Result {
        self.start_item()?;
        write!(self.f, "{}", marker)
    }

    /// Emit the separating space before every item except the first.
    fn start_item(&mut self) -> fmt::Result {
        let sep = if self.first { "" } else { " " };
        self.first = false;
        self.f.write_str(sep)
    }
}
|
use ordermap::OrderMap;
use ordermap::Entry;
use ast::{Directive, FromInputValue, InputValue, Selection};
use executor::Variables;
use value::Value;
use schema::meta::{Argument, MetaType};
use executor::{ExecutionResult, Executor, Registry};
use parser::Spanning;
/// GraphQL type kind
///
/// The GraphQL specification defines a number of type kinds - the meta type
/// of a type.
///
/// Exposed to clients as the `__TypeKind` introspection enum; variant names map to
/// the spec's SCREAMING_CASE values via the derive (with explicit overrides below).
#[derive(GraphQLEnum, Clone, Eq, PartialEq, Debug)]
// Note: _internal flag needed to make derive work in juniper crate itself.
#[graphql(name = "__TypeKind", _internal)]
pub enum TypeKind {
    /// ## Scalar types
    ///
    /// Scalar types appear as the leaf nodes of GraphQL queries. Strings,
    /// numbers, and booleans are the built in types, and while it's possible
    /// to define your own, it's relatively uncommon.
    Scalar,
    /// ## Object types
    ///
    /// The most common type to be implemented by users. Objects have fields
    /// and can implement interfaces.
    Object,
    /// ## Interface types
    ///
    /// Interface types are used to represent overlapping fields between
    /// multiple types, and can be queried for their concrete type.
    Interface,
    /// ## Union types
    ///
    /// Unions are similar to interfaces but can not contain any fields on
    /// their own.
    Union,
    /// ## Enum types
    ///
    /// Like scalars, enum types appear as the leaf nodes of GraphQL queries.
    Enum,
    /// ## Input objects
    ///
    /// Represents complex values provided in queries _into_ the system.
    #[graphql(name = "INPUT_OBJECT")]
    InputObject,
    /// ## List types
    ///
    /// Represent lists of other types. This library provides implementations
    /// for vectors and slices, but other Rust types can be extended to serve
    /// as GraphQL lists.
    List,
    /// ## Non-null types
    ///
    /// In GraphQL, nullable types are the default. By putting a `!` after a
    /// type, it becomes non-nullable.
    #[graphql(name = "NON_NULL")]
    NonNull,
}
/// Field argument container
///
/// Holds the (possibly defaulted) arguments passed to a field resolver.
pub struct Arguments<'a> {
    // `None` when the field was invoked with no arguments and has no defaults.
    args: Option<OrderMap<&'a str, InputValue>>,
}
impl<'a> Arguments<'a> {
    /// Build the argument container, substituting declared defaults (or explicit
    /// nulls) for arguments absent from — or null in — the query.
    #[doc(hidden)]
    pub fn new(
        mut args: Option<OrderMap<&'a str, InputValue>>,
        meta_args: &'a Option<Vec<Argument>>,
    ) -> Arguments<'a> {
        // If defaults are declared but the query provided no arguments at all,
        // start from an empty map so the defaults below can be inserted.
        if meta_args.is_some() && args.is_none() {
            args = Some(OrderMap::new());
        }
        if let (&mut Some(ref mut args), &Some(ref meta_args)) = (&mut args, meta_args) {
            for arg in meta_args {
                if !args.contains_key(arg.name.as_str()) || args[arg.name.as_str()].is_null() {
                    if let Some(ref default_value) = arg.default_value {
                        args.insert(arg.name.as_str(), default_value.clone());
                    } else {
                        args.insert(arg.name.as_str(), InputValue::null());
                    }
                }
            }
        }
        // Field-init shorthand; `args: args` was redundant.
        Arguments { args }
    }
    /// Get and convert an argument into the desired type.
    ///
    /// If the argument is found, or a default argument has been provided,
    /// the `InputValue` will be converted into the type `T`.
    ///
    /// Returns `Some` if the argument is present _and_ type conversion
    /// succeeds.
    pub fn get<T>(&self, key: &str) -> Option<T>
    where
        T: FromInputValue,
    {
        // NOTE: `convert()` is unwrapped — validation is expected to have already
        // guaranteed convertibility by the time execution reaches this point.
        self.args
            .as_ref()
            .and_then(|args| args.get(key))
            .map(|v| v.convert().unwrap())
    }
}
/**
Primary trait used to expose Rust types in a GraphQL schema
All of the convenience macros ultimately expand into an implementation of
this trait for the given type. The macros remove duplicated definitions of
fields and arguments, and add type checks on all resolve functions
automatically. This can all be done manually.
`GraphQLType` provides _some_ convenience methods for you, in the form of
optional trait methods. The `name` and `meta` methods are mandatory, but
other than that, it depends on what type you're exposing:
* Scalars, enums, lists and non null wrappers only require `resolve`,
* Interfaces and objects require `resolve_field` _or_ `resolve` if you want
to implement custom resolution logic (probably not),
* Interfaces and unions require `resolve_into_type` and `concrete_type_name`.
* Input objects do not require anything
## Example
Manually deriving an object is straightforward but tedious. This is the
equivalent of the `User` object as shown in the example in the documentation
root:
```rust
use juniper::{GraphQLType, Registry, FieldResult, Context,
Arguments, Executor, ExecutionResult};
use juniper::meta::MetaType;
# use std::collections::HashMap;
struct User { id: String, name: String, friend_ids: Vec<String> }
struct Database { users: HashMap<String, User> }
impl Context for Database {}
impl GraphQLType for User {
type Context = Database;
type TypeInfo = ();
fn name(_: &()) -> Option<&'static str> {
Some("User")
}
fn meta<'r>(_: &(), registry: &mut Registry<'r>) -> MetaType<'r> {
// First, we need to define all fields and their types on this type.
//
// If we need arguments, want to implement interfaces, or want to add
// documentation strings, we can do it here.
let fields = &[
registry.field::<&String>("id", &()),
registry.field::<&String>("name", &()),
registry.field::<Vec<&User>>("friends", &()),
];
registry.build_object_type::<User>(&(), fields).into_meta()
}
fn resolve_field(
&self,
info: &(),
field_name: &str,
args: &Arguments,
executor: &Executor<Database>
)
-> ExecutionResult
{
// Next, we need to match the queried field name. All arms of this
// match statement return `ExecutionResult`, which makes it hard to
// statically verify that the type you pass on to `executor.resolve*`
// actually matches the one that you defined in `meta()` above.
let database = executor.context();
match field_name {
// Because scalars are defined with another `Context` associated
// type, you must use resolve_with_ctx here to make the executor
// perform automatic type conversion of its argument.
"id" => executor.resolve_with_ctx(info, &self.id),
"name" => executor.resolve_with_ctx(info, &self.name),
// You pass a vector of User objects to `executor.resolve`, and it
// will determine which fields of the sub-objects to actually
// resolve based on the query. The executor instance keeps track
// of its current position in the query.
"friends" => executor.resolve(info,
&self.friend_ids.iter()
.filter_map(|id| database.users.get(id))
.collect::<Vec<_>>()
),
// We can only reach this panic in two cases; either a mismatch
// between the defined schema in `meta()` above, or a validation
// in this library failed because of a bug.
//
// In either of those two cases, the only reasonable way out is
// to panic the thread.
_ => panic!("Field {} not found on type User", field_name),
}
}
}
```
*/
pub trait GraphQLType: Sized {
    /// The expected context type for this GraphQL type
    ///
    /// The context is threaded through query execution to all affected nodes,
    /// and can be used to hold common data, e.g. database connections or
    /// request session information.
    type Context;
    /// Type that may carry additional schema information
    ///
    /// This can be used to implement a schema that is partly dynamic,
    /// meaning that it can use information that is not known at compile time,
    /// for instance by reading it from a configuration file at start-up.
    type TypeInfo;
    /// The name of the GraphQL type to expose.
    ///
    /// This function will be called multiple times during schema construction.
    /// It must _not_ perform any calculation and _always_ return the same
    /// value.
    fn name(info: &Self::TypeInfo) -> Option<&str>;
    /// The meta type representing this GraphQL type.
    fn meta<'r>(info: &Self::TypeInfo, registry: &mut Registry<'r>) -> MetaType<'r>;
    /// Resolve the value of a single field on this type.
    ///
    /// The arguments object contain all specified arguments, with default
    /// values substituted for the ones not provided by the query.
    ///
    /// The executor can be used to drive selections into sub-objects.
    ///
    /// The default implementation panics.
    #[allow(unused_variables)]
    fn resolve_field(
        &self,
        info: &Self::TypeInfo,
        field_name: &str,
        arguments: &Arguments,
        executor: &Executor<Self::Context>,
    ) -> ExecutionResult {
        panic!("resolve_field must be implemented by object types");
    }
    /// Resolve this interface or union into a concrete type
    ///
    /// Try to resolve the current type into the type name provided. If the
    /// type matches, pass the instance along to `executor.resolve`.
    ///
    /// The default implementation panics.
    #[allow(unused_variables)]
    fn resolve_into_type(
        &self,
        info: &Self::TypeInfo,
        type_name: &str,
        selection_set: Option<&[Selection]>,
        executor: &Executor<Self::Context>,
    ) -> ExecutionResult {
        // Default: only succeeds when asked for this type's own name.
        if Self::name(info).unwrap() == type_name {
            Ok(self.resolve(info, selection_set, executor))
        } else {
            panic!("resolve_into_type must be implemented by unions and interfaces");
        }
    }
    /// Return the concrete type name for this instance/union.
    ///
    /// The default implementation panics.
    #[allow(unused_variables)]
    fn concrete_type_name(&self, context: &Self::Context) -> String {
        panic!("concrete_type_name must be implemented by unions and interfaces");
    }
    /// Resolve the provided selection set against the current object.
    ///
    /// For non-object types, the selection set will be `None` and the value
    /// of the object should simply be returned.
    ///
    /// For objects, all fields in the selection set should be resolved.
    ///
    /// The default implementation uses `resolve_field` to resolve all fields,
    /// including those through fragment expansion, for object types. For
    /// non-object types, this method panics.
    ///
    /// Returns `Value::null()` when a non-null field failed to resolve.
    fn resolve(
        &self,
        info: &Self::TypeInfo,
        selection_set: Option<&[Selection]>,
        executor: &Executor<Self::Context>,
    ) -> Value {
        if let Some(selection_set) = selection_set {
            let mut result = OrderMap::new();
            if resolve_selection_set_into(self, info, selection_set, executor, &mut result) {
                Value::object(result)
            } else {
                Value::null()
            }
        } else {
            panic!("resolve() must be implemented by non-object output types");
        }
    }
}
/// Resolve every item of `selection_set` on `instance` into `result`.
///
/// Returns `false` when a non-null field resolved to null or errored, signalling the
/// caller (`GraphQLType::resolve`) to null out the whole object per the GraphQL spec.
fn resolve_selection_set_into<T, CtxT>(
    instance: &T,
    info: &T::TypeInfo,
    selection_set: &[Selection],
    executor: &Executor<CtxT>,
    result: &mut OrderMap<String, Value>,
) -> bool
where
    T: GraphQLType<Context = CtxT>,
{
    let meta_type = executor
        .schema()
        .concrete_type_by_name(
            T::name(info)
                .expect("Resolving named type's selection set")
                .as_ref(),
        )
        .expect("Type not found in schema");
    for selection in selection_set {
        match *selection {
            Selection::Field(Spanning {
                item: ref f,
                start: ref start_pos,
                ..
            }) => {
                // Honor @skip/@include directives.
                if is_excluded(&f.directives, executor.variables()) {
                    continue;
                }
                // Use the alias as the response key when one was given.
                let response_name = &f.alias.as_ref().unwrap_or(&f.name).item;
                // `__typename` is answered here without consulting the resolver.
                if f.name.item == "__typename" {
                    result.insert(
                        (*response_name).to_owned(),
                        Value::string(instance.concrete_type_name(executor.context())),
                    );
                    continue;
                }
                let meta_field = meta_type.field_by_name(f.name.item).unwrap_or_else(|| {
                    // Pass the format arguments to `panic!` directly instead of
                    // allocating an intermediate string via `panic!(format!(...))`.
                    panic!(
                        "Field {} not found on type {:?}",
                        f.name.item,
                        meta_type.name()
                    )
                });
                let exec_vars = executor.variables();
                let sub_exec = executor.field_sub_executor(
                    response_name,
                    &f.name.item,
                    start_pos.clone(),
                    f.selection_set.as_ref().map(|v| &v[..]),
                );
                let field_result = instance.resolve_field(
                    info,
                    f.name.item,
                    &Arguments::new(
                        f.arguments.as_ref().map(|m| {
                            m.item
                                .iter()
                                .map(|&(ref k, ref v)| {
                                    (k.item, v.item.clone().into_const(exec_vars))
                                })
                                .collect()
                        }),
                        &meta_field.arguments,
                    ),
                    &sub_exec,
                );
                match field_result {
                    // Null in a non-null position aborts the whole object.
                    Ok(Value::Null) if meta_field.field_type.is_non_null() => return false,
                    Ok(v) => merge_key_into(result, response_name, v),
                    Err(e) => {
                        sub_exec.push_error_at(e, start_pos.clone());
                        if meta_field.field_type.is_non_null() {
                            return false;
                        }
                        result.insert((*response_name).to_owned(), Value::null());
                    }
                }
            }
            Selection::FragmentSpread(Spanning {
                item: ref spread, ..
            }) => {
                if is_excluded(&spread.directives, executor.variables()) {
                    continue;
                }
                // Validation guarantees the named fragment exists.
                let fragment = &executor
                    .fragment_by_name(spread.name.item)
                    .expect("Fragment could not be found");
                if !resolve_selection_set_into(
                    instance,
                    info,
                    &fragment.selection_set[..],
                    executor,
                    result,
                ) {
                    return false;
                }
            }
            Selection::InlineFragment(Spanning {
                item: ref fragment,
                start: ref start_pos,
                ..
            }) => {
                if is_excluded(&fragment.directives, executor.variables()) {
                    continue;
                }
                let sub_exec = executor.type_sub_executor(
                    fragment.type_condition.as_ref().map(|c| c.item),
                    Some(&fragment.selection_set[..]),
                );
                if let Some(ref type_condition) = fragment.type_condition {
                    // Conditioned fragment: downcast, then merge its fields.
                    let sub_result = instance.resolve_into_type(
                        info,
                        type_condition.item,
                        Some(&fragment.selection_set[..]),
                        &sub_exec,
                    );
                    if let Ok(Value::Object(mut hash_map)) = sub_result {
                        for (k, v) in hash_map.drain(..) {
                            result.insert(k, v);
                        }
                    } else if let Err(e) = sub_result {
                        sub_exec.push_error_at(e, start_pos.clone());
                    }
                } else {
                    // Unconditioned fragment: resolve directly into `result`.
                    if !resolve_selection_set_into(
                        instance,
                        info,
                        &fragment.selection_set[..],
                        &sub_exec,
                        result,
                    ) {
                        return false;
                    }
                }
            }
        }
    }
    true
}
/// Determine whether a selection is excluded by its `@skip`/`@include`
/// directives for the given variable values.
///
/// Returns `true` when a `skip` directive's `if` argument evaluates to
/// true, or an `include` directive's `if` argument evaluates to false.
fn is_excluded(directives: &Option<Vec<Spanning<Directive>>>, vars: &Variables) -> bool {
    if let Some(ref directives) = *directives {
        for &Spanning {
            item: ref directive,
            ..
        } in directives
        {
            // NOTE(review): the condition is evaluated for every directive
            // present, not just `skip`/`include`; this assumes validation
            // guarantees a convertible boolean `if` argument -- confirm.
            let condition: bool = directive
                .arguments
                .iter()
                .flat_map(|m| m.item.get("if"))
                .flat_map(|v| v.item.clone().into_const(vars).convert())
                .next()
                .expect("directive missing boolean `if` argument");
            if (directive.name.item == "skip" && condition) ||
                (directive.name.item == "include" && !condition)
            {
                return true;
            }
        }
    }
    false
}
/// Merge `value` into `result` under the key `response_name`.
///
/// A vacant slot is filled directly. When the slot is occupied and both the
/// existing entry and the incoming value are objects, their fields are
/// merged recursively; any other occupied combination leaves the existing
/// entry untouched.
fn merge_key_into(result: &mut OrderMap<String, Value>, response_name: &str, value: Value) {
    match result.entry(response_name.to_owned()) {
        Entry::Occupied(mut entry) => {
            if let (Some(existing), Value::Object(incoming)) =
                (entry.get_mut().as_mut_object_value(), value)
            {
                merge_maps(existing, incoming);
            }
        }
        Entry::Vacant(slot) => {
            slot.insert(value);
        }
    }
}
/// Recursively fold every key/value pair of `src` into `dest`.
///
/// Keys not yet present in `dest` are inserted as-is; keys that already
/// exist are combined via `merge_key_into`, so nested objects merge instead
/// of overwriting each other.
fn merge_maps(dest: &mut OrderMap<String, Value>, src: OrderMap<String, Value>) {
    for (key, value) in src {
        if !dest.contains_key(&key) {
            dest.insert(key, value);
        } else {
            merge_key_into(dest, &key, value);
        }
    }
}
Fixed a case where `.unwrap` was called instead of the `Option` simply being propagated up the call chain
use ordermap::OrderMap;
use ordermap::Entry;
use ast::{Directive, FromInputValue, InputValue, Selection};
use executor::Variables;
use value::Value;
use schema::meta::{Argument, MetaType};
use executor::{ExecutionResult, Executor, Registry};
use parser::Spanning;
/// GraphQL type kind
///
/// The GraphQL specification defines a number of type kinds - the meta type
/// of a type.
#[derive(GraphQLEnum, Clone, Eq, PartialEq, Debug)]
// Note: _internal flag needed to make derive work in juniper crate itself.
#[graphql(name = "__TypeKind", _internal)]
pub enum TypeKind {
    /// ## Scalar types
    ///
    /// Scalar types appear as the leaf nodes of GraphQL queries. Strings,
    /// numbers, and booleans are the built in types, and while it's possible
    /// to define your own, it's relatively uncommon.
    Scalar,

    /// ## Object types
    ///
    /// The most common type to be implemented by users. Objects have fields
    /// and can implement interfaces.
    Object,

    /// ## Interface types
    ///
    /// Interface types are used to represent overlapping fields between
    /// multiple types, and can be queried for their concrete type.
    Interface,

    /// ## Union types
    ///
    /// Unions are similar to interfaces but can not contain any fields on
    /// their own.
    Union,

    /// ## Enum types
    ///
    /// Like scalars, enum types appear as the leaf nodes of GraphQL queries.
    Enum,

    /// ## Input objects
    ///
    /// Represents complex values provided in queries _into_ the system.
    // Exposed to introspection as `INPUT_OBJECT` rather than the name the
    // derive would otherwise generate.
    #[graphql(name = "INPUT_OBJECT")]
    InputObject,

    /// ## List types
    ///
    /// Represent lists of other types. This library provides implementations
    /// for vectors and slices, but other Rust types can be extended to serve
    /// as GraphQL lists.
    List,

    /// ## Non-null types
    ///
    /// In GraphQL, nullable types are the default. By putting a `!` after a
    /// type, it becomes non-nullable.
    // Exposed to introspection as `NON_NULL`.
    #[graphql(name = "NON_NULL")]
    NonNull,
}
/// Field argument container
pub struct Arguments<'a> {
    // `None` when the field declares no arguments and the query provided
    // none; otherwise a map from argument name to its (possibly defaulted)
    // value — see `Arguments::new` for how defaults are filled in.
    args: Option<OrderMap<&'a str, InputValue>>,
}
impl<'a> Arguments<'a> {
    #[doc(hidden)]
    pub fn new(
        mut args: Option<OrderMap<&'a str, InputValue>>,
        meta_args: &'a Option<Vec<Argument>>,
    ) -> Arguments<'a> {
        // When the field declares arguments but the query supplied none,
        // start from an empty map so defaults can be filled in below.
        if meta_args.is_some() && args.is_none() {
            args = Some(OrderMap::new());
        }
        if let (&mut Some(ref mut args), &Some(ref meta_args)) = (&mut args, meta_args) {
            for arg in meta_args {
                let needs_default =
                    !args.contains_key(arg.name.as_str()) || args[arg.name.as_str()].is_null();
                if needs_default {
                    // Substitute the declared default, or an explicit null
                    // when the argument has no default value.
                    let substitute = match arg.default_value {
                        Some(ref default_value) => default_value.clone(),
                        None => InputValue::null(),
                    };
                    args.insert(arg.name.as_str(), substitute);
                }
            }
        }
        Arguments { args: args }
    }

    /// Get and convert an argument into the desired type.
    ///
    /// If the argument is found, or a default argument has been provided,
    /// the `InputValue` will be converted into the type `T`.
    ///
    /// Returns `Some` if the argument is present _and_ type conversion
    /// succeeds.
    pub fn get<T>(&self, key: &str) -> Option<T>
    where
        T: FromInputValue,
    {
        self.args
            .as_ref()
            .and_then(|args| args.get(key))
            .and_then(|v| v.convert())
    }
}
/**
Primary trait used to expose Rust types in a GraphQL schema
All of the convenience macros ultimately expand into an implementation of
this trait for the given type. The macros remove duplicated definitions of
fields and arguments, and add type checks on all resolve functions
automatically. This can all be done manually.
`GraphQLType` provides _some_ convenience methods for you, in the form of
optional trait methods. The `name` and `meta` methods are mandatory, but
other than that, it depends on what type you're exposing:
* Scalars, enums, lists and non null wrappers only require `resolve`,
* Interfaces and objects require `resolve_field` _or_ `resolve` if you want
to implement custom resolution logic (probably not),
* Interfaces and unions require `resolve_into_type` and `concrete_type_name`.
* Input objects do not require anything
## Example
Manually deriving an object is straightforward but tedious. This is the
equivalent of the `User` object as shown in the example in the documentation
root:
```rust
use juniper::{GraphQLType, Registry, FieldResult, Context,
Arguments, Executor, ExecutionResult};
use juniper::meta::MetaType;
# use std::collections::HashMap;
struct User { id: String, name: String, friend_ids: Vec<String> }
struct Database { users: HashMap<String, User> }
impl Context for Database {}
impl GraphQLType for User {
type Context = Database;
type TypeInfo = ();
fn name(_: &()) -> Option<&'static str> {
Some("User")
}
fn meta<'r>(_: &(), registry: &mut Registry<'r>) -> MetaType<'r> {
// First, we need to define all fields and their types on this type.
//
// If we need arguments, want to implement interfaces, or want to add
// documentation strings, we can do it here.
let fields = &[
registry.field::<&String>("id", &()),
registry.field::<&String>("name", &()),
registry.field::<Vec<&User>>("friends", &()),
];
registry.build_object_type::<User>(&(), fields).into_meta()
}
fn resolve_field(
&self,
info: &(),
field_name: &str,
args: &Arguments,
executor: &Executor<Database>
)
-> ExecutionResult
{
// Next, we need to match the queried field name. All arms of this
// match statement return `ExecutionResult`, which makes it hard to
// statically verify that the type you pass on to `executor.resolve*`
// actually matches the one that you defined in `meta()` above.
let database = executor.context();
match field_name {
// Because scalars are defined with another `Context` associated
// type, you must use resolve_with_ctx here to make the executor
// perform automatic type conversion of its argument.
"id" => executor.resolve_with_ctx(info, &self.id),
"name" => executor.resolve_with_ctx(info, &self.name),
// You pass a vector of User objects to `executor.resolve`, and it
// will determine which fields of the sub-objects to actually
// resolve based on the query. The executor instance keeps track
// of its current position in the query.
"friends" => executor.resolve(info,
&self.friend_ids.iter()
.filter_map(|id| database.users.get(id))
.collect::<Vec<_>>()
),
// We can only reach this panic in two cases; either a mismatch
// between the defined schema in `meta()` above, or a validation
// in this library failed because of a bug.
//
// In either of those two cases, the only reasonable way out is
// to panic the thread.
_ => panic!("Field {} not found on type User", field_name),
}
}
}
```
*/
pub trait GraphQLType: Sized {
    /// The expected context type for this GraphQL type
    ///
    /// The context is threaded through query execution to all affected nodes,
    /// and can be used to hold common data, e.g. database connections or
    /// request session information.
    type Context;

    /// Type that may carry additional schema information
    ///
    /// This can be used to implement a schema that is partly dynamic,
    /// meaning that it can use information that is not known at compile time,
    /// for instance by reading it from a configuration file at start-up.
    type TypeInfo;

    /// The name of the GraphQL type to expose.
    ///
    /// This function will be called multiple times during schema construction.
    /// It must _not_ perform any calculation and _always_ return the same
    /// value.
    fn name(info: &Self::TypeInfo) -> Option<&str>;

    /// The meta type representing this GraphQL type.
    fn meta<'r>(info: &Self::TypeInfo, registry: &mut Registry<'r>) -> MetaType<'r>;

    /// Resolve the value of a single field on this type.
    ///
    /// The arguments object contain all specified arguments, with default
    /// values substituted for the ones not provided by the query.
    ///
    /// The executor can be used to drive selections into sub-objects.
    ///
    /// The default implementation panics.
    #[allow(unused_variables)]
    fn resolve_field(
        &self,
        info: &Self::TypeInfo,
        field_name: &str,
        arguments: &Arguments,
        executor: &Executor<Self::Context>,
    ) -> ExecutionResult {
        panic!("resolve_field must be implemented by object types");
    }

    /// Resolve this interface or union into a concrete type
    ///
    /// Try to resolve the current type into the type name provided. If the
    /// type matches, pass the instance along to `executor.resolve`.
    ///
    /// The default implementation panics.
    #[allow(unused_variables)]
    fn resolve_into_type(
        &self,
        info: &Self::TypeInfo,
        type_name: &str,
        selection_set: Option<&[Selection]>,
        executor: &Executor<Self::Context>,
    ) -> ExecutionResult {
        // Objects trivially "resolve into" their own type; unions and
        // interfaces must override this method.
        if Self::name(info).unwrap() == type_name {
            Ok(self.resolve(info, selection_set, executor))
        } else {
            panic!("resolve_into_type must be implemented by unions and interfaces");
        }
    }

    /// Return the concrete type name for this instance/union.
    ///
    /// The default implementation panics.
    #[allow(unused_variables)]
    fn concrete_type_name(&self, context: &Self::Context) -> String {
        panic!("concrete_type_name must be implemented by unions and interfaces");
    }

    /// Resolve the provided selection set against the current object.
    ///
    /// For non-object types, the selection set will be `None` and the value
    /// of the object should simply be returned.
    ///
    /// For objects, all fields in the selection set should be resolved.
    ///
    /// The default implementation uses `resolve_field` to resolve all fields,
    /// including those through fragment expansion, for object types. For
    /// non-object types, this method panics.
    fn resolve(
        &self,
        info: &Self::TypeInfo,
        selection_set: Option<&[Selection]>,
        executor: &Executor<Self::Context>,
    ) -> Value {
        if let Some(selection_set) = selection_set {
            let mut result = OrderMap::new();
            // A `false` return means a non-null field resolved to null or
            // errored, which nulls out this entire object.
            if resolve_selection_set_into(self, info, selection_set, executor, &mut result) {
                Value::object(result)
            } else {
                Value::null()
            }
        } else {
            panic!("resolve() must be implemented by non-object output types");
        }
    }
}
/// Resolve every selection in `selection_set` against `instance`, writing
/// the field results into `result`.
///
/// Handles plain fields (including the `__typename` meta field), fragment
/// spreads, and inline fragments. Returns `false` as soon as a non-null
/// field resolves to null or errors, signalling the caller to null out the
/// whole enclosing object; returns `true` otherwise.
fn resolve_selection_set_into<T, CtxT>(
    instance: &T,
    info: &T::TypeInfo,
    selection_set: &[Selection],
    executor: &Executor<CtxT>,
    result: &mut OrderMap<String, Value>,
) -> bool where
    T: GraphQLType<Context = CtxT>,
{
    let meta_type = executor
        .schema()
        .concrete_type_by_name(
            T::name(info)
                .expect("Resolving named type's selection set")
                .as_ref(),
        )
        .expect("Type not found in schema");
    for selection in selection_set {
        match *selection {
            Selection::Field(Spanning {
                item: ref f,
                start: ref start_pos,
                ..
            }) => {
                if is_excluded(&f.directives, executor.variables()) {
                    continue;
                }
                let response_name = &f.alias.as_ref().unwrap_or(&f.name).item;
                // `__typename` is answered here directly rather than through
                // `resolve_field`, since it exists on every object type.
                if f.name.item == "__typename" {
                    result.insert(
                        (*response_name).to_owned(),
                        Value::string(instance.concrete_type_name(executor.context())),
                    );
                    continue;
                }
                let meta_field = meta_type.field_by_name(f.name.item).unwrap_or_else(|| {
                    // A missing field here indicates a schema/validation bug;
                    // `panic!` takes format arguments directly, no `format!`
                    // (and its intermediate allocation) needed.
                    panic!(
                        "Field {} not found on type {:?}",
                        f.name.item,
                        meta_type.name()
                    )
                });
                let exec_vars = executor.variables();
                let sub_exec = executor.field_sub_executor(
                    response_name,
                    &f.name.item,
                    start_pos.clone(),
                    f.selection_set.as_ref().map(|v| &v[..]),
                );
                let field_result = instance.resolve_field(
                    info,
                    f.name.item,
                    &Arguments::new(
                        f.arguments.as_ref().map(|m| {
                            m.item
                                .iter()
                                .map(|&(ref k, ref v)| {
                                    (k.item, v.item.clone().into_const(exec_vars))
                                })
                                .collect()
                        }),
                        &meta_field.arguments,
                    ),
                    &sub_exec,
                );
                match field_result {
                    // Null in a non-null position propagates upwards.
                    Ok(Value::Null) if meta_field.field_type.is_non_null() => return false,
                    Ok(v) => merge_key_into(result, response_name, v),
                    Err(e) => {
                        sub_exec.push_error_at(e, start_pos.clone());
                        if meta_field.field_type.is_non_null() {
                            return false;
                        }
                        // Nullable field errors degrade to an explicit null.
                        result.insert((*response_name).to_owned(), Value::null());
                    }
                }
            }
            Selection::FragmentSpread(Spanning {
                item: ref spread, ..
            }) => {
                if is_excluded(&spread.directives, executor.variables()) {
                    continue;
                }
                let fragment = &executor
                    .fragment_by_name(spread.name.item)
                    .expect("Fragment could not be found");
                if !resolve_selection_set_into(
                    instance,
                    info,
                    &fragment.selection_set[..],
                    executor,
                    result,
                ) {
                    return false;
                }
            }
            Selection::InlineFragment(Spanning {
                item: ref fragment,
                start: ref start_pos,
                ..
            }) => {
                if is_excluded(&fragment.directives, executor.variables()) {
                    continue;
                }
                let sub_exec = executor.type_sub_executor(
                    fragment.type_condition.as_ref().map(|c| c.item),
                    Some(&fragment.selection_set[..]));
                if let Some(ref type_condition) = fragment.type_condition {
                    // Typed inline fragment: dispatch through the concrete
                    // type and merge the resulting object into ours.
                    let sub_result = instance.resolve_into_type(
                        info,
                        type_condition.item,
                        Some(&fragment.selection_set[..]),
                        &sub_exec,
                    );
                    if let Ok(Value::Object(mut hash_map)) = sub_result {
                        for (k, v) in hash_map.drain(..) {
                            result.insert(k, v);
                        }
                    } else if let Err(e) = sub_result {
                        sub_exec.push_error_at(e, start_pos.clone());
                    }
                } else {
                    // Untyped inline fragment: resolve in place.
                    if !resolve_selection_set_into(
                        instance,
                        info,
                        &fragment.selection_set[..],
                        &sub_exec,
                        result,
                    ) {
                        return false;
                    }
                }
            }
        }
    }
    true
}
/// Determine whether a selection is excluded by its `@skip`/`@include`
/// directives for the given variable values.
///
/// Returns `true` when a `skip` directive's `if` argument evaluates to
/// true, or an `include` directive's `if` argument evaluates to false.
fn is_excluded(directives: &Option<Vec<Spanning<Directive>>>, vars: &Variables) -> bool {
    if let Some(ref directives) = *directives {
        for &Spanning {
            item: ref directive,
            ..
        } in directives
        {
            // NOTE(review): the condition is evaluated for every directive
            // present, not just `skip`/`include`; this assumes validation
            // guarantees a convertible boolean `if` argument -- confirm.
            let condition: bool = directive
                .arguments
                .iter()
                .flat_map(|m| m.item.get("if"))
                .flat_map(|v| v.item.clone().into_const(vars).convert())
                .next()
                .expect("directive missing boolean `if` argument");
            if (directive.name.item == "skip" && condition) ||
                (directive.name.item == "include" && !condition)
            {
                return true;
            }
        }
    }
    false
}
// Merge `value` into the response map under `response_name`.
fn merge_key_into(result: &mut OrderMap<String, Value>, response_name: &str, value: Value) {
    match result.entry(response_name.to_owned()) {
        // Key already present: only an object-into-object merge combines the
        // two; any other combination keeps the existing entry and drops
        // `value`.
        Entry::Occupied(mut e) => match (e.get_mut().as_mut_object_value(), value) {
            (Some(dest_obj), Value::Object(src_obj)) => {
                merge_maps(dest_obj, src_obj);
            }
            _ => {}
        },
        // First occurrence: insert as-is.
        Entry::Vacant(e) => {
            e.insert(value);
        }
    }
}
// Recursively fold every key/value pair of `src` into `dest`; existing keys
// are combined via `merge_key_into` so nested objects merge instead of
// overwriting each other.
fn merge_maps(dest: &mut OrderMap<String, Value>, src: OrderMap<String, Value>) {
    for (key, value) in src {
        if dest.contains_key(&key) {
            merge_key_into(dest, &key, value);
        } else {
            dest.insert(key, value);
        }
    }
}
|
/*!
Functionality for declaring Objective-C classes.
Classes can be declared using the `ClassDecl` struct. Instance variables and
methods can then be added before the class is ultimately registered.
# Example
The following example demonstrates declaring a class named `MyNumber` that has
one ivar, a `u32` named `_number` and a `number` method that returns it:
```
# #[macro_use] extern crate objc;
# use objc::declare::ClassDecl;
# use objc::runtime::{Class, Object, Sel};
# fn main() {
let superclass = Class::get("NSObject").unwrap();
let mut decl = ClassDecl::new(superclass, "MyNumber").unwrap();
// Add an instance variable
decl.add_ivar::<u32>("_number");
// Add an ObjC method for getting the number
extern fn my_number_get(this: &Object, _cmd: Sel) -> u32 {
unsafe { *this.get_ivar("_number") }
}
decl.add_method(sel!(number),
my_number_get as extern fn(&Object, Sel) -> u32);
decl.register();
# }
```
*/
use std::ffi::CString;
use std::mem;
use libc::size_t;
use {encode, Encode, EncodePtr, Message};
use runtime::{Class, Imp, Sel, NO, self};
/// Types that can be used as the implementation of an Objective-C method.
///
/// Implementations are generated below (via `method_decl_impl!`) for
/// `extern fn`s taking a `&T`/`&mut T` receiver, a `Sel`, and up to twelve
/// additional encodable arguments.
pub trait IntoMethodImp {
    /// Returns the method type encoding for Self.
    fn method_encoding() -> String;

    /// Consumes self to create a method implementation for the given selector.
    ///
    /// Returns an error if self and the selector do not accept the same number
    /// of arguments.
    fn into_imp(self, sel: Sel) -> Result<Imp, ()>;
}
// Expands to the number of identifiers passed in; used to count a method's
// extra arguments at compile time.
macro_rules! count_idents {
    () => (0);
    ($a:ident) => (1);
    ($a:ident, $($b:ident),+) => (1 + count_idents!($($b),*));
}
macro_rules! method_decl_impl {
    // Implements `IntoMethodImp` for an `extern fn` whose receiver type is
    // `$sp` (`&T` or `&mut T`) and whose extra argument types are `$t...`.
    (-$s:ident, $sp:ty, $($t:ident),*) => (
        impl<$s, R $(, $t)*> IntoMethodImp for extern fn($sp, Sel $(, $t)*) -> R
                where $s: Message + EncodePtr, R: Encode $(, $t: Encode)* {
            fn method_encoding() -> String {
                // The encoding string concatenates the return type, the
                // receiver, the selector, and each argument, in that order.
                let types = [
                    encode::<R>(),
                    encode::<$sp>(),
                    encode::<Sel>(),
                    $(encode::<$t>()),*
                ];
                types.iter().cloned().collect()
            }

            fn into_imp(self, sel: Sel) -> Result<Imp, ()> {
                // A selector carries one ':' per argument; reject functions
                // whose arity does not match before transmuting to `Imp`.
                let num_args = count_idents!($($t),*);
                if sel.name().chars().filter(|&c| c == ':').count() == num_args {
                    unsafe { Ok(mem::transmute(self)) }
                } else {
                    Err(())
                }
            }
        }
    );
    // Generate both the `&T` and `&mut T` receiver variants.
    ($($t:ident),*) => (
        method_decl_impl!(-T, &T, $($t),*);
        method_decl_impl!(-T, &mut T, $($t),*);
    );
}
// Implement `IntoMethodImp` for extern fns taking zero through twelve
// additional arguments.
method_decl_impl!();
method_decl_impl!(A);
method_decl_impl!(A, B);
method_decl_impl!(A, B, C);
method_decl_impl!(A, B, C, D);
method_decl_impl!(A, B, C, D, E);
method_decl_impl!(A, B, C, D, E, F);
method_decl_impl!(A, B, C, D, E, F, G);
method_decl_impl!(A, B, C, D, E, F, G, H);
method_decl_impl!(A, B, C, D, E, F, G, H, I);
method_decl_impl!(A, B, C, D, E, F, G, H, I, J);
method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K);
method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K, L);
/// A type for declaring a new class and adding new methods and ivars to it
/// before registering it.
pub struct ClassDecl {
    // Raw pointer to the allocated-but-not-yet-registered class.
    cls: *mut Class,
}
impl ClassDecl {
    /// Constructs a `ClassDecl` with the given superclass and name.
    /// Returns `None` if the class couldn't be allocated.
    pub fn new(superclass: &Class, name: &str) -> Option<ClassDecl> {
        let name = CString::new(name).unwrap();
        let cls = unsafe {
            runtime::objc_allocateClassPair(superclass, name.as_ptr(), 0)
        };
        // A null pointer from the runtime signals allocation failure.
        if cls.is_null() {
            None
        } else {
            Some(ClassDecl { cls: cls })
        }
    }

    /// Adds a method with the given name and implementation to self.
    /// Panics if the method wasn't successfully added
    /// or if the selector and function take different numbers of arguments.
    pub fn add_method<F>(&mut self, sel: Sel, func: F) where F: IntoMethodImp {
        let types = CString::new(F::method_encoding()).unwrap();
        // `into_imp` errors when the selector arity doesn't match `func`.
        let imp = func.into_imp(sel).unwrap();
        let success = unsafe {
            runtime::class_addMethod(self.cls, sel, imp, types.as_ptr())
        };
        assert!(success != NO, "Failed to add method {:?}", sel);
    }

    /// Adds an ivar with type `T` and the provided name to self.
    /// Panics if the ivar wasn't successfully added.
    pub fn add_ivar<T>(&mut self, name: &str) where T: Encode {
        let c_name = CString::new(name).unwrap();
        let types = CString::new(encode::<T>()).unwrap();
        let size = mem::size_of::<T>() as size_t;
        let align = mem::align_of::<T>() as u8;
        let success = unsafe {
            runtime::class_addIvar(self.cls, c_name.as_ptr(), size, align,
                types.as_ptr())
        };
        assert!(success != NO, "Failed to add ivar {}", name);
    }

    /// Registers self, consuming it and returning a reference to the
    /// newly registered `Class`.
    pub fn register(self) -> &'static Class {
        unsafe {
            let cls = self.cls;
            runtime::objc_registerClassPair(cls);
            // Forget self otherwise the class will be disposed in drop
            mem::forget(self);
            &*cls
        }
    }
}
impl Drop for ClassDecl {
    fn drop(&mut self) {
        // Dispose of the class pair; `register` forgets self first, so only
        // declarations that were never registered reach this point.
        unsafe {
            runtime::objc_disposeClassPair(self.cls);
        }
    }
}
#[cfg(test)]
mod tests {
    use runtime::{Object, Sel};
    use test_utils;
    use super::IntoMethodImp;

    #[test]
    fn test_custom_class() {
        // Registering the custom class is in test_utils
        let obj = test_utils::custom_object();
        unsafe {
            let _: () = msg_send![obj, setFoo:13u32];
            let result: u32 = msg_send![obj, foo];
            assert!(result == 13);
        }
    }

    #[test]
    fn test_mismatched_args() {
        // A selector with two ':' paired with a function taking only one
        // extra argument must be rejected by `into_imp`.
        extern fn wrong_num_args_method(_obj: &Object, _cmd: Sel, _a: i32) { }
        let sel = sel!(doSomethingWithFoo:bar:);
        let f: extern fn(&Object, Sel, i32) = wrong_num_args_method;
        let imp = f.into_imp(sel);
        assert!(imp.is_err());
    }
}
Added `Callee` and `Ret` associated types for method imps.
/*!
Functionality for declaring Objective-C classes.
Classes can be declared using the `ClassDecl` struct. Instance variables and
methods can then be added before the class is ultimately registered.
# Example
The following example demonstrates declaring a class named `MyNumber` that has
one ivar, a `u32` named `_number` and a `number` method that returns it:
```
# #[macro_use] extern crate objc;
# use objc::declare::ClassDecl;
# use objc::runtime::{Class, Object, Sel};
# fn main() {
let superclass = Class::get("NSObject").unwrap();
let mut decl = ClassDecl::new(superclass, "MyNumber").unwrap();
// Add an instance variable
decl.add_ivar::<u32>("_number");
// Add an ObjC method for getting the number
extern fn my_number_get(this: &Object, _cmd: Sel) -> u32 {
unsafe { *this.get_ivar("_number") }
}
decl.add_method(sel!(number),
my_number_get as extern fn(&Object, Sel) -> u32);
decl.register();
# }
```
*/
use std::ffi::CString;
use std::mem;
use libc::size_t;
use {encode, Encode, EncodePtr, Message};
use runtime::{Class, Imp, Sel, NO, self};
/// Types that can be used as the implementation of an Objective-C method.
///
/// Implementations are generated below (via `method_decl_impl!`) for
/// `extern fn`s taking a `&T`/`&mut T` receiver, a `Sel`, and up to twelve
/// additional encodable arguments.
pub trait IntoMethodImp {
    /// The callee type of the method.
    type Callee: Message;

    /// The return type of the method.
    type Ret;

    /// Returns the method type encoding for Self.
    fn method_encoding() -> String;

    /// Consumes self to create a method implementation for the given selector.
    ///
    /// Returns an error if self and the selector do not accept the same number
    /// of arguments.
    fn into_imp(self, sel: Sel) -> Result<Imp, ()>;
}
// Expands to the number of identifiers passed in; used to count a method's
// extra arguments at compile time.
macro_rules! count_idents {
    () => (0);
    ($a:ident) => (1);
    ($a:ident, $($b:ident),+) => (1 + count_idents!($($b),*));
}
macro_rules! method_decl_impl {
    // Implements `IntoMethodImp` for an `extern fn` whose receiver type is
    // `$sp` (`&T` or `&mut T`) and whose extra argument types are `$t...`.
    (-$s:ident, $sp:ty, $($t:ident),*) => (
        impl<$s, R $(, $t)*> IntoMethodImp for extern fn($sp, Sel $(, $t)*) -> R
                where $s: Message + EncodePtr, R: Encode $(, $t: Encode)* {
            // The receiver and return type are surfaced as associated types.
            type Callee = $s;
            type Ret = R;

            fn method_encoding() -> String {
                // The encoding string concatenates the return type, the
                // receiver, the selector, and each argument, in that order.
                let types = [
                    encode::<R>(),
                    encode::<$sp>(),
                    encode::<Sel>(),
                    $(encode::<$t>()),*
                ];
                types.iter().cloned().collect()
            }

            fn into_imp(self, sel: Sel) -> Result<Imp, ()> {
                // A selector carries one ':' per argument; reject functions
                // whose arity does not match before transmuting to `Imp`.
                let num_args = count_idents!($($t),*);
                if sel.name().chars().filter(|&c| c == ':').count() == num_args {
                    unsafe { Ok(mem::transmute(self)) }
                } else {
                    Err(())
                }
            }
        }
    );
    // Generate both the `&T` and `&mut T` receiver variants.
    ($($t:ident),*) => (
        method_decl_impl!(-T, &T, $($t),*);
        method_decl_impl!(-T, &mut T, $($t),*);
    );
}
// Implement `IntoMethodImp` for extern fns taking zero through twelve
// additional arguments.
method_decl_impl!();
method_decl_impl!(A);
method_decl_impl!(A, B);
method_decl_impl!(A, B, C);
method_decl_impl!(A, B, C, D);
method_decl_impl!(A, B, C, D, E);
method_decl_impl!(A, B, C, D, E, F);
method_decl_impl!(A, B, C, D, E, F, G);
method_decl_impl!(A, B, C, D, E, F, G, H);
method_decl_impl!(A, B, C, D, E, F, G, H, I);
method_decl_impl!(A, B, C, D, E, F, G, H, I, J);
method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K);
method_decl_impl!(A, B, C, D, E, F, G, H, I, J, K, L);
/// A type for declaring a new class and adding new methods and ivars to it
/// before registering it.
pub struct ClassDecl {
    // Raw pointer to the allocated-but-not-yet-registered class.
    cls: *mut Class,
}
impl ClassDecl {
    /// Constructs a `ClassDecl` with the given superclass and name.
    /// Returns `None` if the class couldn't be allocated.
    pub fn new(superclass: &Class, name: &str) -> Option<ClassDecl> {
        let name = CString::new(name).unwrap();
        let cls = unsafe {
            runtime::objc_allocateClassPair(superclass, name.as_ptr(), 0)
        };
        // A null pointer from the runtime signals allocation failure.
        if cls.is_null() {
            None
        } else {
            Some(ClassDecl { cls: cls })
        }
    }

    /// Adds a method with the given name and implementation to self.
    /// Panics if the method wasn't successfully added
    /// or if the selector and function take different numbers of arguments.
    pub fn add_method<F>(&mut self, sel: Sel, func: F) where F: IntoMethodImp {
        let types = CString::new(F::method_encoding()).unwrap();
        // `into_imp` errors when the selector arity doesn't match `func`.
        let imp = func.into_imp(sel).unwrap();
        let success = unsafe {
            runtime::class_addMethod(self.cls, sel, imp, types.as_ptr())
        };
        assert!(success != NO, "Failed to add method {:?}", sel);
    }

    /// Adds an ivar with type `T` and the provided name to self.
    /// Panics if the ivar wasn't successfully added.
    pub fn add_ivar<T>(&mut self, name: &str) where T: Encode {
        let c_name = CString::new(name).unwrap();
        let types = CString::new(encode::<T>()).unwrap();
        let size = mem::size_of::<T>() as size_t;
        let align = mem::align_of::<T>() as u8;
        let success = unsafe {
            runtime::class_addIvar(self.cls, c_name.as_ptr(), size, align,
                types.as_ptr())
        };
        assert!(success != NO, "Failed to add ivar {}", name);
    }

    /// Registers self, consuming it and returning a reference to the
    /// newly registered `Class`.
    pub fn register(self) -> &'static Class {
        unsafe {
            let cls = self.cls;
            runtime::objc_registerClassPair(cls);
            // Forget self otherwise the class will be disposed in drop
            mem::forget(self);
            &*cls
        }
    }
}
impl Drop for ClassDecl {
    fn drop(&mut self) {
        // Dispose of the class pair; `register` forgets self first, so only
        // declarations that were never registered reach this point.
        unsafe {
            runtime::objc_disposeClassPair(self.cls);
        }
    }
}
#[cfg(test)]
mod tests {
    use runtime::{Object, Sel};
    use test_utils;
    use super::IntoMethodImp;

    #[test]
    fn test_custom_class() {
        // Registering the custom class is in test_utils
        let obj = test_utils::custom_object();
        unsafe {
            let _: () = msg_send![obj, setFoo:13u32];
            let result: u32 = msg_send![obj, foo];
            assert!(result == 13);
        }
    }

    #[test]
    fn test_mismatched_args() {
        // A selector with two ':' paired with a function taking only one
        // extra argument must be rejected by `into_imp`.
        extern fn wrong_num_args_method(_obj: &Object, _cmd: Sel, _a: i32) { }
        let sel = sel!(doSomethingWithFoo:bar:);
        let f: extern fn(&Object, Sel, i32) = wrong_num_args_method;
        let imp = f.into_imp(sel);
        assert!(imp.is_err());
    }
}
|
use futures_executor::LocalPool;
use lapin::{
message::DeliveryResult,
options::*,
publisher_confirm::Confirmation,
tcp::{Identity, TLSConfig},
types::FieldTable,
BasicProperties, Connection, ConnectionProperties,
};
use log::info;
use std::sync::Arc;
// Build the TLS configuration for the AMQPS connection: a PEM CA chain plus
// a PKCS#12 client identity, both embedded at compile time. The paths are
// placeholders the user must point at real files relative to the crate root.
fn get_tls_config() -> TLSConfig<'static, 'static, 'static> {
    let cert_chain = include_str!(concat!(
        env!("CARGO_MANIFEST_DIR"),
        "/path/to/ca_certificate.pem"
    ));
    let client_cert_and_key =
        include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/path/to/client.pfx"));
    // NOTE(review): hard-coded example password; replace for real use.
    let client_cert_and_key_password = "bunnies";
    TLSConfig {
        identity: Some(Identity {
            der: client_cert_and_key,
            password: client_cert_and_key_password,
        }),
        cert_chain: Some(cert_chain),
    }
}
fn main() {
    env_logger::init();

    // Default to a local AMQPS endpoint using the EXTERNAL (certificate)
    // auth mechanism; override with the AMQP_ADDR environment variable.
    let addr = std::env::var("AMQP_ADDR")
        .unwrap_or_else(|_| "amqps://localhost:5671/%2f?auth_mechanism=external".into());

    // Drive the async block to completion on a single-threaded executor.
    LocalPool::new().run_until(async {
        let conn = Connection::connect_with_config(
            &addr,
            ConnectionProperties::default(),
            get_tls_config(),
        )
        .await
        .expect("connection error");

        info!("CONNECTED");

        //send channel
        let channel_a = conn.create_channel().await.expect("create_channel");
        //receive channel
        let channel_b = conn.create_channel().await.expect("create_channel");
        info!("[{}] state: {:?}", line!(), conn.status().state());

        //create the hello queue
        let queue = channel_a
            .queue_declare(
                "hello",
                QueueDeclareOptions::default(),
                FieldTable::default(),
            )
            .await
            .expect("queue_declare");
        info!("[{}] state: {:?}", line!(), conn.status().state());
        info!("[{}] declared queue: {:?}", line!(), queue);

        // Share the consuming channel with the delegate closure below.
        let channel_b = Arc::new(channel_b);
        let chan = channel_b.clone();
        info!("will consume");
        channel_b
            .basic_consume(
                "hello",
                "my_consumer",
                BasicConsumeOptions::default(),
                FieldTable::default(),
            )
            .await
            .expect("basic_consume")
            .set_delegate(move |delivery: DeliveryResult| {
                let chan = chan.clone();
                async move {
                    info!("received message: {:?}", delivery);
                    // Acknowledge every successfully received delivery.
                    if let Ok(Some(delivery)) = delivery {
                        chan.basic_ack(delivery.delivery_tag, BasicAckOptions::default())
                            .await
                            .expect("basic_ack");
                    }
                }
            })
            .expect("set_delegate");
        info!("[{}] state: {:?}", line!(), conn.status().state());

        info!("will publish");
        let payload = b"Hello world!";
        let confirm = channel_a
            .basic_publish(
                "",
                "hello",
                BasicPublishOptions::default(),
                payload.to_vec(),
                BasicProperties::default(),
            )
            .await
            .expect("basic_publish")
            .await
            .expect("publisher-confirms");
        // Publisher confirms were never enabled on the channel, so the
        // confirmation resolves to NotRequested.
        assert_eq!(confirm, Confirmation::NotRequested);
        info!("[{}] state: {:?}", line!(), conn.status().state());
    })
}
make the CI pass
Signed-off-by: Marc-Antoine Perennou <07f76cf0511c79b361712839686f3cee8c75791c@Perennou.com>
use futures_executor::LocalPool;
use lapin::{
message::DeliveryResult,
options::*,
publisher_confirm::Confirmation,
tcp::{Identity, TLSConfig},
types::FieldTable,
BasicProperties, Connection, ConnectionProperties,
};
use log::info;
use std::sync::Arc;
// Build the TLS configuration for the AMQPS connection. The real
// `include_str!`/`include_bytes!` calls are commented out and replaced by
// empty stubs so the example compiles in CI, where the certificate files do
// not exist; restore them to use an actual CA chain and client identity.
fn get_tls_config() -> TLSConfig<'static, 'static, 'static> {
    let cert_chain = "" /* include_str!(concat!(
        env!("CARGO_MANIFEST_DIR"),
        "/path/to/ca_certificate.pem"
    )) */;
    let client_cert_and_key = b""
        /* include_bytes!(concat!(env!("CARGO_MANIFEST_DIR"), "/path/to/client.pfx")) */;
    // NOTE(review): hard-coded example password; replace for real use.
    let client_cert_and_key_password = "bunnies";
    TLSConfig {
        identity: Some(Identity {
            der: client_cert_and_key,
            password: client_cert_and_key_password,
        }),
        cert_chain: Some(cert_chain),
    }
}
fn main() {
    env_logger::init();

    // Default to a local AMQPS endpoint using the EXTERNAL (certificate)
    // auth mechanism; override with the AMQP_ADDR environment variable.
    let addr = std::env::var("AMQP_ADDR")
        .unwrap_or_else(|_| "amqps://localhost:5671/%2f?auth_mechanism=external".into());

    // Drive the async block to completion on a single-threaded executor.
    LocalPool::new().run_until(async {
        let conn = Connection::connect_with_config(
            &addr,
            ConnectionProperties::default(),
            get_tls_config(),
        )
        .await
        .expect("connection error");

        info!("CONNECTED");

        //send channel
        let channel_a = conn.create_channel().await.expect("create_channel");
        //receive channel
        let channel_b = conn.create_channel().await.expect("create_channel");
        info!("[{}] state: {:?}", line!(), conn.status().state());

        //create the hello queue
        let queue = channel_a
            .queue_declare(
                "hello",
                QueueDeclareOptions::default(),
                FieldTable::default(),
            )
            .await
            .expect("queue_declare");
        info!("[{}] state: {:?}", line!(), conn.status().state());
        info!("[{}] declared queue: {:?}", line!(), queue);

        // Share the consuming channel with the delegate closure below.
        let channel_b = Arc::new(channel_b);
        let chan = channel_b.clone();
        info!("will consume");
        channel_b
            .basic_consume(
                "hello",
                "my_consumer",
                BasicConsumeOptions::default(),
                FieldTable::default(),
            )
            .await
            .expect("basic_consume")
            .set_delegate(move |delivery: DeliveryResult| {
                let chan = chan.clone();
                async move {
                    info!("received message: {:?}", delivery);
                    // Acknowledge every successfully received delivery.
                    if let Ok(Some(delivery)) = delivery {
                        chan.basic_ack(delivery.delivery_tag, BasicAckOptions::default())
                            .await
                            .expect("basic_ack");
                    }
                }
            })
            .expect("set_delegate");
        info!("[{}] state: {:?}", line!(), conn.status().state());

        info!("will publish");
        let payload = b"Hello world!";
        let confirm = channel_a
            .basic_publish(
                "",
                "hello",
                BasicPublishOptions::default(),
                payload.to_vec(),
                BasicProperties::default(),
            )
            .await
            .expect("basic_publish")
            .await
            .expect("publisher-confirms");
        // Publisher confirms were never enabled on the channel, so the
        // confirmation resolves to NotRequested.
        assert_eq!(confirm, Confirmation::NotRequested);
        info!("[{}] state: {:?}", line!(), conn.status().state());
    })
}
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gleam::gl;
use glutin;
use std::env;
use std::path::PathBuf;
use webrender;
use winit;
use webrender::{DebugFlags, ShaderPrecacheFlags};
use webrender::api::*;
use webrender::api::units::*;
/// Bridges WebRender's render-backend notifications to the winit event loop.
struct Notifier {
    // Proxy used to wake the event loop from another thread.
    events_proxy: winit::EventsLoopProxy,
}
impl Notifier {
    /// Creates a notifier that wakes the given event loop.
    fn new(events_proxy: winit::EventsLoopProxy) -> Self {
        Self { events_proxy }
    }
}
impl RenderNotifier for Notifier {
    // WebRender requires notifiers to be cloneable into a boxed trait object.
    fn clone(&self) -> Box<dyn RenderNotifier> {
        Box::new(Notifier {
            events_proxy: self.events_proxy.clone(),
        })
    }
    fn wake_up(&self) {
        // The wakeup call is compiled out on Android; failures are ignored.
        #[cfg(not(target_os = "android"))]
        let _ = self.events_proxy.wakeup();
    }
    fn new_frame_ready(&self,
                       _: DocumentId,
                       _scrolled: bool,
                       _composite_needed: bool,
                       _render_time: Option<u64>) {
        // A frame is ready: wake the event loop so the main thread redraws.
        self.wake_up();
    }
}
/// Convenience constructors for `LayoutRect` from integer coordinates.
pub trait HandyDandyRectBuilder {
    // Rect spanning from this point to `(x2, y2)`.
    fn to(&self, x2: i32, y2: i32) -> LayoutRect;
    // Rect at this point with size `(w, h)`.
    fn by(&self, w: i32, h: i32) -> LayoutRect;
}
// Allows doing `(x, y).to(x2, y2)` or `(x, y).by(width, height)` with i32
// values to build a f32 LayoutRect
impl HandyDandyRectBuilder for (i32, i32) {
    /// Builds the rect spanning from `(self.0, self.1)` to `(x2, y2)`.
    fn to(&self, x2: i32, y2: i32) -> LayoutRect {
        // Same computation as `by` with the size derived from the far corner.
        self.by(x2 - self.0, y2 - self.1)
    }

    /// Builds the rect at `(self.0, self.1)` with size `w` x `h`.
    fn by(&self, w: i32, h: i32) -> LayoutRect {
        let origin = LayoutPoint::new(self.0 as f32, self.1 as f32);
        let size = LayoutSize::new(w as f32, h as f32);
        LayoutRect::new(origin, size)
    }
}
/// Interface each sample application implements; driven by [`main_wrapper`].
pub trait Example {
    /// Window title.
    const TITLE: &'static str = "WebRender Sample App";
    /// Shader precache behaviour passed to the renderer options.
    const PRECACHE_SHADER_FLAGS: ShaderPrecacheFlags = ShaderPrecacheFlags::EMPTY;
    /// Initial window width (logical pixels, per `with_dimensions`).
    const WIDTH: u32 = 1920;
    /// Initial window height (logical pixels).
    const HEIGHT: u32 = 1080;
    /// Builds the example's display list into `builder` / `txn`.
    fn render(
        &mut self,
        api: &mut RenderApi,
        builder: &mut DisplayListBuilder,
        txn: &mut Transaction,
        device_size: DeviceIntSize,
        pipeline_id: PipelineId,
        document_id: DocumentId,
    );
    /// Handles a window event; returning `true` makes the wrapper rebuild
    /// and resubmit the display list.
    fn on_event(
        &mut self,
        _: winit::WindowEvent,
        _: &mut RenderApi,
        _: DocumentId,
    ) -> bool {
        false
    }
    /// Optional external/output image handlers to install on the renderer.
    fn get_image_handlers(
        &mut self,
        _gl: &dyn gl::Gl,
    ) -> (Option<Box<dyn ExternalImageHandler>>,
          Option<Box<dyn OutputImageHandler>>) {
        (None, None)
    }
    /// Hook for extra GL drawing after WebRender renders each frame.
    fn draw_custom(&mut self, _gl: &dyn gl::Gl) {
    }
}
/// Creates a window and GL context, initializes WebRender, renders the
/// example's initial display list, then services the winit event loop until
/// the window is closed or Escape is pressed.
pub fn main_wrapper<E: Example>(
    example: &mut E,
    options: Option<webrender::RendererOptions>,
) {
    env_logger::init();
    // Opt in to automatic graphics switching on macOS so the example does not
    // force the discrete GPU on.
    #[cfg(target_os = "macos")]
    {
        use core_foundation::{self as cf, base::TCFType};
        let i = cf::bundle::CFBundle::main_bundle().info_dictionary();
        let mut i = unsafe { i.to_mutable() };
        i.set(
            cf::string::CFString::new("NSSupportsAutomaticGraphicsSwitching"),
            cf::boolean::CFBoolean::true_value().into_CFType(),
        );
    }
    // Optional first CLI argument: shader resource override directory.
    let args: Vec<String> = env::args().collect();
    let res_path = if args.len() > 1 {
        Some(PathBuf::from(&args[1]))
    } else {
        None
    };
    // Window + GL context creation (GL 3.2, falling back to GLES 3.0).
    let mut events_loop = winit::EventsLoop::new();
    let window_builder = winit::WindowBuilder::new()
        .with_title(E::TITLE)
        .with_multitouch()
        .with_dimensions(winit::dpi::LogicalSize::new(E::WIDTH as f64, E::HEIGHT as f64));
    let windowed_context = glutin::ContextBuilder::new()
        .with_gl(glutin::GlRequest::GlThenGles {
            opengl_version: (3, 2),
            opengles_version: (3, 0),
        })
        .build_windowed(window_builder, &events_loop)
        .unwrap();
    let windowed_context = unsafe { windowed_context.make_current().unwrap() };
    // Load GL function pointers for whichever API glutin provided.
    let gl = match windowed_context.get_api() {
        glutin::Api::OpenGl => unsafe {
            gl::GlFns::load_with(
                |symbol| windowed_context.get_proc_address(symbol) as *const _
            )
        },
        glutin::Api::OpenGlEs => unsafe {
            gl::GlesFns::load_with(
                |symbol| windowed_context.get_proc_address(symbol) as *const _
            )
        },
        glutin::Api::WebGl => unimplemented!(),
    };
    println!("OpenGL version {}", gl.get_string(gl::VERSION));
    println!("Shader resource path: {:?}", res_path);
    let device_pixel_ratio = windowed_context.window().get_hidpi_factor() as f32;
    println!("Device pixel ratio: {}", device_pixel_ratio);
    println!("Loading shaders...");
    // Debug overlays enabled at startup; toggled by keyboard shortcuts below.
    let mut debug_flags = DebugFlags::ECHO_DRIVER_MESSAGES | DebugFlags::TEXTURE_CACHE_DBG;
    let opts = webrender::RendererOptions {
        resource_override_path: res_path,
        precache_flags: E::PRECACHE_SHADER_FLAGS,
        device_pixel_ratio,
        clear_color: Some(ColorF::new(0.3, 0.0, 0.0, 1.0)),
        debug_flags,
        //allow_texture_swizzling: false,
        ..options.unwrap_or(webrender::RendererOptions::default())
    };
    // Physical framebuffer size derived from the window's inner size.
    let device_size = {
        let size = windowed_context
            .window()
            .get_inner_size()
            .unwrap()
            .to_physical(device_pixel_ratio as f64);
        DeviceIntSize::new(size.width as i32, size.height as i32)
    };
    let notifier = Box::new(Notifier::new(events_loop.create_proxy()));
    let (mut renderer, sender) = webrender::Renderer::new(
        gl.clone(),
        notifier,
        opts,
        None,
        device_size,
    ).unwrap();
    let mut api = sender.create_api();
    let document_id = api.add_document(device_size, 0);
    // Let the example install custom image handlers before the first frame.
    let (external, output) = example.get_image_handlers(&*gl);
    if let Some(output_image_handler) = output {
        renderer.set_output_image_handler(output_image_handler);
    }
    if let Some(external_image_handler) = external {
        renderer.set_external_image_handler(external_image_handler);
    }
    // Build and submit the initial display list.
    let epoch = Epoch(0);
    let pipeline_id = PipelineId(0, 0);
    let layout_size = device_size.to_f32() / euclid::Scale::new(device_pixel_ratio);
    let mut builder = DisplayListBuilder::new(pipeline_id, layout_size);
    let mut txn = Transaction::new();
    example.render(
        &mut api,
        &mut builder,
        &mut txn,
        device_size,
        pipeline_id,
        document_id,
    );
    txn.set_display_list(
        epoch,
        Some(ColorF::new(0.3, 0.0, 0.0, 1.0)),
        layout_size,
        builder.finalize(),
        true,
    );
    txn.set_root_pipeline(pipeline_id);
    txn.generate_frame();
    api.send_transaction(document_id, txn);
    println!("Entering event loop");
    events_loop.run_forever(|global_event| {
        let mut txn = Transaction::new();
        // Whether this event requires rebuilding the display list.
        let mut custom_event = true;
        let old_flags = debug_flags;
        let win_event = match global_event {
            winit::Event::WindowEvent { event, .. } => event,
            _ => return winit::ControlFlow::Continue,
        };
        match win_event {
            winit::WindowEvent::CloseRequested => return winit::ControlFlow::Break,
            // Skip high-frequency events entirely (no redraw, not forwarded
            // to the example).
            winit::WindowEvent::AxisMotion { .. } |
            winit::WindowEvent::CursorMoved { .. } => return winit::ControlFlow::Continue,
            winit::WindowEvent::KeyboardInput {
                input: winit::KeyboardInput {
                    state: winit::ElementState::Pressed,
                    virtual_keycode: Some(key),
                    ..
                },
                ..
            } => match key {
                winit::VirtualKeyCode::Escape => return winit::ControlFlow::Break,
                // Debug-overlay toggles.
                winit::VirtualKeyCode::P => debug_flags.toggle(DebugFlags::PROFILER_DBG),
                winit::VirtualKeyCode::O => debug_flags.toggle(DebugFlags::RENDER_TARGET_DBG),
                winit::VirtualKeyCode::I => debug_flags.toggle(DebugFlags::TEXTURE_CACHE_DBG),
                winit::VirtualKeyCode::S => debug_flags.toggle(DebugFlags::COMPACT_PROFILER),
                winit::VirtualKeyCode::T => debug_flags.toggle(DebugFlags::PICTURE_CACHING_DBG),
                winit::VirtualKeyCode::Q => debug_flags.toggle(
                    DebugFlags::GPU_TIME_QUERIES | DebugFlags::GPU_SAMPLE_QUERIES
                ),
                winit::VirtualKeyCode::F => debug_flags.toggle(
                    DebugFlags::NEW_FRAME_INDICATOR | DebugFlags::NEW_SCENE_INDICATOR
                ),
                winit::VirtualKeyCode::G => debug_flags.toggle(DebugFlags::GPU_CACHE_DBG),
                // Document zoom shortcuts.
                winit::VirtualKeyCode::Key1 => txn.set_document_view(
                    device_size.into(),
                    1.0
                ),
                winit::VirtualKeyCode::Key2 => txn.set_document_view(
                    device_size.into(),
                    2.0
                ),
                winit::VirtualKeyCode::M => api.notify_memory_pressure(),
                winit::VirtualKeyCode::C => {
                    let path: PathBuf = "../captures/example".into();
                    //TODO: switch between SCENE/FRAME capture types
                    // based on "shift" modifier, when `glutin` is updated.
                    let bits = CaptureBits::all();
                    api.save_capture(path, bits);
                },
                // Any other key is forwarded to the example.
                _ => {
                    custom_event = example.on_event(
                        win_event,
                        &mut api,
                        document_id,
                    )
                },
            },
            other => custom_event = example.on_event(
                other,
                &mut api,
                document_id,
            ),
        };
        if debug_flags != old_flags {
            api.send_debug_cmd(DebugCommand::SetFlags(debug_flags));
        }
        // Rebuild and resubmit the display list when requested.
        if custom_event {
            let mut builder = DisplayListBuilder::new(pipeline_id, layout_size);
            example.render(
                &mut api,
                &mut builder,
                &mut txn,
                device_size,
                pipeline_id,
                document_id,
            );
            txn.set_display_list(
                epoch,
                Some(ColorF::new(0.3, 0.0, 0.0, 1.0)),
                layout_size,
                builder.finalize(),
                true,
            );
            txn.generate_frame();
        }
        api.send_transaction(document_id, txn);
        renderer.update();
        renderer.render(device_size).unwrap();
        let _ = renderer.flush_pipeline_info();
        example.draw_custom(&*gl);
        windowed_context.swap_buffers().ok();
        winit::ControlFlow::Continue
    });
    renderer.deinit();
}
Bug 1625693 - Fix mouse movement position tracking in examples. r=kats
[import_pr] From https://github.com/servo/webrender/pull/3961
Differential Revision: https://phabricator.services.mozilla.com/D76716
[ghsync] From https://hg.mozilla.org/mozilla-central/rev/7ced9547a2b33307774e5292aa9f787109633267
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use gleam::gl;
use glutin;
use std::env;
use std::path::PathBuf;
use webrender;
use winit;
use webrender::{DebugFlags, ShaderPrecacheFlags};
use webrender::api::*;
use webrender::api::units::*;
/// Bridges WebRender's render-backend notifications to the winit event loop.
struct Notifier {
    // Proxy used to wake the event loop from another thread.
    events_proxy: winit::EventsLoopProxy,
}
impl Notifier {
    /// Creates a notifier that wakes the given event loop.
    fn new(events_proxy: winit::EventsLoopProxy) -> Self {
        Self { events_proxy }
    }
}
impl RenderNotifier for Notifier {
    // WebRender requires notifiers to be cloneable into a boxed trait object.
    fn clone(&self) -> Box<dyn RenderNotifier> {
        Box::new(Notifier {
            events_proxy: self.events_proxy.clone(),
        })
    }
    fn wake_up(&self) {
        // The wakeup call is compiled out on Android; failures are ignored.
        #[cfg(not(target_os = "android"))]
        let _ = self.events_proxy.wakeup();
    }
    fn new_frame_ready(&self,
                       _: DocumentId,
                       _scrolled: bool,
                       _composite_needed: bool,
                       _render_time: Option<u64>) {
        // A frame is ready: wake the event loop so the main thread redraws.
        self.wake_up();
    }
}
/// Convenience constructors for `LayoutRect` from integer coordinates.
pub trait HandyDandyRectBuilder {
    // Rect spanning from this point to `(x2, y2)`.
    fn to(&self, x2: i32, y2: i32) -> LayoutRect;
    // Rect at this point with size `(w, h)`.
    fn by(&self, w: i32, h: i32) -> LayoutRect;
}
// Allows doing `(x, y).to(x2, y2)` or `(x, y).by(width, height)` with i32
// values to build a f32 LayoutRect
impl HandyDandyRectBuilder for (i32, i32) {
    /// Builds the rect spanning from `(self.0, self.1)` to `(x2, y2)`.
    fn to(&self, x2: i32, y2: i32) -> LayoutRect {
        // Same computation as `by` with the size derived from the far corner.
        self.by(x2 - self.0, y2 - self.1)
    }

    /// Builds the rect at `(self.0, self.1)` with size `w` x `h`.
    fn by(&self, w: i32, h: i32) -> LayoutRect {
        let origin = LayoutPoint::new(self.0 as f32, self.1 as f32);
        let size = LayoutSize::new(w as f32, h as f32);
        LayoutRect::new(origin, size)
    }
}
/// Interface each sample application implements; driven by [`main_wrapper`].
pub trait Example {
    /// Window title.
    const TITLE: &'static str = "WebRender Sample App";
    /// Shader precache behaviour passed to the renderer options.
    const PRECACHE_SHADER_FLAGS: ShaderPrecacheFlags = ShaderPrecacheFlags::EMPTY;
    /// Initial window width (logical pixels, per `with_dimensions`).
    const WIDTH: u32 = 1920;
    /// Initial window height (logical pixels).
    const HEIGHT: u32 = 1080;
    /// Builds the example's display list into `builder` / `txn`.
    fn render(
        &mut self,
        api: &mut RenderApi,
        builder: &mut DisplayListBuilder,
        txn: &mut Transaction,
        device_size: DeviceIntSize,
        pipeline_id: PipelineId,
        document_id: DocumentId,
    );
    /// Handles a window event; returning `true` makes the wrapper rebuild
    /// and resubmit the display list.
    fn on_event(
        &mut self,
        _: winit::WindowEvent,
        _: &mut RenderApi,
        _: DocumentId,
    ) -> bool {
        false
    }
    /// Optional external/output image handlers to install on the renderer.
    fn get_image_handlers(
        &mut self,
        _gl: &dyn gl::Gl,
    ) -> (Option<Box<dyn ExternalImageHandler>>,
          Option<Box<dyn OutputImageHandler>>) {
        (None, None)
    }
    /// Hook for extra GL drawing after WebRender renders each frame.
    fn draw_custom(&mut self, _gl: &dyn gl::Gl) {
    }
}
/// Creates a window and GL context, initializes WebRender, renders the
/// example's initial display list, then services the winit event loop until
/// the window is closed or Escape is pressed.
pub fn main_wrapper<E: Example>(
    example: &mut E,
    options: Option<webrender::RendererOptions>,
) {
    env_logger::init();
    // Opt in to automatic graphics switching on macOS so the example does not
    // force the discrete GPU on.
    #[cfg(target_os = "macos")]
    {
        use core_foundation::{self as cf, base::TCFType};
        let i = cf::bundle::CFBundle::main_bundle().info_dictionary();
        let mut i = unsafe { i.to_mutable() };
        i.set(
            cf::string::CFString::new("NSSupportsAutomaticGraphicsSwitching"),
            cf::boolean::CFBoolean::true_value().into_CFType(),
        );
    }
    // Optional first CLI argument: shader resource override directory.
    let args: Vec<String> = env::args().collect();
    let res_path = if args.len() > 1 {
        Some(PathBuf::from(&args[1]))
    } else {
        None
    };
    // Window + GL context creation (GL 3.2, falling back to GLES 3.0).
    let mut events_loop = winit::EventsLoop::new();
    let window_builder = winit::WindowBuilder::new()
        .with_title(E::TITLE)
        .with_multitouch()
        .with_dimensions(winit::dpi::LogicalSize::new(E::WIDTH as f64, E::HEIGHT as f64));
    let windowed_context = glutin::ContextBuilder::new()
        .with_gl(glutin::GlRequest::GlThenGles {
            opengl_version: (3, 2),
            opengles_version: (3, 0),
        })
        .build_windowed(window_builder, &events_loop)
        .unwrap();
    let windowed_context = unsafe { windowed_context.make_current().unwrap() };
    // Load GL function pointers for whichever API glutin provided.
    let gl = match windowed_context.get_api() {
        glutin::Api::OpenGl => unsafe {
            gl::GlFns::load_with(
                |symbol| windowed_context.get_proc_address(symbol) as *const _
            )
        },
        glutin::Api::OpenGlEs => unsafe {
            gl::GlesFns::load_with(
                |symbol| windowed_context.get_proc_address(symbol) as *const _
            )
        },
        glutin::Api::WebGl => unimplemented!(),
    };
    println!("OpenGL version {}", gl.get_string(gl::VERSION));
    println!("Shader resource path: {:?}", res_path);
    let device_pixel_ratio = windowed_context.window().get_hidpi_factor() as f32;
    println!("Device pixel ratio: {}", device_pixel_ratio);
    println!("Loading shaders...");
    // Debug overlays enabled at startup; toggled by keyboard shortcuts below.
    let mut debug_flags = DebugFlags::ECHO_DRIVER_MESSAGES | DebugFlags::TEXTURE_CACHE_DBG;
    let opts = webrender::RendererOptions {
        resource_override_path: res_path,
        precache_flags: E::PRECACHE_SHADER_FLAGS,
        device_pixel_ratio,
        clear_color: Some(ColorF::new(0.3, 0.0, 0.0, 1.0)),
        debug_flags,
        //allow_texture_swizzling: false,
        ..options.unwrap_or(webrender::RendererOptions::default())
    };
    // Physical framebuffer size derived from the window's inner size.
    let device_size = {
        let size = windowed_context
            .window()
            .get_inner_size()
            .unwrap()
            .to_physical(device_pixel_ratio as f64);
        DeviceIntSize::new(size.width as i32, size.height as i32)
    };
    let notifier = Box::new(Notifier::new(events_loop.create_proxy()));
    let (mut renderer, sender) = webrender::Renderer::new(
        gl.clone(),
        notifier,
        opts,
        None,
        device_size,
    ).unwrap();
    let mut api = sender.create_api();
    let document_id = api.add_document(device_size, 0);
    // Let the example install custom image handlers before the first frame.
    let (external, output) = example.get_image_handlers(&*gl);
    if let Some(output_image_handler) = output {
        renderer.set_output_image_handler(output_image_handler);
    }
    if let Some(external_image_handler) = external {
        renderer.set_external_image_handler(external_image_handler);
    }
    // Build and submit the initial display list.
    let epoch = Epoch(0);
    let pipeline_id = PipelineId(0, 0);
    let layout_size = device_size.to_f32() / euclid::Scale::new(device_pixel_ratio);
    let mut builder = DisplayListBuilder::new(pipeline_id, layout_size);
    let mut txn = Transaction::new();
    example.render(
        &mut api,
        &mut builder,
        &mut txn,
        device_size,
        pipeline_id,
        document_id,
    );
    txn.set_display_list(
        epoch,
        Some(ColorF::new(0.3, 0.0, 0.0, 1.0)),
        layout_size,
        builder.finalize(),
        true,
    );
    txn.set_root_pipeline(pipeline_id);
    txn.generate_frame();
    api.send_transaction(document_id, txn);
    println!("Entering event loop");
    events_loop.run_forever(|global_event| {
        let mut txn = Transaction::new();
        // Whether this event requires rebuilding the display list.
        let mut custom_event = true;
        let old_flags = debug_flags;
        let win_event = match global_event {
            winit::Event::WindowEvent { event, .. } => event,
            _ => return winit::ControlFlow::Continue,
        };
        match win_event {
            winit::WindowEvent::CloseRequested => return winit::ControlFlow::Break,
            // High-frequency events are forwarded to the example but only
            // trigger a redraw when the example asks for one.
            winit::WindowEvent::AxisMotion { .. } |
            winit::WindowEvent::CursorMoved { .. } => {
                custom_event = example.on_event(
                    win_event,
                    &mut api,
                    document_id,
                );
                // skip high-frequency events from triggering a frame draw.
                if !custom_event {
                    return winit::ControlFlow::Continue;
                }
            },
            winit::WindowEvent::KeyboardInput {
                input: winit::KeyboardInput {
                    state: winit::ElementState::Pressed,
                    virtual_keycode: Some(key),
                    ..
                },
                ..
            } => match key {
                winit::VirtualKeyCode::Escape => return winit::ControlFlow::Break,
                // Debug-overlay toggles.
                winit::VirtualKeyCode::P => debug_flags.toggle(DebugFlags::PROFILER_DBG),
                winit::VirtualKeyCode::O => debug_flags.toggle(DebugFlags::RENDER_TARGET_DBG),
                winit::VirtualKeyCode::I => debug_flags.toggle(DebugFlags::TEXTURE_CACHE_DBG),
                winit::VirtualKeyCode::S => debug_flags.toggle(DebugFlags::COMPACT_PROFILER),
                winit::VirtualKeyCode::T => debug_flags.toggle(DebugFlags::PICTURE_CACHING_DBG),
                winit::VirtualKeyCode::Q => debug_flags.toggle(
                    DebugFlags::GPU_TIME_QUERIES | DebugFlags::GPU_SAMPLE_QUERIES
                ),
                winit::VirtualKeyCode::F => debug_flags.toggle(
                    DebugFlags::NEW_FRAME_INDICATOR | DebugFlags::NEW_SCENE_INDICATOR
                ),
                winit::VirtualKeyCode::G => debug_flags.toggle(DebugFlags::GPU_CACHE_DBG),
                // Document zoom shortcuts.
                winit::VirtualKeyCode::Key1 => txn.set_document_view(
                    device_size.into(),
                    1.0
                ),
                winit::VirtualKeyCode::Key2 => txn.set_document_view(
                    device_size.into(),
                    2.0
                ),
                winit::VirtualKeyCode::M => api.notify_memory_pressure(),
                winit::VirtualKeyCode::C => {
                    let path: PathBuf = "../captures/example".into();
                    //TODO: switch between SCENE/FRAME capture types
                    // based on "shift" modifier, when `glutin` is updated.
                    let bits = CaptureBits::all();
                    api.save_capture(path, bits);
                },
                // Any other key is forwarded to the example.
                _ => {
                    custom_event = example.on_event(
                        win_event,
                        &mut api,
                        document_id,
                    )
                },
            },
            other => custom_event = example.on_event(
                other,
                &mut api,
                document_id,
            ),
        };
        if debug_flags != old_flags {
            api.send_debug_cmd(DebugCommand::SetFlags(debug_flags));
        }
        // Rebuild and resubmit the display list when requested.
        if custom_event {
            let mut builder = DisplayListBuilder::new(pipeline_id, layout_size);
            example.render(
                &mut api,
                &mut builder,
                &mut txn,
                device_size,
                pipeline_id,
                document_id,
            );
            txn.set_display_list(
                epoch,
                Some(ColorF::new(0.3, 0.0, 0.0, 1.0)),
                layout_size,
                builder.finalize(),
                true,
            );
            txn.generate_frame();
        }
        api.send_transaction(document_id, txn);
        renderer.update();
        renderer.render(device_size).unwrap();
        let _ = renderer.flush_pipeline_info();
        example.draw_custom(&*gl);
        windowed_context.swap_buffers().ok();
        winit::ControlFlow::Continue
    });
    renderer.deinit();
}
|
mod markdown {
    use pulldown_cmark::Parser;
    use pulldown_cmark::Event;
    use pulldown_cmark::Tag;

    /// Thin wrapper around `pulldown_cmark` that extracts particular element
    /// kinds from a Markdown document.
    pub struct MarkdownParser<'a> {
        // Borrowed Markdown source text.
        text: &'a String
    }

    impl<'a> MarkdownParser<'a> {
        /// Creates a parser over the given Markdown text.
        pub fn new(s: &'a String) -> MarkdownParser {
            MarkdownParser {
                text: s
            }
        }

        /// Returns the destination URL of every link in the document.
        ///
        /// Fix: the previous implementation returned the link *text* instead
        /// of the URL, despite the method's name.
        pub fn links(&self) -> Vec<String> {
            self.extract_tag(|tag| {
                match tag {
                    Tag::Link(url, _) => Some(url.into_owned()),
                    _ => None
                }
            })
        }

        /// Returns the contents of every code block in the document.
        pub fn codeblocks(&self) -> Vec<String> {
            self.extract_tag(|tag| {
                match tag {
                    Tag::CodeBlock(text) => Some(text.into_owned()),
                    _ => None
                }
            })
        }

        // Shared tag walk, removing the iterator pipeline previously
        // duplicated between `links` and `codeblocks`: applies `f` to every
        // start/end tag and collects the `Some` results.
        fn extract_tag<F>(&self, f: F) -> Vec<String>
        where F: FnMut(Tag) -> Option<String>
        {
            Parser::new(&self.text[..])
                .filter_map(|e| {
                    match e {
                        Event::Start(t) | Event::End(t) => Some(t),
                        _ => None
                    }
                })
                .filter_map(f)
                .collect::<Vec<String>>()
        }
    }
}
Refactor common tag-extraction functionality into a helper function
mod markdown {
    use pulldown_cmark::Parser;
    use pulldown_cmark::Event;
    use pulldown_cmark::Tag;

    /// Thin wrapper around `pulldown_cmark` that extracts particular element
    /// kinds from a Markdown document.
    pub struct MarkdownParser<'a> {
        // Borrowed Markdown source. `&str` (instead of the former `&String`)
        // accepts any string slice; `&String` call sites still work through
        // deref coercion.
        text: &'a str
    }

    impl<'a> MarkdownParser<'a> {
        /// Creates a parser over the given Markdown text.
        pub fn new(s: &'a str) -> MarkdownParser {
            MarkdownParser {
                text: s
            }
        }

        /// Returns the destination URL of every link in the document.
        pub fn links(&self) -> Vec<String> {
            self.extract_tag(|tag| {
                match tag {
                    Tag::Link(url, _) => Some(url.into_owned()),
                    _ => None
                }
            })
        }

        /// Returns the contents of every code block in the document.
        pub fn codeblocks(&self) -> Vec<String> {
            self.extract_tag(|tag| {
                match tag {
                    Tag::CodeBlock(text) => Some(text.into_owned()),
                    _ => None
                }
            })
        }

        // Shared tag walk: applies `f` to every start/end tag and collects
        // the `Some` results.
        fn extract_tag<F>(&self, f: F) -> Vec<String>
        where F: FnMut(Tag) -> Option<String>
        {
            Parser::new(self.text)
                .filter_map(|e| {
                    match e {
                        Event::Start(t) | Event::End(t) => Some(t),
                        _ => None
                    }
                })
                .filter_map(f)
                .collect::<Vec<String>>()
        }
    }
}
|
//
// Copyright 2020 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! gRPC server pseudo-Node functionality.
use crate::{
auth::oidc_utils::ClientInfo,
metrics::Metrics,
node::{
grpc::{codec::VecCodec, to_tonic_status},
ConfigurationError, Node,
},
ChannelHalfDirection, RuntimeProxy,
};
use futures_util::stream;
use hyper::service::Service;
use log::{debug, error, info, trace, warn};
use oak_abi::{
label::Label, proto::oak::application::GrpcServerConfiguration, ChannelReadStatus, OakStatus,
};
use oak_services::proto::{
google::rpc,
oak::encap::{GrpcRequest, GrpcResponse},
};
use prost::Message;
use std::{
net::SocketAddr,
task::{Context, Poll},
};
use tokio::sync::oneshot;
use tonic::{
codegen::BoxFuture,
metadata::MetadataMap,
server::{Grpc, ServerStreamingService},
transport::{Identity, NamedService},
};
mod auth;
/// Struct that represents a gRPC server pseudo-Node.
pub struct GrpcServerNode {
    /// Pseudo-Node name.
    node_name: String,
    /// Server address to listen client requests on.
    address: SocketAddr,
    /// Loaded files containing a server TLS key and certificates.
    tls_identity: Identity,
    /// OpenID Connect Authentication client information. A value of `None` will mean that the
    /// server will not support OpenID Connect authentication.
    oidc_client_info: Option<ClientInfo>,
}
/// Verifies that the address does not use a reserved port: only ports greater
/// than 1023 are accepted.
fn check_port(address: &SocketAddr) -> Result<(), ConfigurationError> {
    match address.port() {
        0..=1023 => Err(ConfigurationError::IncorrectPort),
        _ => Ok(()),
    }
}
impl GrpcServerNode {
    /// Creates a new [`GrpcServerNode`] instance, but does not start it.
    ///
    /// Fails if the configured address does not parse, or if its port is in
    /// the reserved range (1023 or below).
    pub fn new(
        node_name: &str,
        config: GrpcServerConfiguration,
        tls_identity: Identity,
        oidc_client_info: Option<ClientInfo>,
    ) -> Result<Self, ConfigurationError> {
        let address = config.address.parse()?;
        check_port(&address)?;
        Ok(Self {
            node_name: node_name.to_string(),
            address,
            tls_identity,
            oidc_client_info,
        })
    }
    /// Reads the [`oak_abi::Handle`] for the write half of an invocation from a startup channel.
    /// Returns an error if the startup channel couldn't be read, or if the initial message
    /// is invalid (doesn't contain exactly one write handle).
    fn get_invocation_channel(
        runtime: &RuntimeProxy,
        startup_handle: oak_abi::Handle,
    ) -> Result<oak_abi::Handle, OakStatus> {
        // Block until the startup channel has something to read.
        let read_status = runtime
            .wait_on_channels(&[startup_handle])
            .map_err(|error| {
                error!("Couldn't wait on the initial reader handle: {:?}", error);
                OakStatus::ErrInternal
            })?;
        // TODO(#389): Automatically generate this code.
        let invocation_channel = if read_status[0] == ChannelReadStatus::ReadReady {
            runtime
                .channel_read(startup_handle)
                .map_err(|error| {
                    error!("Couldn't read from the initial reader handle {:?}", error);
                    OakStatus::ErrInternal
                })
                .and_then(|message| {
                    message
                        .ok_or_else(|| {
                            error!("Empty message");
                            OakStatus::ErrInternal
                        })
                        .and_then(|m| {
                            // The initial message must carry exactly one handle,
                            // and it must be the write half of a channel.
                            if m.handles.len() == 1 {
                                let handle = m.handles[0];
                                match runtime.channel_direction(handle)? {
                                    ChannelHalfDirection::Write => Ok(handle),
                                    ChannelHalfDirection::Read => {
                                        error!(
                                            "gRPC server pseudo-node should receive a writer handle, found reader handle {}",
                                            handle
                                        );
                                        Err(OakStatus::ErrBadHandle)
                                    },
                                }
                            } else {
                                error!(
                                    "gRPC server pseudo-node should receive a single writer handle, found {}",
                                    m.handles.len()
                                );
                                Err(OakStatus::ErrInternal)
                            }
                        })
                })
        } else {
            error!("Couldn't read channel: {:?}", read_status[0]);
            Err(OakStatus::ErrInternal)
        }?;
        info!(
            "Invocation channel write handle received: {}",
            invocation_channel
        );
        Ok(invocation_channel)
    }
}
/// Oak Node implementation for the gRPC server.
impl Node for GrpcServerNode {
    /// Runs the pseudo-Node: receives the invocation channel write handle,
    /// builds the tonic services, and blocks serving gRPC until
    /// `notify_receiver` fires (or is dropped), or the server fails.
    fn run(
        self: Box<Self>,
        runtime: RuntimeProxy,
        startup_handle: oak_abi::Handle,
        notify_receiver: oneshot::Receiver<()>,
    ) {
        // At start-of-day we need/expect to receive a write handle for an invocation channel
        // to use for all subsequent activity.
        info!("{}: Waiting for invocation channel", self.node_name);
        let invocation_channel =
            match GrpcServerNode::get_invocation_channel(&runtime, startup_handle) {
                Ok(writer) => writer,
                Err(status) => {
                    error!(
                        "Failed to retrieve invocation channel write handle: {:?}",
                        status
                    );
                    return;
                }
            };
        // The startup channel is no longer needed once the invocation handle
        // has been extracted; closure failure is logged but not fatal.
        if let Err(err) = runtime.channel_close(startup_handle) {
            error!(
                "Failed to close initial inbound channel {}: {:?}",
                startup_handle, err
            );
        }
        // Build a service to process incoming authentication gRPC requests.
        let auth_handler = match self.oidc_client_info {
            Some(auth_config) => auth::oidc_service::build_service(
                &auth_config.client_id,
                &auth_config.client_secret,
            ),
            // TODO(#1021): Add better handling to cases where the client info is not supplied.
            _ => auth::oidc_service::build_service("", ""),
        };
        // Build a service to process all other incoming HTTP/2 requests.
        let generic_handler = HttpRequestHandler {
            runtime,
            invocation_channel,
        };
        let server = tonic::transport::Server::builder()
            .tls_config(tonic::transport::ServerTlsConfig::new().identity(self.tls_identity))
            // The order for adding services are important. The namespaces of the services are
            // checked in the reverse order to which it was added. The `generic_handler` should
            // be added first so that it is checked last, otherwise it would handle requests
            // intended for other services.
            .add_service(generic_handler)
            .add_service(auth_handler)
            .serve_with_shutdown(self.address, async {
                // Treat notification failure the same as a notification.
                let _ = notify_receiver.await;
            });
        // Create an Async runtime for executing futures.
        // https://docs.rs/tokio/
        let mut async_runtime = tokio::runtime::Builder::new()
            // Use simple scheduler that runs all tasks on the current-thread.
            // https://docs.rs/tokio/0.2.16/tokio/runtime/index.html#basic-scheduler
            .basic_scheduler()
            // Enables the I/O driver.
            // Necessary for using net, process, signal, and I/O types on the Tokio runtime.
            .enable_io()
            // Enables the time driver.
            // Necessary for creating a Tokio Runtime.
            .enable_time()
            .build()
            .expect("Couldn't create Async runtime");
        // Start the gRPC server.
        info!(
            "{}: Starting gRPC server pseudo-Node on: {}",
            self.node_name, self.address
        );
        match async_runtime.block_on(server) {
            Err(err) => warn!(
                "{}: Error running gRPC server pseudo-Node: {}",
                self.node_name, err
            ),
            Ok(()) => {
                info!(
                    "{}: Success running gRPC server pseudo-Node",
                    self.node_name,
                );
            }
        }
    }
}
/// [`HttpRequestHandler`] handles HTTP/2 requests from a client and sends HTTP/2 responses back.
#[derive(Clone)]
struct HttpRequestHandler {
    /// Reference to the Runtime in the context of this gRPC server pseudo-Node.
    runtime: RuntimeProxy,
    /// Channel handle used for writing gRPC invocations.
    invocation_channel: oak_abi::Handle,
}
/// Set a mandatory prefix for all gRPC requests processed by a gRPC pseudo-Node.
impl NamedService for HttpRequestHandler {
    // All handled method paths are expected under the `oak` service namespace.
    const NAME: &'static str = "oak";
}
impl Service<http::Request<hyper::Body>> for HttpRequestHandler {
    type Response = http::Response<tonic::body::BoxBody>;
    type Error = http::Error;
    type Future = BoxFuture<Self::Response, Self::Error>;
    // This service never applies backpressure: it is always ready.
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Poll::Ready(Ok(()))
    }
    /// Decodes a gRPC request using a [`VecCodec`] and processes it with
    /// [`tonic::server::Grpc::server_streaming`] and a
    /// [`GrpcInvocationHandler`], recording the handled call in the metrics.
    fn call(&mut self, request: http::Request<hyper::Body>) -> Self::Future {
        // The request URI path identifies the gRPC method being invoked.
        let grpc_handler = GrpcInvocationHandler::new(
            self.runtime.clone(),
            self.invocation_channel,
            request.uri().path().to_string(),
        );
        let method_name = request.uri().path().to_string();
        let metrics_data = self.runtime.metrics_data();
        let future = async move {
            debug!("Processing HTTP/2 request: {:?}", request);
            let mut grpc_service = Grpc::new(VecCodec::default());
            let response = grpc_service.server_streaming(grpc_handler, request).await;
            debug!("Sending HTTP/2 response: {:?}", response);
            // Count the handled request, labelled by method and status code.
            let stc = format!("{}", response.status());
            metrics_data
                .grpc_server_metrics
                .grpc_server_handled_total
                .with_label_values(&[&method_name, &stc])
                .inc();
            Ok(response)
        };
        Box::pin(future)
    }
}
/// Maps label-extraction failures onto gRPC `InvalidArgument` statuses.
impl From<OakLabelError> for tonic::Status {
    fn from(v: OakLabelError) -> Self {
        let message = match v {
            OakLabelError::MissingLabel => "Missing Oak Label",
            OakLabelError::MultipleLabels => "Multiple Oak Labels",
            OakLabelError::InvalidLabel => "Invalid Oak Label",
        };
        tonic::Status::invalid_argument(message)
    }
}
/// Ways in which extracting the Oak label from request metadata can fail.
enum OakLabelError {
    // No label was present in the request metadata.
    MissingLabel,
    // More than one label was present.
    MultipleLabels,
    // A label was present but could not be converted to bytes or decoded.
    InvalidLabel,
}
/// Returns the [`Label`] defined as part of the metadata of an incoming gRPC
/// request.
///
/// Returns an error unless exactly one label is specified by the caller:
///
/// - no label means the caller did not specify any IFC restrictions, which is
///   probably a mistake;
/// - more than one label means the caller specified multiple IFC restrictions;
///   if the intention was to allow multiple alternative ones, they need to be
///   combined in a single label, once conjunctions are supported.
fn get_oak_label(metadata_map: &MetadataMap) -> Result<Label, OakLabelError> {
    let labels = metadata_map
        .get_all_bin(oak_abi::OAK_LABEL_GRPC_METADATA_KEY)
        .iter()
        .collect::<Vec<_>>();
    // Require exactly one label entry.
    let label = match labels.as_slice() {
        [single] => *single,
        [] => {
            warn!(
                "incorrect number of gRPC labels found: {}, expected: 1",
                labels.len()
            );
            return Err(OakLabelError::MissingLabel);
        }
        _ => {
            warn!(
                "incorrect number of gRPC labels found: {}, expected: 1",
                labels.len()
            );
            return Err(OakLabelError::MultipleLabels);
        }
    };
    let label_bytes = label.to_bytes().map_err(|err| {
        warn!("could not convert gRPC label to bytes: {}", err);
        OakLabelError::InvalidLabel
    })?;
    oak_abi::proto::oak::label::Label::decode(label_bytes).map_err(|err| {
        warn!("could not parse gRPC label: {}", err);
        OakLabelError::InvalidLabel
    })
}
/// Handler for an individual gRPC method invocation.
#[derive(Clone)]
struct GrpcInvocationHandler {
    /// Reference to the Runtime in the context of this gRPC server pseudo-Node.
    runtime: RuntimeProxy,
    /// Channel handle used for writing gRPC invocations.
    invocation_channel: oak_abi::Handle,
    /// Name of the gRPC method being invoked (taken from the request URI path).
    method_name: String,
}
impl ServerStreamingService<Vec<u8>> for GrpcInvocationHandler {
    type Response = Vec<u8>;
    type ResponseStream = std::pin::Pin<
        Box<dyn futures_core::Stream<Item = Result<Self::Response, tonic::Status>> + Send + Sync>,
    >;
    type Future = BoxFuture<tonic::Response<Self::ResponseStream>, tonic::Status>;
    /// Wraps the raw request bytes in a [`GrpcRequest`], injects it into the
    /// Oak Application, and streams back the response messages.
    fn call(&mut self, request: tonic::Request<Vec<u8>>) -> Self::Future {
        let handler = self.clone();
        let metrics_data = self.runtime.metrics_data();
        // Build a future of type `Future<Output = Result<Self::ResponseStream, tonic::Status>>`
        let future = async move {
            metrics_data
                .grpc_server_metrics
                .grpc_server_started_total
                .with_label_values(&[&handler.method_name])
                .inc();
            // The request must carry exactly one Oak label in its metadata.
            let oak_label = get_oak_label(request.metadata())?;
            info!(
                "handling gRPC request; peer address: {}, method: {}, request size: {} bytes, label: {:?}",
                // TODO(#1089): Ensure that the client address is available.
                request.remote_addr().map(|addr| addr.to_string()).unwrap_or_else(|| "<unknown>".to_string()),
                handler.method_name,
                request.get_ref().len(),
                oak_label
            );
            // Create an encapsulated gRPC request.
            // TODO(#97): Add client-streaming support.
            let grpc_request = GrpcRequest {
                method_name: handler.method_name.to_string(),
                req_msg: request.into_inner(),
                last: true,
            };
            // Inject the encapsulated gRPC request into the Oak Application.
            debug!("inject encapsulated request into Oak Node");
            let response_iter = handler
                .inject_grpc_request(grpc_request, &oak_label)
                .map_err(|()| tonic::Status::new(tonic::Code::Internal, ""))?;
            // First convert the `Iterator<Item = GrpcResponse>` to an
            // `Iterator<Item = Result<Vec<u8>, tonic::Status>`.
            let result_iter = response_iter.map(|response| {
                debug!("Returning gRPC response: {:?}", response);
                match response.status {
                    None => Ok(response.rsp_msg),
                    Some(status) if status.code == rpc::Code::Ok as i32 => Ok(response.rsp_msg),
                    Some(status) => Err(to_tonic_status(status)),
                }
            });
            // Now convert this to a streaming future of type
            // `Stream<Item = Result<Vec<u8>, tonic::Status>`
            // and then wrap it in Pin<Box<.>> to build a `Self::ResponseStream`.
            let result_stream: Self::ResponseStream = Box::pin(stream::iter(result_iter));
            // Finally, ensure this block returns an `Ok(tonic::Response<Self::ResponseStream>)`.
            Ok(tonic::Response::new(result_stream))
        };
        Box::pin(future)
    }
}
impl GrpcInvocationHandler {
    /// Creates a handler bound to one gRPC method name and the Node's
    /// invocation channel.
    fn new(
        runtime: RuntimeProxy,
        invocation_channel: oak_abi::Handle,
        method_name: String,
    ) -> Self {
        Self {
            runtime,
            invocation_channel,
            method_name,
        }
    }
    /// Send an encapsulated gRPC request into the Oak Application as an invocation.
    /// Returns an [`oak_abi::Handle`] for reading gRPC response(s) from.
    fn inject_grpc_request(
        &self,
        request: GrpcRequest,
        label: &Label,
    ) -> Result<GrpcResponseIterator, ()> {
        // Create a pair of temporary channels to pass the gRPC request and to receive the response.
        // The channel containing the request is created with the label specified by the caller.
        // This will fail if the label has a non-empty integrity component.
        let (request_writer, request_reader) =
            self.runtime.channel_create(&label).map_err(|err| {
                warn!("could not create gRPC request channel: {:?}", err);
            })?;
        let (response_writer, response_reader) = self
            .runtime
            .channel_create(&Label::public_untrusted())
            .map_err(|err| {
                warn!("could not create gRPC response channel: {:?}", err);
            })?;
        // Create an invocation message and attach the method-invocation specific channels to it.
        //
        // This message should be in sync with the [`oak::grpc::Invocation`] from the Oak SDK:
        // the order of the `request_reader` and `response_writer` must be consistent.
        let invocation = crate::NodeMessage {
            data: vec![],
            handles: vec![request_reader, response_writer],
        };
        // Serialize gRPC request into a message.
        let mut message = crate::NodeMessage {
            data: vec![],
            handles: vec![],
        };
        request.encode(&mut message.data).map_err(|error| {
            error!("Couldn't serialize GrpcRequest message: {}", error);
        })?;
        // Put the gRPC request message inside the per-invocation request channel.
        self.runtime
            .channel_write(request_writer, message)
            .map_err(|error| {
                error!(
                    "Couldn't write message to the gRPC request channel: {:?}",
                    error
                );
            })?;
        // Send an invocation message (with attached handles) to the Oak Node.
        self.runtime
            .channel_write(self.invocation_channel, invocation)
            .map_err(|error| {
                error!("Couldn't write gRPC invocation message: {:?}", error);
            })?;
        // Close all local handles except for the one that allows reading responses.
        // Close failures are logged but not propagated: the invocation has
        // already been delivered at this point.
        if let Err(err) = self.runtime.channel_close(request_writer) {
            error!(
                "Failed to close request writer channel for invocation: {:?}",
                err
            );
        }
        if let Err(err) = self.runtime.channel_close(request_reader) {
            error!(
                "Failed to close request reader channel for invocation: {:?}",
                err
            );
        }
        if let Err(err) = self.runtime.channel_close(response_writer) {
            error!(
                "Failed to close response writer channel for invocation: {:?}",
                err
            );
        }
        Ok(GrpcResponseIterator::new(
            self.runtime.clone(),
            response_reader,
            self.method_name.clone(),
        ))
    }
}
/// Records Prometheus metrics over the lifetime of one gRPC method invocation.
struct MetricsRecorder {
    metrics_data: Metrics,
    // gRPC method name, used as the label on every metric below.
    method_name: String,
    // Number of response messages observed so far for this invocation.
    msg_count: u32,
    // Latency timer; dropping it records the elapsed handling time.
    _timer: prometheus::HistogramTimer,
}
impl MetricsRecorder {
    /// Creates a recorder for `method_name` and starts its latency timer.
    fn new(runtime: RuntimeProxy, method_name: String) -> MetricsRecorder {
        let metrics_data = runtime.metrics_data();
        let timer = metrics_data
            .grpc_server_metrics
            .grpc_server_handled_latency_seconds
            .with_label_values(&[&method_name])
            .start_timer();
        MetricsRecorder {
            metrics_data,
            method_name,
            msg_count: 0,
            _timer: timer,
        }
    }
    /// Counts one response message and records its size in bytes.
    fn observe_message_with_len(&mut self, msg_len: usize) {
        self.msg_count += 1;
        self.metrics_data
            .grpc_server_metrics
            .grpc_server_response_size_bytes
            .with_label_values(&[&self.method_name])
            .observe(msg_len as f64);
    }
    /// Records the total number of messages sent during this invocation.
    fn observe_completion(&self) {
        self.metrics_data
            .grpc_server_metrics
            .grpc_server_msg_sent_total
            .with_label_values(&[&self.method_name])
            .observe(self.msg_count as f64);
    }
}
impl Drop for MetricsRecorder {
    fn drop(&mut self) {
        self.observe_completion();
    }
    // Note that dropping self._timer will record the duration.
}
/// Iterator over the encapsulated gRPC responses produced by a single
/// invocation, read from a dedicated response channel.
struct GrpcResponseIterator {
    runtime: RuntimeProxy,
    // Read half of the per-invocation response channel.
    response_reader: oak_abi::Handle,
    method_name: String,
    // The lifetime of the metrics_recorder matches the lifetime of the
    // iterator, updating the metrics when the iterator is dropped.
    metrics_recorder: MetricsRecorder,
    // Set once a response with `last == true` has been seen; later `next()`
    // calls then return `None` without touching the channel.
    done: bool,
}
impl GrpcResponseIterator {
    /// Creates an iterator that reads responses from `response_reader` and
    /// starts metrics recording for `method_name`.
    fn new(runtime: RuntimeProxy, response_reader: oak_abi::Handle, method_name: String) -> Self {
        trace!(
            "Create new GrpcResponseIterator for '{}', reading from {}",
            method_name,
            response_reader
        );
        let metrics_recorder = MetricsRecorder::new(runtime.clone(), method_name.clone());
        GrpcResponseIterator {
            runtime,
            response_reader,
            method_name,
            metrics_recorder,
            done: false,
        }
    }
}
/// Manual implementation of the `Drop` trait to ensure the response channel
/// is always closed.
impl Drop for GrpcResponseIterator {
    fn drop(&mut self) {
        trace!(
            "Dropping GrpcResponseIterator for '{}': close channel {}",
            self.method_name,
            self.response_reader
        );
        if let Err(err) = self.runtime.channel_close(self.response_reader) {
            error!("Failed to close gRPC response reader channel: {:?}", err);
        }
        // Note that dropping self.metrics_recorder will record the duration, and update the
        // `grpc_server_msg_sent_total` metric.
    }
}
impl Iterator for GrpcResponseIterator {
    type Item = GrpcResponse;
    /// Read a single encapsulated gRPC response from the provided channel.
    ///
    /// Returns `None` once the invocation is complete — either the previous
    /// response was marked `last`, or the channel was orphaned — and on any
    /// read/decode error (errors are logged, not propagated).
    fn next(&mut self) -> Option<Self::Item> {
        if self.done {
            return None;
        }
        // Block until the response channel has a message or is closed.
        let read_status = self
            .runtime
            .wait_on_channels(&[self.response_reader])
            .map_err(|error| {
                error!("Couldn't wait on the gRPC response channel: {:?}", error);
            })
            .ok()?;
        if read_status[0] == ChannelReadStatus::ReadReady {
            match self.runtime.channel_read(self.response_reader) {
                Ok(Some(msg)) => match GrpcResponse::decode(msg.data.as_slice()) {
                    Ok(grpc_rsp) => {
                        self.metrics_recorder
                            .observe_message_with_len(grpc_rsp.rsp_msg.len());
                        if grpc_rsp.last {
                            // The Node has definitively marked this as the last response for this
                            // invocation; keep track of this and don't bother attempting to read
                            // from the response channel next time around.
                            //
                            // Note that the reverse isn't always true: the final response for a
                            // server-streaming method might *not* have last=true; in that case the
                            // next attempt to read from the response channel will find a closed
                            // channel, and so we treat that as the end of the method invocation
                            // (below).
                            self.done = true;
                        }
                        trace!(
                            "Return response of size {}, status={:?} last={}",
                            grpc_rsp.rsp_msg.len(),
                            grpc_rsp.status,
                            grpc_rsp.last
                        );
                        Some(grpc_rsp)
                    }
                    Err(err) => {
                        error!("Couldn't parse the GrpcResponse message: {}", err);
                        None
                    }
                },
                Ok(None) => {
                    error!("No message available on gRPC response channel");
                    None
                }
                Err(status) => {
                    error!("Couldn't read from the gRPC response channel: {:?}", status);
                    None
                }
            }
        } else if read_status[0] == ChannelReadStatus::Orphaned {
            // Channel closed without an explicit last=true: normal end of a
            // server-streaming invocation.
            debug!("gRPC response channel closed");
            None
        } else {
            error!(
                "Couldn't read from the gRPC response channel: {:?}",
                read_status[0]
            );
            None
        }
    }
}
// Commit note — runtime: use "" as the generic handler package prefix.
// This should catch all unhandled gRPC method invocations. Fixes #1361.
//
// Copyright 2020 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! gRPC server pseudo-Node functionality.
use crate::{
auth::oidc_utils::ClientInfo,
metrics::Metrics,
node::{
grpc::{codec::VecCodec, to_tonic_status},
ConfigurationError, Node,
},
ChannelHalfDirection, RuntimeProxy,
};
use futures_util::stream;
use hyper::service::Service;
use log::{debug, error, info, trace, warn};
use oak_abi::{
label::Label, proto::oak::application::GrpcServerConfiguration, ChannelReadStatus, OakStatus,
};
use oak_services::proto::{
google::rpc,
oak::encap::{GrpcRequest, GrpcResponse},
};
use prost::Message;
use std::{
net::SocketAddr,
task::{Context, Poll},
};
use tokio::sync::oneshot;
use tonic::{
codegen::BoxFuture,
metadata::MetadataMap,
server::{Grpc, ServerStreamingService},
transport::{Identity, NamedService},
};
mod auth;
/// Struct that represents a gRPC server pseudo-Node.
pub struct GrpcServerNode {
    /// Pseudo-Node name.
    node_name: String,
    /// Server address to listen client requests on.
    address: SocketAddr,
    /// Loaded files containing a server TLS key and certificates.
    tls_identity: Identity,
    /// OpenID Connect Authentication client information. A value of `None` will mean that the
    /// server will not support OpenID Connect authentication.
    oidc_client_info: Option<ClientInfo>,
}
/// Rejects privileged ports: only addresses with a port above 1023 are
/// accepted.
fn check_port(address: &SocketAddr) -> Result<(), ConfigurationError> {
    match address.port() {
        0..=1023 => Err(ConfigurationError::IncorrectPort),
        _ => Ok(()),
    }
}
impl GrpcServerNode {
    /// Creates a new [`GrpcServerNode`] instance, but does not start it.
    ///
    /// Fails if the configured address cannot be parsed or uses a privileged
    /// port (see `check_port`).
    pub fn new(
        node_name: &str,
        config: GrpcServerConfiguration,
        tls_identity: Identity,
        oidc_client_info: Option<ClientInfo>,
    ) -> Result<Self, ConfigurationError> {
        let address = config.address.parse()?;
        check_port(&address)?;
        Ok(Self {
            node_name: node_name.to_string(),
            address,
            tls_identity,
            oidc_client_info,
        })
    }
    /// Reads the [`oak_abi::Handle`] for the write half of an invocation from a startup channel.
    /// Returns an error if the startup channel couldn't be read, or if the initial message
    /// is invalid (doesn't contain exactly one write handle).
    fn get_invocation_channel(
        runtime: &RuntimeProxy,
        startup_handle: oak_abi::Handle,
    ) -> Result<oak_abi::Handle, OakStatus> {
        // Block until the startup channel is readable (or closed).
        let read_status = runtime
            .wait_on_channels(&[startup_handle])
            .map_err(|error| {
                error!("Couldn't wait on the initial reader handle: {:?}", error);
                OakStatus::ErrInternal
            })?;
        // TODO(#389): Automatically generate this code.
        let invocation_channel = if read_status[0] == ChannelReadStatus::ReadReady {
            runtime
                .channel_read(startup_handle)
                .map_err(|error| {
                    error!("Couldn't read from the initial reader handle {:?}", error);
                    OakStatus::ErrInternal
                })
                .and_then(|message| {
                    message
                        .ok_or_else(|| {
                            error!("Empty message");
                            OakStatus::ErrInternal
                        })
                        .and_then(|m| {
                            // The startup message must hold exactly one handle,
                            // and it must be the write half of a channel.
                            if m.handles.len() == 1 {
                                let handle = m.handles[0];
                                match runtime.channel_direction(handle)? {
                                    ChannelHalfDirection::Write => Ok(handle),
                                    ChannelHalfDirection::Read => {
                                        error!(
                                            "gRPC server pseudo-node should receive a writer handle, found reader handle {}",
                                            handle
                                        );
                                        Err(OakStatus::ErrBadHandle)
                                    },
                                }
                            } else {
                                error!(
                                    "gRPC server pseudo-node should receive a single writer handle, found {}",
                                    m.handles.len()
                                );
                                Err(OakStatus::ErrInternal)
                            }
                        })
                })
        } else {
            error!("Couldn't read channel: {:?}", read_status[0]);
            Err(OakStatus::ErrInternal)
        }?;
        info!(
            "Invocation channel write handle received: {}",
            invocation_channel
        );
        Ok(invocation_channel)
    }
}
/// Oak Node implementation for the gRPC server.
impl Node for GrpcServerNode {
    /// Runs the server until `notify_receiver` fires (or its sender is
    /// dropped): receives the invocation channel handle, builds the tonic
    /// services, and blocks on a single-threaded tokio runtime serving them.
    fn run(
        self: Box<Self>,
        runtime: RuntimeProxy,
        startup_handle: oak_abi::Handle,
        notify_receiver: oneshot::Receiver<()>,
    ) {
        // At start-of-day we need/expect to receive a write handle for an invocation channel
        // to use for all subsequent activity.
        info!("{}: Waiting for invocation channel", self.node_name);
        let invocation_channel =
            match GrpcServerNode::get_invocation_channel(&runtime, startup_handle) {
                Ok(writer) => writer,
                Err(status) => {
                    error!(
                        "Failed to retrieve invocation channel write handle: {:?}",
                        status
                    );
                    return;
                }
            };
        // The startup channel is no longer needed once the invocation channel
        // handle has been extracted.
        if let Err(err) = runtime.channel_close(startup_handle) {
            error!(
                "Failed to close initial inbound channel {}: {:?}",
                startup_handle, err
            );
        }
        // Build a service to process incoming authentication gRPC requests.
        let auth_handler = match self.oidc_client_info {
            Some(auth_config) => auth::oidc_service::build_service(
                &auth_config.client_id,
                &auth_config.client_secret,
            ),
            // TODO(#1021): Add better handling to cases where the client info is not supplied.
            _ => auth::oidc_service::build_service("", ""),
        };
        // Build a service to process all other incoming HTTP/2 requests.
        let generic_handler = HttpRequestHandler {
            runtime,
            invocation_channel,
        };
        let server = tonic::transport::Server::builder()
            .tls_config(tonic::transport::ServerTlsConfig::new().identity(self.tls_identity))
            // The order for adding services are important. The namespaces of the services are
            // checked in the reverse order to which it was added. The `generic_handler` should
            // be added first so that it is checked last, otherwise it would handle requests
            // intended for other services.
            .add_service(generic_handler)
            .add_service(auth_handler)
            .serve_with_shutdown(self.address, async {
                // Treat notification failure the same as a notification.
                let _ = notify_receiver.await;
            });
        // Create an Async runtime for executing futures.
        // https://docs.rs/tokio/
        let mut async_runtime = tokio::runtime::Builder::new()
            // Use simple scheduler that runs all tasks on the current-thread.
            // https://docs.rs/tokio/0.2.16/tokio/runtime/index.html#basic-scheduler
            .basic_scheduler()
            // Enables the I/O driver.
            // Necessary for using net, process, signal, and I/O types on the Tokio runtime.
            .enable_io()
            // Enables the time driver.
            // Necessary for creating a Tokio Runtime.
            .enable_time()
            .build()
            .expect("Couldn't create Async runtime");
        // Start the gRPC server.
        info!(
            "{}: Starting gRPC server pseudo-Node on: {}",
            self.node_name, self.address
        );
        match async_runtime.block_on(server) {
            Err(err) => warn!(
                "{}: Error running gRPC server pseudo-Node: {}",
                self.node_name, err
            ),
            Ok(()) => {
                info!(
                    "{}: Success running gRPC server pseudo-Node",
                    self.node_name,
                );
            }
        }
    }
}
/// [`HttpRequestHandler`] handles HTTP/2 requests from a client and sends HTTP/2 responses back.
#[derive(Clone)]
struct HttpRequestHandler {
    /// Reference to the Runtime in the context of this gRPC server pseudo-Node.
    runtime: RuntimeProxy,
    /// Channel handle used for writing gRPC invocations.
    invocation_channel: oak_abi::Handle,
}
/// Set a mandatory prefix for all gRPC requests processed by a gRPC pseudo-Node.
/// The empty name makes this handler a catch-all for any method not claimed by
/// another registered service.
impl NamedService for HttpRequestHandler {
    const NAME: &'static str = "";
}
impl Service<http::Request<hyper::Body>> for HttpRequestHandler {
    type Response = http::Response<tonic::body::BoxBody>;
    type Error = http::Error;
    type Future = BoxFuture<Self::Response, Self::Error>;
    /// This handler is always ready to accept a request.
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Poll::Ready(Ok(()))
    }
    /// Decodes a gRPC request using a [`VecCodec`] and processes it with
    /// [`tonic::server::Grpc::server_streaming`] and a [`GrpcInvocationHandler`],
    /// recording a handled-total metric labelled with the method name and the
    /// HTTP status of the response.
    fn call(&mut self, request: http::Request<hyper::Body>) -> Self::Future {
        // Compute the method name once, instead of re-parsing the URI (and
        // allocating a fresh String) for the invocation handler and the
        // metrics label separately.
        let method_name = request.uri().path().to_string();
        let grpc_handler = GrpcInvocationHandler::new(
            self.runtime.clone(),
            self.invocation_channel,
            method_name.clone(),
        );
        let metrics_data = self.runtime.metrics_data();
        let future = async move {
            debug!("Processing HTTP/2 request: {:?}", request);
            let mut grpc_service = Grpc::new(VecCodec::default());
            let response = grpc_service.server_streaming(grpc_handler, request).await;
            debug!("Sending HTTP/2 response: {:?}", response);
            let stc = format!("{}", response.status());
            metrics_data
                .grpc_server_metrics
                .grpc_server_handled_total
                .with_label_values(&[&method_name, &stc])
                .inc();
            Ok(response)
        };
        Box::pin(future)
    }
}
/// Reasons why an Oak label could not be extracted from a gRPC request.
enum OakLabelError {
    /// No label was present in the request metadata.
    MissingLabel,
    /// More than one label was present in the request metadata.
    MultipleLabels,
    /// A label was present but could not be converted or decoded.
    InvalidLabel,
}
impl From<OakLabelError> for tonic::Status {
    /// Every label-extraction failure maps to `INVALID_ARGUMENT`, with a
    /// message identifying the specific problem.
    fn from(v: OakLabelError) -> Self {
        let message = match v {
            OakLabelError::MissingLabel => "Missing Oak Label",
            OakLabelError::MultipleLabels => "Multiple Oak Labels",
            OakLabelError::InvalidLabel => "Invalid Oak Label",
        };
        tonic::Status::invalid_argument(message)
    }
}
/// Returns the [`Label`] defined as part of the metadata of an incoming gRPC request.
///
/// Returns an error if there is not exactly one label specified by the caller:
///
/// - no labels means that the caller did not specify any IFC restrictions, which is probably a
///   mistake
/// - more than one label means that the caller specified multiple IFC restrictions; if the
///   intention was to allow multiple alternative ones, they need to be combined in a single label,
///   once conjunctions are supported
fn get_oak_label(metadata_map: &MetadataMap) -> Result<Label, OakLabelError> {
    let labels: Vec<_> = metadata_map
        .get_all_bin(oak_abi::OAK_LABEL_GRPC_METADATA_KEY)
        .iter()
        .collect();
    // Exactly one label entry is required; distinguish "none" from "many" in
    // the returned error.
    if labels.len() != 1 {
        warn!(
            "incorrect number of gRPC labels found: {}, expected: 1",
            labels.len()
        );
        return Err(if labels.is_empty() {
            OakLabelError::MissingLabel
        } else {
            OakLabelError::MultipleLabels
        });
    }
    let label_bytes = labels[0].to_bytes().map_err(|err| {
        warn!("could not convert gRPC label to bytes: {}", err);
        OakLabelError::InvalidLabel
    })?;
    oak_abi::proto::oak::label::Label::decode(label_bytes).map_err(|err| {
        warn!("could not parse gRPC label: {}", err);
        OakLabelError::InvalidLabel
    })
}
/// Handler for an individual gRPC method invocation.
#[derive(Clone)]
struct GrpcInvocationHandler {
    /// Reference to the Runtime in the context of this gRPC server pseudo-Node.
    runtime: RuntimeProxy,
    /// Channel handle used for writing gRPC invocations.
    invocation_channel: oak_abi::Handle,
    /// Name of the gRPC method being invoked.
    method_name: String,
}
impl ServerStreamingService<Vec<u8>> for GrpcInvocationHandler {
    type Response = Vec<u8>;
    // Responses are produced lazily from the invocation's response channel.
    type ResponseStream = std::pin::Pin<
        Box<dyn futures_core::Stream<Item = Result<Self::Response, tonic::Status>> + Send + Sync>,
    >;
    type Future = BoxFuture<tonic::Response<Self::ResponseStream>, tonic::Status>;
    /// Handles one server-streaming gRPC invocation: bumps the started-requests
    /// counter, extracts the Oak label from the request metadata, injects the
    /// encapsulated request into the Oak Application, and returns the
    /// response(s) as a stream.
    fn call(&mut self, request: tonic::Request<Vec<u8>>) -> Self::Future {
        // Clone the handler so the `async move` block below can own it.
        let handler = self.clone();
        let metrics_data = self.runtime.metrics_data();
        // Build a future of type `Future<Output = Result<Self::ResponseStream, tonic::Status>>`
        let future = async move {
            metrics_data
                .grpc_server_metrics
                .grpc_server_started_total
                .with_label_values(&[&handler.method_name])
                .inc();
            // Rejects the request (via `From<OakLabelError> for tonic::Status`)
            // unless exactly one valid Oak label was supplied.
            let oak_label = get_oak_label(request.metadata())?;
            info!(
                "handling gRPC request; peer address: {}, method: {}, request size: {} bytes, label: {:?}",
                // TODO(#1089): Ensure that the client address is available.
                request.remote_addr().map(|addr| addr.to_string()).unwrap_or_else(|| "<unknown>".to_string()),
                handler.method_name,
                request.get_ref().len(),
                oak_label
            );
            // Create an encapsulated gRPC request.
            // TODO(#97): Add client-streaming support.
            let grpc_request = GrpcRequest {
                method_name: handler.method_name.to_string(),
                req_msg: request.into_inner(),
                last: true,
            };
            // Inject the encapsulated gRPC request into the Oak Application.
            debug!("inject encapsulated request into Oak Node");
            let response_iter = handler
                .inject_grpc_request(grpc_request, &oak_label)
                .map_err(|()| tonic::Status::new(tonic::Code::Internal, ""))?;
            // First convert the `Iterator<Item = GrpcResponse>` to an
            // `Iterator<Item = Result<Vec<u8>, tonic::Status>`.
            let result_iter = response_iter.map(|response| {
                debug!("Returning gRPC response: {:?}", response);
                match response.status {
                    // No status and an explicit OK status both count as success.
                    None => Ok(response.rsp_msg),
                    Some(status) if status.code == rpc::Code::Ok as i32 => Ok(response.rsp_msg),
                    Some(status) => Err(to_tonic_status(status)),
                }
            });
            // Now convert this to a streaming future of type
            // `Stream<Item = Result<Vec<u8>, tonic::Status>`
            // and then wrap it in Pin<Box<.>> to build a `Self::ResponseStream`.
            let result_stream: Self::ResponseStream = Box::pin(stream::iter(result_iter));
            // Finally, ensure this block returns an `Ok(tonic::Response<Self::ResponseStream>)`.
            Ok(tonic::Response::new(result_stream))
        };
        Box::pin(future)
    }
}
impl GrpcInvocationHandler {
    /// Creates a handler bound to one gRPC method name and the Node's
    /// invocation channel.
    fn new(
        runtime: RuntimeProxy,
        invocation_channel: oak_abi::Handle,
        method_name: String,
    ) -> Self {
        Self {
            runtime,
            invocation_channel,
            method_name,
        }
    }
    /// Send an encapsulated gRPC request into the Oak Application as an invocation.
    /// Returns an [`oak_abi::Handle`] for reading gRPC response(s) from.
    fn inject_grpc_request(
        &self,
        request: GrpcRequest,
        label: &Label,
    ) -> Result<GrpcResponseIterator, ()> {
        // Create a pair of temporary channels to pass the gRPC request and to receive the response.
        // The channel containing the request is created with the label specified by the caller.
        // This will fail if the label has a non-empty integrity component.
        let (request_writer, request_reader) =
            self.runtime.channel_create(&label).map_err(|err| {
                warn!("could not create gRPC request channel: {:?}", err);
            })?;
        let (response_writer, response_reader) = self
            .runtime
            .channel_create(&Label::public_untrusted())
            .map_err(|err| {
                warn!("could not create gRPC response channel: {:?}", err);
            })?;
        // Create an invocation message and attach the method-invocation specific channels to it.
        //
        // This message should be in sync with the [`oak::grpc::Invocation`] from the Oak SDK:
        // the order of the `request_reader` and `response_writer` must be consistent.
        let invocation = crate::NodeMessage {
            data: vec![],
            handles: vec![request_reader, response_writer],
        };
        // Serialize gRPC request into a message.
        let mut message = crate::NodeMessage {
            data: vec![],
            handles: vec![],
        };
        request.encode(&mut message.data).map_err(|error| {
            error!("Couldn't serialize GrpcRequest message: {}", error);
        })?;
        // Put the gRPC request message inside the per-invocation request channel.
        self.runtime
            .channel_write(request_writer, message)
            .map_err(|error| {
                error!(
                    "Couldn't write message to the gRPC request channel: {:?}",
                    error
                );
            })?;
        // Send an invocation message (with attached handles) to the Oak Node.
        self.runtime
            .channel_write(self.invocation_channel, invocation)
            .map_err(|error| {
                error!("Couldn't write gRPC invocation message: {:?}", error);
            })?;
        // Close all local handles except for the one that allows reading responses.
        // Close failures are logged but not propagated: the invocation has
        // already been delivered at this point.
        if let Err(err) = self.runtime.channel_close(request_writer) {
            error!(
                "Failed to close request writer channel for invocation: {:?}",
                err
            );
        }
        if let Err(err) = self.runtime.channel_close(request_reader) {
            error!(
                "Failed to close request reader channel for invocation: {:?}",
                err
            );
        }
        if let Err(err) = self.runtime.channel_close(response_writer) {
            error!(
                "Failed to close response writer channel for invocation: {:?}",
                err
            );
        }
        Ok(GrpcResponseIterator::new(
            self.runtime.clone(),
            response_reader,
            self.method_name.clone(),
        ))
    }
}
/// Records Prometheus metrics over the lifetime of one gRPC method invocation.
struct MetricsRecorder {
    metrics_data: Metrics,
    // gRPC method name, used as the label on every metric below.
    method_name: String,
    // Number of response messages observed so far for this invocation.
    msg_count: u32,
    // Latency timer; dropping it records the elapsed handling time.
    _timer: prometheus::HistogramTimer,
}
impl MetricsRecorder {
    /// Creates a recorder for `method_name` and starts its latency timer.
    fn new(runtime: RuntimeProxy, method_name: String) -> Self {
        let metrics_data = runtime.metrics_data();
        let latency_timer = metrics_data
            .grpc_server_metrics
            .grpc_server_handled_latency_seconds
            .with_label_values(&[&method_name])
            .start_timer();
        Self {
            metrics_data,
            method_name,
            msg_count: 0,
            _timer: latency_timer,
        }
    }
    /// Counts one response message and records its size in bytes.
    fn observe_message_with_len(&mut self, msg_len: usize) {
        self.msg_count += 1;
        let metrics = &self.metrics_data.grpc_server_metrics;
        metrics
            .grpc_server_response_size_bytes
            .with_label_values(&[&self.method_name])
            .observe(msg_len as f64);
    }
    /// Records the total number of messages sent during this invocation.
    fn observe_completion(&self) {
        let metrics = &self.metrics_data.grpc_server_metrics;
        metrics
            .grpc_server_msg_sent_total
            .with_label_values(&[&self.method_name])
            .observe(f64::from(self.msg_count));
    }
}
impl Drop for MetricsRecorder {
    fn drop(&mut self) {
        // Flush the per-invocation message counter; dropping `_timer`
        // afterwards records the handling duration.
        self.observe_completion();
    }
}
/// Iterator over the encapsulated gRPC responses produced by a single
/// invocation, read from a dedicated response channel.
struct GrpcResponseIterator {
    runtime: RuntimeProxy,
    response_reader: oak_abi::Handle,
    method_name: String,
    // The lifetime of the metrics_recorder matches the lifetime of the
    // iterator, updating the metrics when the iterator is dropped.
    metrics_recorder: MetricsRecorder,
    // Set once a response with `last == true` has been seen.
    done: bool,
}
impl GrpcResponseIterator {
    /// Creates an iterator that reads responses from `response_reader` and
    /// starts metrics recording for `method_name`.
    fn new(runtime: RuntimeProxy, response_reader: oak_abi::Handle, method_name: String) -> Self {
        trace!(
            "Create new GrpcResponseIterator for '{}', reading from {}",
            method_name,
            response_reader
        );
        let recorder = MetricsRecorder::new(runtime.clone(), method_name.clone());
        Self {
            runtime,
            response_reader,
            method_name,
            metrics_recorder: recorder,
            done: false,
        }
    }
}
/// Manual implementation of the `Drop` trait to ensure the response channel
/// is always closed.
impl Drop for GrpcResponseIterator {
    fn drop(&mut self) {
        trace!(
            "Dropping GrpcResponseIterator for '{}': close channel {}",
            self.method_name,
            self.response_reader
        );
        if let Err(err) = self.runtime.channel_close(self.response_reader) {
            error!("Failed to close gRPC response reader channel: {:?}", err);
        }
        // Dropping `self.metrics_recorder` records the handling duration and
        // updates the `grpc_server_msg_sent_total` metric.
    }
}
impl Iterator for GrpcResponseIterator {
    type Item = GrpcResponse;
    /// Reads a single encapsulated gRPC response from the response channel.
    ///
    /// Returns `None` once the invocation is complete — either the previous
    /// response was marked `last`, or the channel was orphaned — and on any
    /// read/decode error (errors are logged, not propagated).
    fn next(&mut self) -> Option<Self::Item> {
        if self.done {
            return None;
        }
        // Block until the response channel has a message or is closed.
        let read_status = self
            .runtime
            .wait_on_channels(&[self.response_reader])
            .map_err(|error| {
                error!("Couldn't wait on the gRPC response channel: {:?}", error);
            })
            .ok()?;
        match read_status[0] {
            ChannelReadStatus::ReadReady => {
                let msg = match self.runtime.channel_read(self.response_reader) {
                    Ok(Some(msg)) => msg,
                    Ok(None) => {
                        error!("No message available on gRPC response channel");
                        return None;
                    }
                    Err(status) => {
                        error!("Couldn't read from the gRPC response channel: {:?}", status);
                        return None;
                    }
                };
                match GrpcResponse::decode(msg.data.as_slice()) {
                    Ok(grpc_rsp) => {
                        self.metrics_recorder
                            .observe_message_with_len(grpc_rsp.rsp_msg.len());
                        if grpc_rsp.last {
                            // The Node has definitively marked this as the last response for this
                            // invocation; keep track of this and don't bother attempting to read
                            // from the response channel next time around.
                            //
                            // Note that the reverse isn't always true: the final response for a
                            // server-streaming method might *not* have last=true; in that case the
                            // next attempt to read from the response channel will find a closed
                            // channel, and so we treat that as the end of the method invocation
                            // (below).
                            self.done = true;
                        }
                        trace!(
                            "Return response of size {}, status={:?} last={}",
                            grpc_rsp.rsp_msg.len(),
                            grpc_rsp.status,
                            grpc_rsp.last
                        );
                        Some(grpc_rsp)
                    }
                    Err(err) => {
                        error!("Couldn't parse the GrpcResponse message: {}", err);
                        None
                    }
                }
            }
            ChannelReadStatus::Orphaned => {
                // Channel closed without an explicit last=true: normal end of a
                // server-streaming invocation.
                debug!("gRPC response channel closed");
                None
            }
            status => {
                error!("Couldn't read from the gRPC response channel: {:?}", status);
                None
            }
        }
    }
}
// ---------------------------------------------------------------------------
use std::collections::HashSet;
use crate::language::error::msg::SemError;
use crate::language::sem_analysis::{
FctDefinition, FctDefinitionId, FctParent, SemAnalysis, TypeParam, TypeParamId,
};
use crate::language::sym::{NestedSymTable, Sym};
use crate::language::ty::SourceType;
use crate::language::{self, AllowSelf, TypeParamContext};
/// Performs the definition pass over every function: validates its modifiers,
/// collects type parameters (first the enclosing container's, then the
/// function's own), resolves parameter and return types, and finally checks
/// for clashes with sibling methods.
pub fn check(sa: &SemAnalysis) {
    for fct in sa.fcts.iter() {
        let mut fct = fct.write();
        let ast = fct.ast.clone();
        // check modifiers for function
        check_abstract(sa, &*fct);
        check_static(sa, &*fct);
        check_test(sa, &*fct);
        let mut sym_table = NestedSymTable::new(sa, fct.module_id);
        sym_table.push_level();
        // Bring the container's type parameters into scope and, for methods,
        // register the implicit `self` parameter type first.
        match fct.parent {
            FctParent::Class(owner_class) => {
                let cls = sa.classes.idx(owner_class);
                let cls = cls.read();
                for (type_param_id, param) in cls.type_params.iter().enumerate() {
                    let sym = Sym::TypeParam(TypeParamId(type_param_id));
                    sym_table.insert(param.name, sym);
                    fct.type_params.push(param.clone());
                }
                if fct.has_self() {
                    fct.param_types.push(cls.ty());
                }
            }
            FctParent::Impl(impl_id) => {
                let impl_ = sa.impls[impl_id].read();
                for (type_param_id, param) in impl_.type_params.iter().enumerate() {
                    let sym = Sym::TypeParam(TypeParamId(type_param_id));
                    sym_table.insert(param.name, sym);
                    fct.type_params.push(param.clone());
                }
                if fct.has_self() {
                    fct.param_types.push(impl_.ty.clone());
                }
            }
            FctParent::Extension(extension_id) => {
                let extension = sa.extensions[extension_id].read();
                for (type_param_id, param) in extension.type_params.iter().enumerate() {
                    let sym = Sym::TypeParam(TypeParamId(type_param_id));
                    sym_table.insert(param.name, sym);
                    fct.type_params.push(param.clone());
                }
                if fct.has_self() {
                    fct.param_types.push(extension.ty.clone());
                }
            }
            FctParent::Trait(trait_id) => {
                let trait_ = sa.traits[trait_id].read();
                for (type_param_id, param) in trait_.type_params.iter().enumerate() {
                    let sym = Sym::TypeParam(TypeParamId(type_param_id));
                    sym_table.insert(param.name, sym);
                    fct.type_params.push(param.clone());
                }
                if fct.has_self() {
                    // Trait methods use the placeholder `Self` type.
                    fct.param_types.push(SourceType::This);
                }
            }
            FctParent::None => {}
        }
        // Function-level type params are appended after the container's, so
        // remember where the function's own params start.
        let container_type_params = fct.type_params.len();
        fct.container_type_params = container_type_params;
        if let Some(ref type_params) = ast.type_params {
            if type_params.len() > 0 {
                // Check each type parameter for duplicate names and resolve
                // its trait bounds.
                let mut names = HashSet::new();
                for (type_param_id, type_param) in type_params.iter().enumerate() {
                    if !names.insert(type_param.name) {
                        let name = sa.interner.str(type_param.name).to_string();
                        let msg = SemError::TypeParamNameNotUnique(name);
                        sa.diag.lock().report(fct.file_id, type_param.pos, msg);
                    }
                    fct.type_params.push(TypeParam::new(type_param.name));
                    for bound in &type_param.bounds {
                        let ty = language::read_type(
                            sa,
                            &sym_table,
                            fct.file_id,
                            bound,
                            TypeParamContext::Fct(&*fct),
                            AllowSelf::No,
                        );
                        match ty {
                            Some(SourceType::Trait(trait_id, _)) => {
                                // `insert` returning false means the bound was
                                // already present: a duplicate.
                                if !fct.type_params[container_type_params + type_param_id]
                                    .trait_bounds
                                    .insert(trait_id)
                                {
                                    let msg = SemError::DuplicateTraitBound;
                                    sa.diag.lock().report(fct.file_id, type_param.pos, msg);
                                }
                            }
                            None => {
                                // unknown type, error is already thrown
                            }
                            _ => {
                                // Only traits are valid bounds.
                                let msg = SemError::BoundExpected;
                                sa.diag.lock().report(fct.file_id, bound.pos(), msg);
                            }
                        }
                    }
                    let sym = Sym::TypeParam(TypeParamId(container_type_params + type_param_id));
                    sym_table.insert(type_param.name, sym);
                }
            } else {
                // An explicit-but-empty type parameter list (`[]`) is rejected.
                let msg = SemError::TypeParamsExpected;
                sa.diag.lock().report(fct.file_id, fct.pos, msg);
            }
        }
        // Resolve value parameter types; a variadic parameter is only allowed
        // in the last position.
        for p in &ast.params {
            if fct.is_variadic {
                sa.diag
                    .lock()
                    .report(fct.file_id, p.pos, SemError::VariadicParameterNeedsToBeLast);
            }
            let ty = language::read_type(
                sa,
                &sym_table,
                fct.file_id,
                &p.data_type,
                TypeParamContext::Fct(&*fct),
                if fct.in_trait() {
                    AllowSelf::Yes
                } else {
                    AllowSelf::No
                },
            )
            .unwrap_or(SourceType::Error);
            fct.param_types.push(ty);
            if p.variadic {
                fct.is_variadic = true;
            }
        }
        // Resolve the return type; a missing annotation means unit.
        if let Some(ret) = ast.return_type.as_ref() {
            let ty = language::read_type(
                sa,
                &sym_table,
                fct.file_id,
                ret,
                TypeParamContext::Fct(&*fct),
                if fct.in_trait() {
                    AllowSelf::Yes
                } else {
                    AllowSelf::No
                },
            )
            .unwrap_or(SourceType::Error);
            fct.return_type = ty;
        } else {
            fct.return_type = SourceType::Unit;
        }
        fct.initialized = true;
        // With types resolved, check for clashes with sibling methods of the
        // same container.
        match fct.parent {
            FctParent::Class(clsid) => {
                let cls = sa.classes.idx(clsid);
                let cls = cls.read();
                check_against_methods(sa, &*fct, &cls.methods);
            }
            FctParent::Trait(traitid) => {
                let trait_ = sa.traits[traitid].read();
                check_against_methods(sa, &*fct, &trait_.methods);
            }
            FctParent::Impl(implid) => {
                let impl_ = sa.impls[implid].read();
                check_against_methods(sa, &*fct, &impl_.methods);
            }
            _ => {}
        }
    }
}
/// Reports errors for `@abstract` methods: they must not have a body and may
/// only appear inside an abstract class.
fn check_abstract(sa: &SemAnalysis, fct: &FctDefinition) {
    if !fct.is_abstract {
        return;
    }
    let class_id = fct.cls_id();
    let class_arc = sa.classes.idx(class_id);
    let class = class_arc.read();
    if fct.has_body() {
        sa.diag.lock().report(
            fct.file_id,
            fct.pos,
            SemError::AbstractMethodWithImplementation,
        );
    }
    if !class.is_abstract {
        sa.diag.lock().report(
            fct.file_id,
            fct.pos,
            SemError::AbstractMethodNotInAbstractClass,
        );
    }
}
/// Reports an error when `@static` is combined with a modifier that is not
/// allowed on static methods (`abstract`, `open`, `override` or `final`).
fn check_static(sa: &SemAnalysis, fct: &FctDefinition) {
    if !fct.is_static {
        return;
    }
    // Pick the first forbidden modifier present, if any, for the error message.
    let forbidden = if fct.is_abstract {
        Some("abstract")
    } else if fct.is_open {
        Some("open")
    } else if fct.is_override {
        Some("override")
    } else if fct.is_final {
        Some("final")
    } else {
        None
    };
    if let Some(modifier) = forbidden {
        let msg = SemError::ModifierNotAllowedForStaticMethod(modifier.into());
        sa.diag.lock().report(fct.file_id, fct.pos, msg);
    }
}
/// Reports an error for `@test` functions with an invalid shape: a test must
/// be a free function without type parameters or value parameters, returning
/// unit.
fn check_test(sa: &SemAnalysis, fct: &FctDefinition) {
    if !fct.is_test {
        return;
    }
    // An erroneous return type is tolerated here to avoid piling a misleading
    // second diagnostic on top of the one already reported for it.
    if !fct.parent.is_none()
        || !fct.type_params.is_empty()
        || !fct.param_types.is_empty()
        || (!fct.return_type.is_unit() && !fct.return_type.is_error())
    {
        // Removed a leftover debug `println!` that dumped internal state to
        // stdout; the diagnostic below is the intended user-visible report.
        let msg = SemError::InvalidTestAnnotationUsage;
        sa.diag.lock().report(fct.file_id, fct.pos, msg);
    }
}
/// Reports `MethodExists` if another already-initialized method in the same
/// container shares this function's name and static-ness. A static and a
/// non-static method may legally share a name.
fn check_against_methods(sa: &SemAnalysis, fct: &FctDefinition, methods: &[FctDefinitionId]) {
    for &other_id in methods {
        // Never compare a method against itself.
        if other_id == fct.id() {
            continue;
        }

        let other = sa.fcts.idx(other_id);
        let other = other.read();

        // Only consider methods whose definitions have been fully processed.
        let clashes =
            other.initialized && other.name == fct.name && other.is_static == fct.is_static;

        if clashes {
            let name = sa.interner.str(other.name).to_string();
            sa.diag
                .lock()
                .report(fct.file_id, fct.ast.pos, SemError::MethodExists(name, other.pos));
            return;
        }
    }
}
// Unit tests for the function-definition checks: `Self` availability,
// type-parameter handling, modifier combinations and duplicate methods.
// NOTE: the `pos(line, column)` expectations depend on the exact layout of
// the embedded source strings — do not reformat the string literals.
#[cfg(test)]
mod tests {
use crate::language::error::msg::SemError;
use crate::language::tests::*;
// `Self` is unavailable in a free function's parameter list.
#[test]
fn self_param() {
err(
"fn foo(x: Self) {}",
pos(1, 11),
SemError::SelfTypeUnavailable,
);
}
// `Self` is also unavailable as a free function's return type.
#[test]
fn self_return_type() {
err(
"fn foo(): Self {}",
pos(1, 11),
SemError::SelfTypeUnavailable,
);
}
// Despite the name: a static and a non-static method with the same name
// currently clash (MethodExists is reported).
#[test]
fn allow_same_method_as_static_and_non_static() {
err(
"class Foo {
@static fn foo() {}
fn foo() {}
}",
pos(3, 17),
SemError::MethodExists("foo".into(), pos(2, 25)),
);
}
// Type-parameter lists: duplicates are rejected, empty lists are rejected.
#[test]
fn fct_with_type_params() {
ok("fn f[T]() {}");
ok("fn f[X, Y]() {}");
err(
"fn f[T, T]() {}",
pos(1, 9),
SemError::TypeParamNameNotUnique("T".into()),
);
err("fn f[]() {}", pos(1, 1), SemError::TypeParamsExpected);
}
// A type parameter is usable in the parameter annotations.
#[test]
fn fct_with_type_param_in_annotation() {
ok("fn f[T](val: T) {}");
}
// @abstract requires the enclosing class to be abstract.
#[test]
fn abstract_method_in_non_abstract_class() {
err(
"class A { @abstract fn foo(); }",
pos(1, 21),
SemError::AbstractMethodNotInAbstractClass,
);
}
// @abstract methods must not have a body.
#[test]
fn abstract_method_with_implementation() {
err(
"@abstract class A { @abstract fn foo() {} }",
pos(1, 31),
SemError::AbstractMethodWithImplementation,
);
}
// @static excludes @abstract …
#[test]
fn abstract_static_method() {
err(
"@abstract class A { @static @abstract fn foo(); }",
pos(1, 39),
SemError::ModifierNotAllowedForStaticMethod("abstract".into()),
);
}
// … and @open …
#[test]
fn open_static_method() {
err(
"@abstract class A { @static @open fn foo() {} }",
pos(1, 35),
SemError::ModifierNotAllowedForStaticMethod("open".into()),
);
}
// … and @override …
#[test]
fn override_static_method() {
err(
"@abstract class A { @static @override fn foo() {} }",
pos(1, 39),
SemError::ModifierNotAllowedForStaticMethod("override".into()),
);
}
// … and @final.
#[test]
fn final_static_method() {
err(
"@abstract class A { @final @static fn foo() {} }",
pos(1, 36),
SemError::ModifierNotAllowedForStaticMethod("final".into()),
);
}
// Lambda parameter/return types are resolved like ordinary types.
#[test]
fn lambdas() {
ok("fn f() { || {}; }");
ok("fn f() { |a: Int32| {}; }");
ok("fn f() { || -> Int32 { return 2; }; }");
err(
"fn f() { || -> Foo { }; }",
pos(1, 16),
SemError::UnknownIdentifier("Foo".into()),
);
err(
"fn f() { |a: Foo| { }; }",
pos(1, 14),
SemError::UnknownIdentifier("Foo".into()),
);
}
// Bounds must name traits; duplicated bounds are rejected.
#[test]
fn generic_bounds() {
err(
"fn f[T: Foo]() {}",
pos(1, 9),
SemError::UnknownIdentifier("Foo".into()),
);
err(
"class Foo fn f[T: Foo]() {}",
pos(1, 19),
SemError::BoundExpected,
);
ok("trait Foo {} fn f[T: Foo]() {}");
err(
"trait Foo {}
fn f[T: Foo + Foo]() { }",
pos(2, 18),
SemError::DuplicateTraitBound,
);
}
// Type params from a previous definition must not leak into later ones.
#[test]
fn check_previous_defined_type_params() {
// Type params need to be cleaned up such that the following code is an error:
err(
"fn f(a: T) {}",
pos(1, 9),
SemError::UnknownIdentifier("T".into()),
);
}
}
The @test annotation needs to be checked after the function definition has been initialized.
use std::collections::HashSet;
use crate::language::error::msg::SemError;
use crate::language::sem_analysis::{
FctDefinition, FctDefinitionId, FctParent, SemAnalysis, TypeParam, TypeParamId,
};
use crate::language::sym::{NestedSymTable, Sym};
use crate::language::ty::SourceType;
use crate::language::{self, AllowSelf, TypeParamContext};
/// Checks every function definition in the program: validates its modifiers,
/// collects type parameters (both inherited from the enclosing
/// class/impl/extension/trait and the function's own), resolves parameter
/// and return types, and finally checks for duplicate methods.
///
/// Mutates each `FctDefinition` in place (type params, param types, return
/// type, `initialized` flag) while holding its write lock.
pub fn check(sa: &SemAnalysis) {
    for fct in sa.fcts.iter() {
        let mut fct = fct.write();
        let ast = fct.ast.clone();
        // check modifiers for function
        check_abstract(sa, &*fct);
        check_static(sa, &*fct);
        // Fresh symbol-table level for this function's type parameters.
        let mut sym_table = NestedSymTable::new(sa, fct.module_id);
        sym_table.push_level();
        // Inherit the container's type parameters first, so their ids come
        // before the function's own, and add the implicit `self` parameter
        // type where applicable.
        match fct.parent {
            FctParent::Class(owner_class) => {
                let cls = sa.classes.idx(owner_class);
                let cls = cls.read();
                for (type_param_id, param) in cls.type_params.iter().enumerate() {
                    let sym = Sym::TypeParam(TypeParamId(type_param_id));
                    sym_table.insert(param.name, sym);
                    fct.type_params.push(param.clone());
                }
                if fct.has_self() {
                    fct.param_types.push(cls.ty());
                }
            }
            FctParent::Impl(impl_id) => {
                let impl_ = sa.impls[impl_id].read();
                for (type_param_id, param) in impl_.type_params.iter().enumerate() {
                    let sym = Sym::TypeParam(TypeParamId(type_param_id));
                    sym_table.insert(param.name, sym);
                    fct.type_params.push(param.clone());
                }
                if fct.has_self() {
                    fct.param_types.push(impl_.ty.clone());
                }
            }
            FctParent::Extension(extension_id) => {
                let extension = sa.extensions[extension_id].read();
                for (type_param_id, param) in extension.type_params.iter().enumerate() {
                    let sym = Sym::TypeParam(TypeParamId(type_param_id));
                    sym_table.insert(param.name, sym);
                    fct.type_params.push(param.clone());
                }
                if fct.has_self() {
                    fct.param_types.push(extension.ty.clone());
                }
            }
            FctParent::Trait(trait_id) => {
                let trait_ = sa.traits[trait_id].read();
                for (type_param_id, param) in trait_.type_params.iter().enumerate() {
                    let sym = Sym::TypeParam(TypeParamId(type_param_id));
                    sym_table.insert(param.name, sym);
                    fct.type_params.push(param.clone());
                }
                if fct.has_self() {
                    // In a trait, `self` has the placeholder type `This`.
                    fct.param_types.push(SourceType::This);
                }
            }
            FctParent::None => {}
        }
        // Everything pushed so far belongs to the container; the function's
        // own type params are appended after this offset.
        let container_type_params = fct.type_params.len();
        fct.container_type_params = container_type_params;
        if let Some(ref type_params) = ast.type_params {
            if type_params.len() > 0 {
                // The function has its own type params; check for duplicate
                // names and resolve each bound to a trait.
                let mut names = HashSet::new();
                for (type_param_id, type_param) in type_params.iter().enumerate() {
                    if !names.insert(type_param.name) {
                        let name = sa.interner.str(type_param.name).to_string();
                        let msg = SemError::TypeParamNameNotUnique(name);
                        sa.diag.lock().report(fct.file_id, type_param.pos, msg);
                    }
                    fct.type_params.push(TypeParam::new(type_param.name));
                    for bound in &type_param.bounds {
                        let ty = language::read_type(
                            sa,
                            &sym_table,
                            fct.file_id,
                            bound,
                            TypeParamContext::Fct(&*fct),
                            AllowSelf::No,
                        );
                        match ty {
                            Some(SourceType::Trait(trait_id, _)) => {
                                // `insert` returns false when the trait was
                                // already present -> duplicated bound.
                                if !fct.type_params[container_type_params + type_param_id]
                                    .trait_bounds
                                    .insert(trait_id)
                                {
                                    let msg = SemError::DuplicateTraitBound;
                                    sa.diag.lock().report(fct.file_id, type_param.pos, msg);
                                }
                            }
                            None => {
                                // unknown type, error is already thrown
                            }
                            _ => {
                                // Resolved to something that is not a trait.
                                let msg = SemError::BoundExpected;
                                sa.diag.lock().report(fct.file_id, bound.pos(), msg);
                            }
                        }
                    }
                    // Make the new type param visible for later params/bounds.
                    let sym = Sym::TypeParam(TypeParamId(container_type_params + type_param_id));
                    sym_table.insert(type_param.name, sym);
                }
            } else {
                // `fn f[]()` — empty brackets are rejected.
                let msg = SemError::TypeParamsExpected;
                sa.diag.lock().report(fct.file_id, fct.pos, msg);
            }
        }
        // Resolve parameter types; a variadic parameter must be last.
        for p in &ast.params {
            if fct.is_variadic {
                // A previous parameter was already variadic.
                sa.diag
                    .lock()
                    .report(fct.file_id, p.pos, SemError::VariadicParameterNeedsToBeLast);
            }
            let ty = language::read_type(
                sa,
                &sym_table,
                fct.file_id,
                &p.data_type,
                TypeParamContext::Fct(&*fct),
                // `Self` is only allowed in trait methods.
                if fct.in_trait() {
                    AllowSelf::Yes
                } else {
                    AllowSelf::No
                },
            )
            .unwrap_or(SourceType::Error);
            fct.param_types.push(ty);
            if p.variadic {
                fct.is_variadic = true;
            }
        }
        // Resolve the return type; missing annotation means unit.
        if let Some(ret) = ast.return_type.as_ref() {
            let ty = language::read_type(
                sa,
                &sym_table,
                fct.file_id,
                ret,
                TypeParamContext::Fct(&*fct),
                if fct.in_trait() {
                    AllowSelf::Yes
                } else {
                    AllowSelf::No
                },
            )
            .unwrap_or(SourceType::Error);
            fct.return_type = ty;
        } else {
            fct.return_type = SourceType::Unit;
        }
        fct.initialized = true;
        // Must run after initialization: it inspects param/return types.
        check_test(sa, &*fct);
        // Finally, compare against sibling methods for duplicates.
        match fct.parent {
            FctParent::Class(clsid) => {
                let cls = sa.classes.idx(clsid);
                let cls = cls.read();
                check_against_methods(sa, &*fct, &cls.methods);
            }
            FctParent::Trait(traitid) => {
                let trait_ = sa.traits[traitid].read();
                check_against_methods(sa, &*fct, &trait_.methods);
            }
            FctParent::Impl(implid) => {
                let impl_ = sa.impls[implid].read();
                check_against_methods(sa, &*fct, &impl_.methods);
            }
            _ => {}
        }
    }
}
/// Enforces the rules for `@abstract` methods: no body allowed, and the
/// enclosing class itself must be abstract.
fn check_abstract(sa: &SemAnalysis, fct: &FctDefinition) {
    if !fct.is_abstract {
        return;
    }

    let class_id = fct.cls_id();
    let class = sa.classes.idx(class_id);
    let class = class.read();

    // Abstract methods are pure declarations; an implementation is an error.
    if fct.has_body() {
        sa.diag
            .lock()
            .report(fct.file_id, fct.pos, SemError::AbstractMethodWithImplementation);
    }

    // The surrounding class must be declared abstract as well.
    if !class.is_abstract {
        sa.diag
            .lock()
            .report(fct.file_id, fct.pos, SemError::AbstractMethodNotInAbstractClass);
    }
}
/// Rejects the inheritance-related modifiers that are incompatible with
/// `@static`: `abstract`, `open`, `override` and `final`.
fn check_static(sa: &SemAnalysis, fct: &FctDefinition) {
    if !fct.is_static {
        return;
    }

    // Check the flags in a fixed order and report only the first conflict,
    // matching the original if/else-if priority.
    let conflicting = [
        (fct.is_abstract, "abstract"),
        (fct.is_open, "open"),
        (fct.is_override, "override"),
        (fct.is_final, "final"),
    ];

    for &(flag, name) in conflicting.iter() {
        if flag {
            let msg = SemError::ModifierNotAllowedForStaticMethod(name.into());
            sa.diag.lock().report(fct.file_id, fct.pos, msg);
            return;
        }
    }
}
/// Validates `@test` functions: they must be top-level, non-generic,
/// parameterless and return unit (or an error type that was already
/// reported elsewhere).
fn check_test(sa: &SemAnalysis, fct: &FctDefinition) {
    // Param and return types are only known after initialization.
    debug_assert!(fct.initialized);

    if !fct.is_test {
        return;
    }

    let is_valid_test = fct.parent.is_none()
        && fct.type_params.is_empty()
        && fct.param_types.is_empty()
        && (fct.return_type.is_unit() || fct.return_type.is_error());

    if !is_valid_test {
        sa.diag
            .lock()
            .report(fct.file_id, fct.pos, SemError::InvalidTestAnnotationUsage);
    }
}
/// Reports `MethodExists` when another already-initialized method in the
/// same container has the same name and the same static-ness.
fn check_against_methods(sa: &SemAnalysis, fct: &FctDefinition, methods: &[FctDefinitionId]) {
    // Skip the function itself; it is part of the same method list.
    for &candidate_id in methods.iter().filter(|&&m| m != fct.id()) {
        let candidate = sa.fcts.idx(candidate_id);
        let candidate = candidate.read();

        if candidate.initialized
            && candidate.name == fct.name
            && candidate.is_static == fct.is_static
        {
            let method_name = sa.interner.str(candidate.name).to_string();
            let msg = SemError::MethodExists(method_name, candidate.pos);
            sa.diag.lock().report(fct.file_id, fct.ast.pos, msg);
            // One report per function is enough.
            return;
        }
    }
}
// Unit tests for the function-definition checks: `Self` availability,
// type-parameter handling, modifier combinations and duplicate methods.
// NOTE: the `pos(line, column)` expectations depend on the exact layout of
// the embedded source strings — do not reformat the string literals.
#[cfg(test)]
mod tests {
use crate::language::error::msg::SemError;
use crate::language::tests::*;
// `Self` is unavailable in a free function's parameter list.
#[test]
fn self_param() {
err(
"fn foo(x: Self) {}",
pos(1, 11),
SemError::SelfTypeUnavailable,
);
}
// `Self` is also unavailable as a free function's return type.
#[test]
fn self_return_type() {
err(
"fn foo(): Self {}",
pos(1, 11),
SemError::SelfTypeUnavailable,
);
}
// Despite the name: a static and a non-static method with the same name
// currently clash (MethodExists is reported).
#[test]
fn allow_same_method_as_static_and_non_static() {
err(
"class Foo {
@static fn foo() {}
fn foo() {}
}",
pos(3, 17),
SemError::MethodExists("foo".into(), pos(2, 25)),
);
}
// Type-parameter lists: duplicates are rejected, empty lists are rejected.
#[test]
fn fct_with_type_params() {
ok("fn f[T]() {}");
ok("fn f[X, Y]() {}");
err(
"fn f[T, T]() {}",
pos(1, 9),
SemError::TypeParamNameNotUnique("T".into()),
);
err("fn f[]() {}", pos(1, 1), SemError::TypeParamsExpected);
}
// A type parameter is usable in the parameter annotations.
#[test]
fn fct_with_type_param_in_annotation() {
ok("fn f[T](val: T) {}");
}
// @abstract requires the enclosing class to be abstract.
#[test]
fn abstract_method_in_non_abstract_class() {
err(
"class A { @abstract fn foo(); }",
pos(1, 21),
SemError::AbstractMethodNotInAbstractClass,
);
}
// @abstract methods must not have a body.
#[test]
fn abstract_method_with_implementation() {
err(
"@abstract class A { @abstract fn foo() {} }",
pos(1, 31),
SemError::AbstractMethodWithImplementation,
);
}
// @static excludes @abstract …
#[test]
fn abstract_static_method() {
err(
"@abstract class A { @static @abstract fn foo(); }",
pos(1, 39),
SemError::ModifierNotAllowedForStaticMethod("abstract".into()),
);
}
// … and @open …
#[test]
fn open_static_method() {
err(
"@abstract class A { @static @open fn foo() {} }",
pos(1, 35),
SemError::ModifierNotAllowedForStaticMethod("open".into()),
);
}
// … and @override …
#[test]
fn override_static_method() {
err(
"@abstract class A { @static @override fn foo() {} }",
pos(1, 39),
SemError::ModifierNotAllowedForStaticMethod("override".into()),
);
}
// … and @final.
#[test]
fn final_static_method() {
err(
"@abstract class A { @final @static fn foo() {} }",
pos(1, 36),
SemError::ModifierNotAllowedForStaticMethod("final".into()),
);
}
// Lambda parameter/return types are resolved like ordinary types.
#[test]
fn lambdas() {
ok("fn f() { || {}; }");
ok("fn f() { |a: Int32| {}; }");
ok("fn f() { || -> Int32 { return 2; }; }");
err(
"fn f() { || -> Foo { }; }",
pos(1, 16),
SemError::UnknownIdentifier("Foo".into()),
);
err(
"fn f() { |a: Foo| { }; }",
pos(1, 14),
SemError::UnknownIdentifier("Foo".into()),
);
}
// Bounds must name traits; duplicated bounds are rejected.
#[test]
fn generic_bounds() {
err(
"fn f[T: Foo]() {}",
pos(1, 9),
SemError::UnknownIdentifier("Foo".into()),
);
err(
"class Foo fn f[T: Foo]() {}",
pos(1, 19),
SemError::BoundExpected,
);
ok("trait Foo {} fn f[T: Foo]() {}");
err(
"trait Foo {}
fn f[T: Foo + Foo]() { }",
pos(2, 18),
SemError::DuplicateTraitBound,
);
}
// Type params from a previous definition must not leak into later ones.
#[test]
fn check_previous_defined_type_params() {
// Type params need to be cleaned up such that the following code is an error:
err(
"fn f(a: T) {}",
pos(1, 9),
SemError::UnknownIdentifier("T".into()),
);
}
}
|
extern crate cargotest;
extern crate cargo;
extern crate hamcrest;
use std::str;
use cargo::util::process;
use cargotest::is_nightly;
use cargotest::support::paths::CargoPathExt;
use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest};
use hamcrest::{assert_that, existing_file};
// Integration tests for `cargo bench`. Each test builds a throwaway project
// via the `cargotest` fixture helpers, then asserts on cargo's stderr/stdout.
// All tests bail out on non-nightly toolchains since `#[bench]` requires
// `#![feature(test)]`. NOTE: the raw-string fixtures and the expected-output
// strings are position-sensitive — do not reformat them.
// Smoke test: build, run the binary, then bench it.
#[test]
fn cargo_bench_simple() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
#![feature(test)]
extern crate test;
fn hello() -> &'static str {
"hello"
}
pub fn main() {
println!("{}", hello())
}
#[bench]
fn bench_hello(_b: &mut test::Bencher) {
assert_eq!(hello(), "hello")
}"#);
assert_that(p.cargo_process("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_stdout("hello\n"));
assert_that(p.cargo("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.5.0 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
.with_stdout_contains("test bench_hello ... bench: [..]"));
}
// `--benches` runs only the benches/ targets (run2), not bin or test targets.
#[test]
fn bench_bench_implicit() {
if !is_nightly() { return }
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }
fn main() { println!("Hello main!"); }"#)
.file("tests/other.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run3(_ben: &mut test::Bencher) { }"#)
.file("benches/mybench.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run2(_ben: &mut test::Bencher) { }"#);
assert_that(prj.cargo_process("bench").arg("--benches"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]mybench-[..][EXE]
", dir = prj.url()))
.with_stdout_contains("test run2 ... bench: [..]"));
}
// `--bins` runs only the binary target's benches (run1).
#[test]
fn bench_bin_implicit() {
if !is_nightly() { return }
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }
fn main() { println!("Hello main!"); }"#)
.file("tests/other.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run3(_ben: &mut test::Bencher) { }"#)
.file("benches/mybench.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run2(_ben: &mut test::Bencher) { }"#);
assert_that(prj.cargo_process("bench").arg("--bins"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
", dir = prj.url()))
.with_stdout_contains("test run1 ... bench: [..]"));
}
// `--bench NAME` selects a single bench target by name.
#[test]
fn bench_tarname() {
if !is_nightly() { return }
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("benches/bin1.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }"#)
.file("benches/bin2.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run2(_ben: &mut test::Bencher) { }"#);
assert_that(prj.cargo_process("bench").arg("--bench").arg("bin2"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]bin2-[..][EXE]
", dir = prj.url()))
.with_stdout_contains("test run2 ... bench: [..]"));
}
// `-v` echoes the rustc invocations and the bench filter argument.
#[test]
fn cargo_bench_verbose() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
#![feature(test)]
extern crate test;
fn main() {}
#[bench] fn bench_hello(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench").arg("-v").arg("hello"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.5.0 ({url})
[RUNNING] `rustc [..] src[/]foo.rs [..]`
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] hello --bench`", url = p.url()))
.with_stdout_contains("test bench_hello ... bench: [..]"));
}
// Benches in lib, bin and benches/ with similar names all run.
#[test]
fn many_similar_names() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
pub fn foo() {}
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
")
.file("src/main.rs", "
#![feature(test)]
extern crate foo;
extern crate test;
fn main() {}
#[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() }
")
.file("benches/foo.rs", r#"
#![feature(test)]
extern crate foo;
extern crate test;
#[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() }
"#);
let output = p.cargo_process("bench").exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
assert!(output.contains("test bin_bench"), "bin_bench missing\n{}", output);
assert!(output.contains("test lib_bench"), "lib_bench missing\n{}", output);
assert!(output.contains("test bench_bench"), "bench_bench missing\n{}", output);
}
// A failing assertion inside a bench makes `cargo bench` exit with 101.
// The "src[/]foo.rs:14" expectation depends on the fixture's line count.
#[test]
fn cargo_bench_failing_test() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
#![feature(test)]
extern crate test;
fn hello() -> &'static str {
"hello"
}
pub fn main() {
println!("{}", hello())
}
#[bench]
fn bench_hello(_b: &mut test::Bencher) {
assert_eq!(hello(), "nope")
}"#);
assert_that(p.cargo_process("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_stdout("hello\n"));
assert_that(p.cargo("bench"),
execs().with_stdout_contains("test bench_hello ... ")
.with_stderr_contains(format!("\
[COMPILING] foo v0.5.0 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
thread '[..]' panicked at 'assertion failed: \
`(left == right)`[..]", p.url()))
.with_stderr_contains("[..]left: `\"hello\"`[..]")
.with_stderr_contains("[..]right: `\"nope\"`[..]")
.with_stderr_contains("[..]src[/]foo.rs:14")
.with_status(101));
}
// Both the lib's and the renamed bin's benches run.
#[test]
fn bench_with_lib_dep() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[[bin]]
name = "baz"
path = "src/main.rs"
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate test;
///
/// ```rust
/// extern crate foo;
/// fn main() {
/// println!("{}", foo::foo());
/// }
/// ```
///
pub fn foo(){}
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
"#)
.file("src/main.rs", "
#![feature(test)]
extern crate foo;
extern crate test;
fn main() {}
#[bench]
fn bin_bench(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]baz-[..][EXE]", p.url()))
.with_stdout_contains("test lib_bench ... bench: [..]")
.with_stdout_contains("test bin_bench ... bench: [..]"));
}
// A path dependency is compiled first, then the dependent's bench runs.
#[test]
fn bench_with_deep_lib_dep() {
if !is_nightly() { return }
let p = project("bar")
.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[dependencies.foo]
path = "../foo"
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate foo;
extern crate test;
#[bench]
fn bar_bench(_b: &mut test::Bencher) {
foo::foo();
}
");
let p2 = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
pub fn foo() {}
#[bench]
fn foo_bench(_b: &mut test::Bencher) {}
");
p2.build();
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]", dir = p.url()))
.with_stdout_contains("test bar_bench ... bench: [..]"));
}
// A [[bench]] target with an explicit path is picked up.
#[test]
fn external_bench_explicit() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[[bench]]
name = "bench"
path = "src/bench.rs"
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate test;
pub fn get_hello() -> &'static str { "Hello" }
#[bench]
fn internal_bench(_b: &mut test::Bencher) {}
"#)
.file("src/bench.rs", r#"
#![feature(test)]
extern crate foo;
extern crate test;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", p.url()))
.with_stdout_contains("test internal_bench ... bench: [..]")
.with_stdout_contains("test external_bench ... bench: [..]"));
}
// Files under benches/ are discovered without a manifest entry.
#[test]
fn external_bench_implicit() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate test;
pub fn get_hello() -> &'static str { "Hello" }
#[bench]
fn internal_bench(_b: &mut test::Bencher) {}
"#)
.file("benches/external.rs", r#"
#![feature(test)]
extern crate foo;
extern crate test;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]external-[..][EXE]", p.url()))
.with_stdout_contains("test internal_bench ... bench: [..]")
.with_stdout_contains("test external_bench ... bench: [..]"));
}
// Examples are compiled but never executed by `cargo bench`.
#[test]
fn dont_run_examples() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
"#)
.file("examples/dont-run-me-i-will-fail.rs", r#"
fn main() { panic!("Examples should not be run by 'cargo test'"); }
"#);
assert_that(p.cargo_process("bench"),
execs().with_status(0));
}
// Trailing CLI args filter which bench functions run.
#[test]
fn pass_through_command_line() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
#[bench] fn foo(_b: &mut test::Bencher) {}
#[bench] fn bar(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench").arg("bar"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", dir = p.url()))
.with_stdout_contains("test bar ... bench: [..]"));
assert_that(p.cargo("bench").arg("foo"),
execs().with_status(0)
.with_stderr("[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
.with_stdout_contains("test foo ... bench: [..]"));
}
// Regression test for running cargo-bench twice with
// tests in an rlib
#[test]
fn cargo_bench_twice() {
if !is_nightly() { return }
let p = project("test_twice")
.file("Cargo.toml", &basic_lib_manifest("test_twice"))
.file("src/test_twice.rs", r#"
#![crate_type = "rlib"]
#![feature(test)]
extern crate test;
#[bench]
fn dummy_bench(b: &mut test::Bencher) { }
"#);
p.cargo_process("build");
for _ in 0..2 {
assert_that(p.cargo("bench"),
execs().with_status(0));
}
}
#[test]
fn lib_bin_same_name() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
[[bin]]
name = "foo"
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
")
.file("src/main.rs", "
#![feature(test)]
extern crate foo;
extern crate test;
#[bench]
fn bin_bench(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
.with_stdout_contains_n("test [..] ... bench: [..]", 2));
}
#[test]
fn lib_with_standard_name() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
/// ```
/// syntax::foo();
/// ```
pub fn foo() {}
#[bench]
fn foo_bench(_b: &mut test::Bencher) {}
")
.file("benches/bench.rs", "
#![feature(test)]
extern crate syntax;
extern crate test;
#[bench]
fn bench(_b: &mut test::Bencher) { syntax::foo() }
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]
[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", dir = p.url()))
.with_stdout_contains("test foo_bench ... bench: [..]")
.with_stdout_contains("test bench ... bench: [..]"));
}
#[test]
fn lib_with_standard_name2() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
name = "syntax"
bench = false
doctest = false
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
#![feature(test)]
extern crate syntax;
extern crate test;
fn main() {}
#[bench]
fn bench(_b: &mut test::Bencher) { syntax::foo() }
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]", dir = p.url()))
.with_stdout_contains("test bench ... bench: [..]"));
}
#[test]
fn bench_dylib() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate_type = ["dylib"]
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate bar as the_bar;
extern crate test;
pub fn bar() { the_bar::baz(); }
#[bench]
fn foo(_b: &mut test::Bencher) {}
"#)
.file("benches/bench.rs", r#"
#![feature(test)]
extern crate foo as the_foo;
extern crate test;
#[bench]
fn foo(_b: &mut test::Bencher) { the_foo::bar(); }
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
crate_type = ["dylib"]
"#)
.file("bar/src/lib.rs", "
pub fn baz() {}
");
assert_that(p.cargo_process("bench").arg("-v"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] bar v0.0.1 ({dir}/bar)
[RUNNING] [..] -C opt-level=3 [..]
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] [..] -C opt-level=3 [..]
[RUNNING] [..] -C opt-level=3 [..]
[RUNNING] [..] -C opt-level=3 [..]
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench`
[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", dir = p.url()))
.with_stdout_contains_n("test foo ... bench: [..]", 2));
p.root().move_into_the_past();
assert_that(p.cargo("bench").arg("-v"),
execs().with_status(0)
.with_stderr(&format!("\
[FRESH] bar v0.0.1 ({dir}/bar)
[FRESH] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench`
[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", dir = p.url()))
.with_stdout_contains_n("test foo ... bench: [..]", 2));
}
#[test]
fn bench_twice_with_build_cmd() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
"#)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
#[bench]
fn foo(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", dir = p.url()))
.with_stdout_contains("test foo ... bench: [..]"));
assert_that(p.cargo("bench"),
execs().with_status(0)
.with_stderr("[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
.with_stdout_contains("test foo ... bench: [..]"));
}
#[test]
fn bench_with_examples() {
if !is_nightly() { return }
let p = project("testbench")
.file("Cargo.toml", r#"
[package]
name = "testbench"
version = "6.6.6"
authors = []
[[example]]
name = "teste1"
[[bench]]
name = "testb1"
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate test;
#[cfg(test)]
use test::Bencher;
pub fn f1() {
println!("f1");
}
pub fn f2() {}
#[bench]
fn bench_bench1(_b: &mut Bencher) {
f2();
}
"#)
.file("benches/testb1.rs", "
#![feature(test)]
extern crate testbench;
extern crate test;
use test::Bencher;
#[bench]
fn bench_bench2(_b: &mut Bencher) {
testbench::f2();
}
")
.file("examples/teste1.rs", r#"
extern crate testbench;
fn main() {
println!("example1");
testbench::f1();
}
"#);
assert_that(p.cargo_process("bench").arg("-v"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] testbench v6.6.6 ({url})
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `{dir}[/]target[/]release[/]deps[/]testbench-[..][EXE] --bench`
[RUNNING] `{dir}[/]target[/]release[/]deps[/]testb1-[..][EXE] --bench`",
dir = p.root().display(), url = p.url()))
.with_stdout_contains("test bench_bench1 ... bench: [..]")
.with_stdout_contains("test bench_bench2 ... bench: [..]"));
}
#[test]
fn test_a_bench() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
[lib]
name = "foo"
test = false
doctest = false
[[bench]]
name = "b"
test = true
"#)
.file("src/lib.rs", "")
.file("benches/b.rs", r#"
#[test]
fn foo() {}
"#);
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]")
.with_stdout_contains("test foo ... ok"));
}
#[test]
fn test_bench_no_run() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
"#)
.file("src/lib.rs", "")
.file("benches/bbaz.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_baz(_: &mut Bencher) {}
"#);
assert_that(p.cargo_process("bench").arg("--no-run"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.1.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
"));
}
// `cargo bench -p bar -p baz` should run the bench targets of both selected
// dependency packages; stderr order is unspecified so only *_contains is used.
#[test]
fn test_bench_multiple_packages() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
[dependencies.bar]
path = "../bar"
[dependencies.baz]
path = "../baz"
"#)
.file("src/lib.rs", "");
let bar = project("bar")
.file("Cargo.toml", r#"
[project]
name = "bar"
authors = []
version = "0.1.0"
[[bench]]
name = "bbar"
test = true
"#)
.file("src/lib.rs", "")
.file("benches/bbar.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_bar(_b: &mut Bencher) {}
"#);
bar.build();
let baz = project("baz")
.file("Cargo.toml", r#"
[project]
name = "baz"
authors = []
version = "0.1.0"
[[bench]]
name = "bbaz"
test = true
"#)
.file("src/lib.rs", "")
.file("benches/bbaz.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_baz(_b: &mut Bencher) {}
"#);
baz.build();
assert_that(p.cargo_process("bench").arg("-p").arg("bar").arg("-p").arg("baz"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]bbaz-[..][EXE]")
.with_stdout_contains("test bench_baz ... bench: [..]")
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]bbar-[..][EXE]")
.with_stdout_contains("test bench_bar ... bench: [..]"));
}
// `cargo bench --all` in a workspace root must run both the root package's
// benchmark and the member `bar`'s benchmark.
#[test]
fn bench_all_workspace() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
[workspace]
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("benches/foo.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_foo(_: &mut Bencher) -> () { () }
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.file("bar/benches/bar.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_bar(_: &mut Bencher) -> () { () }
"#);
assert_that(p.cargo_process("bench")
.arg("--all"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]")
.with_stdout_contains("test bench_bar ... bench: [..]")
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
.with_stdout_contains("test bench_foo ... bench: [..]"));
}
#[test]
fn bench_all_exclude() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar", "baz"]
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
#![feature(test)]
extern crate test;
#[bench]
pub fn bar(b: &mut test::Bencher) {
b.iter(|| {});
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.1.0"
"#)
.file("baz/src/lib.rs", r#"
#[test]
pub fn baz() {
break_the_build();
}
"#);
assert_that(p.cargo_process("bench")
.arg("--all")
.arg("--exclude")
.arg("baz"),
execs().with_status(0)
.with_stdout_contains("\
running 1 test
test bar ... bench: [..] ns/iter (+/- [..])"));
}
#[test]
fn bench_all_virtual_manifest() {
if !is_nightly() { return }
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", r#"
pub fn foo() {}
"#)
.file("foo/benches/foo.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_foo(_: &mut Bencher) -> () { () }
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.file("bar/benches/bar.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_bar(_: &mut Bencher) -> () { () }
"#);
// The order in which foo and bar are built is not guaranteed
assert_that(p.cargo_process("bench")
.arg("--all"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]")
.with_stdout_contains("test bench_bar ... bench: [..]")
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
.with_stdout_contains("test bench_foo ... bench: [..]"));
}
Let the cargo_bench_failing_test test tolerate column info in panic locations.
Needed by https://github.com/rust-lang/rust/pull/42938 (panic messages gained `file:line:col`).
extern crate cargotest;
extern crate cargo;
extern crate hamcrest;
use std::str;
use cargo::util::process;
use cargotest::is_nightly;
use cargotest::support::paths::CargoPathExt;
use cargotest::support::{project, execs, basic_bin_manifest, basic_lib_manifest};
use hamcrest::{assert_that, existing_file};
// Smoke test: a binary crate with one #[bench] builds, runs as a normal
// program, and `cargo bench` compiles it in release and reports the bench.
#[test]
fn cargo_bench_simple() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
#![feature(test)]
extern crate test;
fn hello() -> &'static str {
"hello"
}
pub fn main() {
println!("{}", hello())
}
#[bench]
fn bench_hello(_b: &mut test::Bencher) {
assert_eq!(hello(), "hello")
}"#);
assert_that(p.cargo_process("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_stdout("hello\n"));
assert_that(p.cargo("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.5.0 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
.with_stdout_contains("test bench_hello ... bench: [..]"));
}
// `cargo bench --benches` must run only the implicit benches/ target,
// not the bin's #[bench] (run1) nor the integration test's (run3).
#[test]
fn bench_bench_implicit() {
if !is_nightly() { return }
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }
fn main() { println!("Hello main!"); }"#)
.file("tests/other.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run3(_ben: &mut test::Bencher) { }"#)
.file("benches/mybench.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run2(_ben: &mut test::Bencher) { }"#);
assert_that(prj.cargo_process("bench").arg("--benches"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]mybench-[..][EXE]
", dir = prj.url()))
.with_stdout_contains("test run2 ... bench: [..]"));
}
// `cargo bench --bins` is the mirror of bench_bench_implicit: only the
// binary target's bench (run1) runs, not the benches/ or tests/ targets.
#[test]
fn bench_bin_implicit() {
if !is_nightly() { return }
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }
fn main() { println!("Hello main!"); }"#)
.file("tests/other.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run3(_ben: &mut test::Bencher) { }"#)
.file("benches/mybench.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run2(_ben: &mut test::Bencher) { }"#);
assert_that(prj.cargo_process("bench").arg("--bins"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
", dir = prj.url()))
.with_stdout_contains("test run1 ... bench: [..]"));
}
// `cargo bench --bench <name>` selects a single bench target by name:
// only benches/bin2.rs should be compiled into a runner and executed.
#[test]
fn bench_tarname() {
if !is_nightly() { return }
let prj = project("foo")
.file("Cargo.toml" , r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("benches/bin1.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run1(_ben: &mut test::Bencher) { }"#)
.file("benches/bin2.rs", r#"
#![feature(test)]
extern crate test;
#[bench] fn run2(_ben: &mut test::Bencher) { }"#);
assert_that(prj.cargo_process("bench").arg("--bench").arg("bin2"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]bin2-[..][EXE]
", dir = prj.url()))
.with_stdout_contains("test run2 ... bench: [..]"));
}
// `cargo bench -v <filter>` must echo the underlying rustc invocation and
// pass the filter ("hello") plus `--bench` through to the test binary.
#[test]
fn cargo_bench_verbose() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
#![feature(test)]
extern crate test;
fn main() {}
#[bench] fn bench_hello(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench").arg("-v").arg("hello"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.5.0 ({url})
[RUNNING] `rustc [..] src[/]foo.rs [..]`
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] hello --bench`", url = p.url()))
.with_stdout_contains("test bench_hello ... bench: [..]"));
}
// Lib, bin, and benches/ targets all named around "foo": all three bench
// functions must appear in the combined stdout, i.e. none shadows another.
#[test]
fn many_similar_names() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
pub fn foo() {}
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
")
.file("src/main.rs", "
#![feature(test)]
extern crate foo;
extern crate test;
fn main() {}
#[bench] fn bin_bench(_b: &mut test::Bencher) { foo::foo() }
")
.file("benches/foo.rs", r#"
#![feature(test)]
extern crate foo;
extern crate test;
#[bench] fn bench_bench(_b: &mut test::Bencher) { foo::foo() }
"#);
// Raw output inspection rather than execs(): we only care that every bench
// name shows up somewhere, regardless of target run order.
let output = p.cargo_process("bench").exec_with_output().unwrap();
let output = str::from_utf8(&output.stdout).unwrap();
assert!(output.contains("test bin_bench"), "bin_bench missing\n{}", output);
assert!(output.contains("test lib_bench"), "lib_bench missing\n{}", output);
assert!(output.contains("test bench_bench"), "bench_bench missing\n{}", output);
}
// A panicking #[bench] must make `cargo bench` exit 101 and surface the
// assertion message; the `src[/]foo.rs:14[..]` pattern ends with a wildcard
// so trailing column info in the panic location is tolerated.
#[test]
fn cargo_bench_failing_test() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
#![feature(test)]
extern crate test;
fn hello() -> &'static str {
"hello"
}
pub fn main() {
println!("{}", hello())
}
#[bench]
fn bench_hello(_b: &mut test::Bencher) {
assert_eq!(hello(), "nope")
}"#);
assert_that(p.cargo_process("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_stdout("hello\n"));
assert_that(p.cargo("bench"),
execs().with_stdout_contains("test bench_hello ... ")
.with_stderr_contains(format!("\
[COMPILING] foo v0.5.0 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
thread '[..]' panicked at 'assertion failed: \
`(left == right)`[..]", p.url()))
.with_stderr_contains("[..]left: `\"hello\"`[..]")
.with_stderr_contains("[..]right: `\"nope\"`[..]")
.with_stderr_contains("[..]src[/]foo.rs:14[..]")
.with_status(101));
}
// A bin (renamed to "baz") depending on the package's own lib: `cargo bench`
// must run the lib's bench runner first, then the bin's.
#[test]
fn bench_with_lib_dep() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[[bin]]
name = "baz"
path = "src/main.rs"
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate test;
///
/// ```rust
/// extern crate foo;
/// fn main() {
/// println!("{}", foo::foo());
/// }
/// ```
///
pub fn foo(){}
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
"#)
.file("src/main.rs", "
#![feature(test)]
extern crate foo;
extern crate test;
fn main() {}
#[bench]
fn bin_bench(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]baz-[..][EXE]", p.url()))
.with_stdout_contains("test lib_bench ... bench: [..]")
.with_stdout_contains("test bin_bench ... bench: [..]"));
}
#[test]
fn bench_with_deep_lib_dep() {
if !is_nightly() { return }
let p = project("bar")
.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[dependencies.foo]
path = "../foo"
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate foo;
extern crate test;
#[bench]
fn bar_bench(_b: &mut test::Bencher) {
foo::foo();
}
");
let p2 = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
pub fn foo() {}
#[bench]
fn foo_bench(_b: &mut test::Bencher) {}
");
p2.build();
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ([..])
[COMPILING] bar v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]", dir = p.url()))
.with_stdout_contains("test bar_bench ... bench: [..]"));
}
// A [[bench]] target with an explicit non-default path (src/bench.rs) is
// discovered and run alongside the lib's internal bench.
#[test]
fn external_bench_explicit() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[[bench]]
name = "bench"
path = "src/bench.rs"
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate test;
pub fn get_hello() -> &'static str { "Hello" }
#[bench]
fn internal_bench(_b: &mut test::Bencher) {}
"#)
.file("src/bench.rs", r#"
#![feature(test)]
extern crate foo;
extern crate test;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", p.url()))
.with_stdout_contains("test internal_bench ... bench: [..]")
.with_stdout_contains("test external_bench ... bench: [..]"));
}
// Same as external_bench_explicit but relying on the implicit benches/
// directory convention instead of a [[bench]] manifest entry.
#[test]
fn external_bench_implicit() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate test;
pub fn get_hello() -> &'static str { "Hello" }
#[bench]
fn internal_bench(_b: &mut test::Bencher) {}
"#)
.file("benches/external.rs", r#"
#![feature(test)]
extern crate foo;
extern crate test;
#[bench]
fn external_bench(_b: &mut test::Bencher) {}
"#);
assert_that(p.cargo_process("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]external-[..][EXE]", p.url()))
.with_stdout_contains("test internal_bench ... bench: [..]")
.with_stdout_contains("test external_bench ... bench: [..]"));
}
// Examples must never be executed by `cargo bench`; the example panics on
// run, so a 0 exit status proves it was (at most) only compiled.
#[test]
fn dont_run_examples() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", r#"
"#)
.file("examples/dont-run-me-i-will-fail.rs", r#"
fn main() { panic!("Examples should not be run by 'cargo test'"); }
"#);
assert_that(p.cargo_process("bench"),
execs().with_status(0));
}
// A positional filter argument ("bar", then "foo") is forwarded to the bench
// binary so only the matching bench runs; the second invocation is a fresh
// run of the already-built binary ([FINISHED] only, no [COMPILING]).
#[test]
fn pass_through_command_line() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
#[bench] fn foo(_b: &mut test::Bencher) {}
#[bench] fn bar(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench").arg("bar"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", dir = p.url()))
.with_stdout_contains("test bar ... bench: [..]"));
assert_that(p.cargo("bench").arg("foo"),
execs().with_status(0)
.with_stderr("[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
.with_stdout_contains("test foo ... bench: [..]"));
}
// Regression test for running cargo-bench twice with
// tests in an rlib
#[test]
fn cargo_bench_twice() {
if !is_nightly() { return }
let p = project("test_twice")
.file("Cargo.toml", &basic_lib_manifest("test_twice"))
.file("src/test_twice.rs", r#"
#![crate_type = "rlib"]
#![feature(test)]
extern crate test;
#[bench]
fn dummy_bench(b: &mut test::Bencher) { }
"#);
p.cargo_process("build");
// Both runs must succeed; the second exercises the cached/rebuild path.
for _ in 0..2 {
assert_that(p.cargo("bench"),
execs().with_status(0));
}
}
// Lib and bin both named "foo": both bench runners are built and executed
// (two [RUNNING] foo-... lines, two bench results) without name collision.
#[test]
fn lib_bin_same_name() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
[[bin]]
name = "foo"
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
#[bench] fn lib_bench(_b: &mut test::Bencher) {}
")
.file("src/main.rs", "
#![feature(test)]
extern crate foo;
extern crate test;
#[bench]
fn bin_bench(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", p.url()))
.with_stdout_contains_n("test [..] ... bench: [..]", 2));
}
// A lib whose crate name ("syntax") shadows a rustc-internal crate name:
// benches in the lib and in benches/ must still link against it correctly.
#[test]
fn lib_with_standard_name() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
/// ```
/// syntax::foo();
/// ```
pub fn foo() {}
#[bench]
fn foo_bench(_b: &mut test::Bencher) {}
")
.file("benches/bench.rs", "
#![feature(test)]
extern crate syntax;
extern crate test;
#[bench]
fn bench(_b: &mut test::Bencher) { syntax::foo() }
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]
[RUNNING] target[/]release[/]deps[/]bench-[..][EXE]", dir = p.url()))
.with_stdout_contains("test foo_bench ... bench: [..]")
.with_stdout_contains("test bench ... bench: [..]"));
}
// Variant of lib_with_standard_name: the lib has `bench = false`, so only
// the binary's bench runner should execute.
#[test]
fn lib_with_standard_name2() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
[lib]
name = "syntax"
bench = false
doctest = false
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
#![feature(test)]
extern crate syntax;
extern crate test;
fn main() {}
#[bench]
fn bench(_b: &mut test::Bencher) { syntax::foo() }
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]syntax-[..][EXE]", dir = p.url()))
.with_stdout_contains("test bench ... bench: [..]"));
}
// Benching a dylib crate that depends on another dylib: verifies release
// (-C opt-level=3) compilation of all four units, and that a second `bench`
// after move_into_the_past() is fully fresh ([FRESH], no recompilation).
#[test]
fn bench_dylib() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate_type = ["dylib"]
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate bar as the_bar;
extern crate test;
pub fn bar() { the_bar::baz(); }
#[bench]
fn foo(_b: &mut test::Bencher) {}
"#)
.file("benches/bench.rs", r#"
#![feature(test)]
extern crate foo as the_foo;
extern crate test;
#[bench]
fn foo(_b: &mut test::Bencher) { the_foo::bar(); }
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
crate_type = ["dylib"]
"#)
.file("bar/src/lib.rs", "
pub fn baz() {}
");
assert_that(p.cargo_process("bench").arg("-v"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] bar v0.0.1 ({dir}/bar)
[RUNNING] [..] -C opt-level=3 [..]
[COMPILING] foo v0.0.1 ({dir})
[RUNNING] [..] -C opt-level=3 [..]
[RUNNING] [..] -C opt-level=3 [..]
[RUNNING] [..] -C opt-level=3 [..]
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench`
[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", dir = p.url()))
.with_stdout_contains_n("test foo ... bench: [..]", 2));
// Backdate mtimes so the rebuild check below must rely on freshness
// tracking rather than timestamps racing with the first build.
p.root().move_into_the_past();
assert_that(p.cargo("bench").arg("-v"),
execs().with_status(0)
.with_stderr(&format!("\
[FRESH] bar v0.0.1 ({dir}/bar)
[FRESH] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `[..]target[/]release[/]deps[/]foo-[..][EXE] --bench`
[RUNNING] `[..]target[/]release[/]deps[/]bench-[..][EXE] --bench`", dir = p.url()))
.with_stdout_contains_n("test foo ... bench: [..]", 2));
}
// With a build script present, the second `cargo bench` must not recompile
// anything (stderr shows only [FINISHED] + [RUNNING]).
#[test]
fn bench_twice_with_build_cmd() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
build = "build.rs"
"#)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "
#![feature(test)]
extern crate test;
#[bench]
fn foo(_b: &mut test::Bencher) {}
");
assert_that(p.cargo_process("bench"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.1 ({dir})
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]", dir = p.url()))
.with_stdout_contains("test foo ... bench: [..]"));
assert_that(p.cargo("bench"),
execs().with_status(0)
.with_stderr("[FINISHED] release [optimized] target(s) in [..]
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
.with_stdout_contains("test foo ... bench: [..]"));
}
// Package with a lib bench, a [[bench]] target, and an example: `cargo bench
// -v` compiles all three (three rustc invocations) but runs only the two
// bench binaries — the example is built, never executed.
#[test]
fn bench_with_examples() {
if !is_nightly() { return }
let p = project("testbench")
.file("Cargo.toml", r#"
[package]
name = "testbench"
version = "6.6.6"
authors = []
[[example]]
name = "teste1"
[[bench]]
name = "testb1"
"#)
.file("src/lib.rs", r#"
#![feature(test)]
extern crate test;
#[cfg(test)]
use test::Bencher;
pub fn f1() {
println!("f1");
}
pub fn f2() {}
#[bench]
fn bench_bench1(_b: &mut Bencher) {
f2();
}
"#)
.file("benches/testb1.rs", "
#![feature(test)]
extern crate testbench;
extern crate test;
use test::Bencher;
#[bench]
fn bench_bench2(_b: &mut Bencher) {
testbench::f2();
}
")
.file("examples/teste1.rs", r#"
extern crate testbench;
fn main() {
println!("example1");
testbench::f1();
}
"#);
assert_that(p.cargo_process("bench").arg("-v"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] testbench v6.6.6 ({url})
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[RUNNING] `rustc [..]`
[FINISHED] release [optimized] target(s) in [..]
[RUNNING] `{dir}[/]target[/]release[/]deps[/]testbench-[..][EXE] --bench`
[RUNNING] `{dir}[/]target[/]release[/]deps[/]testb1-[..][EXE] --bench`",
dir = p.root().display(), url = p.url()))
.with_stdout_contains("test bench_bench1 ... bench: [..]")
.with_stdout_contains("test bench_bench2 ... bench: [..]"));
}
// A [[bench]] target with `test = true` must be compiled and run by
// `cargo test` (dev profile), even when the lib has testing disabled.
#[test]
fn test_a_bench() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
[lib]
name = "foo"
test = false
doctest = false
[[bench]]
name = "b"
test = true
"#)
.file("src/lib.rs", "")
.file("benches/b.rs", r#"
#[test]
fn foo() {}
"#);
assert_that(p.cargo_process("test"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
[RUNNING] target[/]debug[/]deps[/]b-[..][EXE]")
.with_stdout_contains("test foo ... ok"));
}
// `cargo bench --no-run` compiles the bench target in release but must not
// execute it (no [RUNNING] line in stderr).
#[test]
fn test_bench_no_run() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
"#)
.file("src/lib.rs", "")
.file("benches/bbaz.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_baz(_: &mut Bencher) {}
"#);
assert_that(p.cargo_process("bench").arg("--no-run"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.1.0 ([..])
[FINISHED] release [optimized] target(s) in [..]
"));
}
// `cargo bench -p bar -p baz` runs the bench targets of both selected
// dependency packages; order is unspecified, hence only *_contains checks.
#[test]
fn test_bench_multiple_packages() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
[dependencies.bar]
path = "../bar"
[dependencies.baz]
path = "../baz"
"#)
.file("src/lib.rs", "");
let bar = project("bar")
.file("Cargo.toml", r#"
[project]
name = "bar"
authors = []
version = "0.1.0"
[[bench]]
name = "bbar"
test = true
"#)
.file("src/lib.rs", "")
.file("benches/bbar.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_bar(_b: &mut Bencher) {}
"#);
bar.build();
let baz = project("baz")
.file("Cargo.toml", r#"
[project]
name = "baz"
authors = []
version = "0.1.0"
[[bench]]
name = "bbaz"
test = true
"#)
.file("src/lib.rs", "")
.file("benches/bbaz.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_baz(_b: &mut Bencher) {}
"#);
baz.build();
assert_that(p.cargo_process("bench").arg("-p").arg("bar").arg("-p").arg("baz"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]bbaz-[..][EXE]")
.with_stdout_contains("test bench_baz ... bench: [..]")
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]bbar-[..][EXE]")
.with_stdout_contains("test bench_bar ... bench: [..]"));
}
// `cargo bench --all` in a workspace root runs both the root package's
// benchmark and the member `bar`'s benchmark.
#[test]
fn bench_all_workspace() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
[workspace]
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("benches/foo.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_foo(_: &mut Bencher) -> () { () }
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.file("bar/benches/bar.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_bar(_: &mut Bencher) -> () { () }
"#);
assert_that(p.cargo_process("bench")
.arg("--all"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]")
.with_stdout_contains("test bench_bar ... bench: [..]")
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
.with_stdout_contains("test bench_foo ... bench: [..]"));
}
// `cargo bench --all --exclude baz` must skip the excluded member; `baz`
// intentionally fails to compile (undefined break_the_build()), so success
// proves exclusion happens before compilation.
#[test]
fn bench_all_exclude() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar", "baz"]
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
#![feature(test)]
extern crate test;
#[bench]
pub fn bar(b: &mut test::Bencher) {
b.iter(|| {});
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.1.0"
"#)
.file("baz/src/lib.rs", r#"
#[test]
pub fn baz() {
break_the_build();
}
"#);
assert_that(p.cargo_process("bench")
.arg("--all")
.arg("--exclude")
.arg("baz"),
execs().with_status(0)
.with_stdout_contains("\
running 1 test
test bar ... bench: [..] ns/iter (+/- [..])"));
}
// `cargo bench --all` from a virtual manifest (workspace without a root
// package) runs every member's benchmarks.
#[test]
fn bench_all_virtual_manifest() {
if !is_nightly() { return }
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", r#"
pub fn foo() {}
"#)
.file("foo/benches/foo.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_foo(_: &mut Bencher) -> () { () }
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.file("bar/benches/bar.rs", r#"
#![feature(test)]
extern crate test;
use test::Bencher;
#[bench]
fn bench_bar(_: &mut Bencher) -> () { () }
"#);
// The order in which foo and bar are built is not guaranteed
// (hence only *_contains assertions below, never a full-stderr match).
assert_that(p.cargo_process("bench")
.arg("--all"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]bar-[..][EXE]")
.with_stdout_contains("test bench_bar ... bench: [..]")
.with_stderr_contains("\
[RUNNING] target[/]release[/]deps[/]foo-[..][EXE]")
.with_stdout_contains("test bench_foo ... bench: [..]"));
}
|
extern crate cargo;
#[macro_use]
extern crate cargotest;
extern crate hamcrest;
extern crate tempdir;
use std::env;
use std::fs::{self, File};
use std::io::prelude::*;
use cargo::util::paths::dylib_path_envvar;
use cargo::util::{process, ProcessBuilder};
use cargotest::{is_nightly, rustc_host, sleep_ms};
use cargotest::support::paths::{CargoPathExt,root};
use cargotest::support::{ProjectBuilder};
use cargotest::support::{project, execs, main_file, basic_bin_manifest};
use cargotest::support::registry::Package;
use cargotest::ChannelChanger;
use hamcrest::{assert_that, existing_file, existing_dir, is_not};
use tempdir::TempDir;
// Smoke test: a minimal binary crate builds, the executable lands in the
// expected location, and it runs with the expected stdout.
#[test]
fn cargo_compile_simple() {
    let manifest = basic_bin_manifest("foo");
    let source = main_file(r#""i am foo""#, &[]);
    let proj = project("foo")
        .file("Cargo.toml", &manifest)
        .file("src/foo.rs", &source)
        .build();

    assert_that(proj.cargo("build"), execs().with_status(0));

    // The built binary must exist and print its greeting.
    assert_that(&proj.bin("foo"), existing_file());
    assert_that(process(&proj.bin("foo")),
                execs().with_status(0).with_stdout("i am foo\n"));
}
// With --message-format=json, compiler errors are reported as JSON on stdout;
// the raw "--- stderr" dump must not leak into cargo's stderr output.
#[test]
fn cargo_fail_with_no_stderr() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
// "refusal" is intentionally invalid Rust so the build fails.
.file("src/foo.rs", &String::from("refusal"))
.build();
assert_that(p.cargo("build").arg("--message-format=json"), execs().with_status(101)
.with_stderr_does_not_contain("--- stderr"));
}
/// Check that the `CARGO_INCREMENTAL` environment variable results in
/// `rustc` getting `-Zincremental` passed to it.
#[test]
fn cargo_compile_incremental() {
// Incremental compilation is nightly-only at this point.
if !is_nightly() {
return
}
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
// Both `build` and `test` invocations must forward the incremental flag.
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
execs().with_stderr_contains(
"[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n")
.with_status(0));
assert_that(
p.cargo("test").arg("-v").env("CARGO_INCREMENTAL", "1"),
execs().with_stderr_contains(
"[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n")
.with_status(0));
}
// Per-profile `incremental` manifest settings control the default
// (dev: off, release: on here), and CARGO_INCREMENTAL overrides them
// in both directions.
#[test]
fn incremental_profile() {
if !is_nightly() {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[profile.dev]
incremental = false
[profile.release]
incremental = true
"#)
.file("src/main.rs", "fn main() {}")
.build();
// dev profile: disabled by manifest when the env var is absent...
assert_that(
p.cargo("build").arg("-v").env_remove("CARGO_INCREMENTAL"),
execs().with_stderr_does_not_contain("[..]C incremental=[..]")
.with_status(0));
// ...but CARGO_INCREMENTAL=1 forces it on.
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
execs().with_stderr_contains("[..]C incremental=[..]")
.with_status(0));
// release profile: enabled by manifest by default...
assert_that(
p.cargo("build").arg("--release").arg("-v").env_remove("CARGO_INCREMENTAL"),
execs().with_stderr_contains("[..]C incremental=[..]")
.with_status(0));
// ...and CARGO_INCREMENTAL=0 forces it off.
assert_that(
p.cargo("build").arg("--release").arg("-v").env("CARGO_INCREMENTAL", "0"),
execs().with_stderr_does_not_contain("[..]C incremental=[..]")
.with_status(0));
}
// `.cargo/config`'s `[build] incremental = false` disables incremental by
// default, but the CARGO_INCREMENTAL env var still takes precedence.
#[test]
fn incremental_config() {
if !is_nightly() {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.file(".cargo/config", r#"
[build]
incremental = false
"#)
.build();
assert_that(
p.cargo("build").arg("-v").env_remove("CARGO_INCREMENTAL"),
execs().with_stderr_does_not_contain("[..]C incremental=[..]")
.with_status(0));
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
execs().with_stderr_contains("[..]C incremental=[..]")
.with_status(0));
}
// `--manifest-path` must let cargo build a package from a working directory
// outside the package root (here: the project's parent directory).
#[test]
fn cargo_compile_manifest_path() {
    let proj = project("foo")
        .file("Cargo.toml", &basic_bin_manifest("foo"))
        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
        .build();

    let build = proj.cargo("build")
        .arg("--manifest-path").arg("foo/Cargo.toml")
        .cwd(proj.root().parent().unwrap());
    assert_that(build, execs().with_status(0));
    assert_that(&proj.bin("foo"), existing_file());
}
// An empty Cargo.toml is treated as a virtual manifest and rejected with a
// specific "must be configured with [workspace]" error.
#[test]
fn cargo_compile_with_invalid_manifest() {
let p = project("foo")
.file("Cargo.toml", "")
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
virtual manifests must be configured with [workspace]
"))
}
// Syntactically invalid TOML (bare `bar` value) must produce a parse error
// chain that points at the offending line number.
#[test]
fn cargo_compile_with_invalid_manifest2() {
let p = project("foo")
.file("Cargo.toml", r"
[project]
foo = bar
")
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
could not parse input as TOML
Caused by:
invalid number at line 3
"))
}
// Same TOML parse-error chain as above, but for a manifest selected
// explicitly via --manifest-path (src/Cargo.toml).
#[test]
fn cargo_compile_with_invalid_manifest3() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/Cargo.toml", "a = bar")
.build();
assert_that(p.cargo("build").arg("--manifest-path")
.arg("src/Cargo.toml"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
could not parse input as TOML
Caused by:
invalid number at line 1
"))
}
// src/main.rs doubles as both the dylib [lib] path and the implicit bin:
// cargo should warn about the duplicated source file but still build (exit 0).
#[test]
fn cargo_compile_duplicate_build_targets() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "main"
path = "src/main.rs"
crate-type = ["dylib"]
[dependencies]
"#)
.file("src/main.rs", r#"
#![allow(warnings)]
fn main() {}
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(0)
.with_stderr("\
warning: file found to be present in multiple build targets: [..]main.rs
[COMPILING] foo v0.0.1 ([..])
[FINISHED] [..]
"));
}
// A two-component version ("1.0") is not valid semver; the manifest parse
// must fail with the "Expected dot" diagnostic naming the key.
#[test]
fn cargo_compile_with_invalid_version() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "1.0"
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Expected dot for key `project.version`
"))
}
// An empty package name must be rejected at manifest-parse time.
#[test]
fn cargo_compile_with_invalid_package_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = ""
authors = []
version = "0.0.0"
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
package name cannot be an empty string
"))
}
// An empty [[bin]] target name must be rejected at manifest-parse time.
#[test]
fn cargo_compile_with_invalid_bin_target_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = ""
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
binary target names cannot be empty
"))
}
// `build` is a reserved binary name (it would collide with build scripts);
// using it must be a manifest error.
#[test]
fn cargo_compile_with_forbidden_bin_target_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "build"
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
the binary target name `build` is forbidden
"))
}
// An empty [lib] target name must be rejected at manifest-parse time.
#[test]
fn cargo_compile_with_invalid_lib_target_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[lib]
name = ""
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
library target names cannot be empty
"))
}
// Running `cargo build` in a directory tree with no Cargo.toml anywhere
// up to the root must fail with the "could not find" error.
#[test]
fn cargo_compile_without_manifest() {
// fresh TempDir guarantees no ancestor directory contains a manifest
let tmpdir = TempDir::new("cargo").unwrap();
let p = ProjectBuilder::new("foo", tmpdir.path().to_path_buf()).build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr("\
[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory
"));
}
// rustc failure must propagate as a cargo error, yet the lockfile should
// still have been written before compilation started.
#[test]
fn cargo_compile_with_invalid_code() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", "invalid rust code!")
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr_contains("\
[ERROR] Could not compile `foo`.
To learn more, run the command again with --verbose.\n"));
// resolution succeeded even though compilation failed
assert_that(&p.root().join("Cargo.lock"), existing_file());
}
// Broken code in path dependencies must make the top-level build fail.
#[test]
fn cargo_compile_with_invalid_code_in_deps() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "../bar"
[dependencies.baz]
path = "../baz"
"#)
.file("src/main.rs", "invalid rust code!")
.build();
// sibling projects referenced by relative path; bound to _ to keep the
// TempDir-backed fixtures alive for the duration of the test
let _bar = project("bar")
.file("Cargo.toml", &basic_bin_manifest("bar"))
.file("src/lib.rs", "invalid rust code!")
.build();
let _baz = project("baz")
.file("Cargo.toml", &basic_bin_manifest("baz"))
.file("src/lib.rs", "invalid rust code!")
.build();
assert_that(p.cargo("build"), execs().with_status(101));
}
// rustc warnings (dead code here) must be forwarded to stderr while the
// build still succeeds.
#[test]
fn cargo_compile_with_warnings_in_the_root_package() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", "fn main() {} fn dead() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(0).with_stderr_contains("\
[..]function is never used: `dead`[..]
"));
}
// Warnings originating in a path dependency are forwarded too, and the
// final binary still builds and runs correctly.
#[test]
fn cargo_compile_with_warnings_in_a_dep_package() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = "bar"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "bar"
"#)
.file("bar/src/bar.rs", r#"
pub fn gimme() -> &'static str {
"test passed"
}
fn dead() {}
"#)
.build();
assert_that(p.cargo("build"),
execs().with_status(0).with_stderr_contains("\
[..]function is never used: `dead`[..]
"));
assert_that(&p.bin("foo"), existing_file());
assert_that(
process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Two-level path-dependency chain (foo -> bar -> baz) where both libs use
// the inferred src/lib.rs layout; only the bin should land in target root.
#[test]
fn cargo_compile_with_nested_deps_inferred() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = 'bar'
[[bin]]
name = "foo"
"#)
.file("src/foo.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.baz]
path = "../baz"
"#)
.file("bar/src/lib.rs", r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("baz/src/lib.rs", r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#)
.build();
p.cargo("build")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), existing_file());
// dep rlibs live under deps/, not next to the binary
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Same nested-deps scenario but the bin source is src/main.rs, checking
// the inferred main binary is the one that gets built and linked.
#[test]
fn cargo_compile_with_nested_deps_correct_bin() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = "bar"
[[bin]]
name = "foo"
"#)
.file("src/main.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.baz]
path = "../baz"
"#)
.file("bar/src/lib.rs", r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("baz/src/lib.rs", r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#)
.build();
p.cargo("build")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Nested deps where each lib uses the legacy src/<name>.rs layout with an
// explicit [lib] name; build and run should still succeed.
#[test]
fn cargo_compile_with_nested_deps_shorthand() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = "bar"
"#)
.file("src/main.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.baz]
path = "../baz"
[lib]
name = "bar"
"#)
.file("bar/src/bar.rs", r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "baz"
"#)
.file("baz/src/baz.rs", r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#)
.build();
p.cargo("build")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Fully-spelled-out dependency declarations (path AND version for each
// path dep) through a two-level chain; build and run must succeed.
#[test]
fn cargo_compile_with_nested_deps_longhand() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = "bar"
version = "0.5.0"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.baz]
path = "../baz"
version = "0.5.0"
[lib]
name = "bar"
"#)
.file("bar/src/bar.rs", r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "baz"
"#)
.file("baz/src/baz.rs", r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#)
.build();
assert_that(p.cargo("build"), execs());
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Check that Cargo gives a sensible error if a dependency can't be found
// because of a name mismatch.
#[test]
fn cargo_compile_with_dep_name_mismatch() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = ["wycats@example.com"]
[[bin]]
name = "foo"
[dependencies.notquitebar]
path = "bar"
"#)
.file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"]))
.file("bar/Cargo.toml", &basic_bin_manifest("bar"))
.file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[]))
.build();
assert_that(p.cargo("build"),
execs().with_status(101).with_stderr(&format!(
r#"[ERROR] no matching package named `notquitebar` found (required by `foo`)
location searched: {proj_dir}/bar
version required: *
"#, proj_dir = p.url())));
}
// Passing a filename (with .rs) instead of a target name to --bin or
// --example must fail, suggesting the correct target name when it is a
// near match.
#[test]
fn cargo_compile_with_filename() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", r#"
extern crate foo;
fn main() { println!("hello a.rs"); }
"#)
.file("examples/a.rs", r#"
fn main() { println!("example"); }
"#)
.build();
// no such target at all: plain error, no suggestion
assert_that(p.cargo("build").arg("--bin").arg("bin.rs"),
execs().with_status(101).with_stderr("\
[ERROR] no bin target named `bin.rs`"));
// filename of an existing target: error plus "Did you mean" hint
assert_that(p.cargo("build").arg("--bin").arg("a.rs"),
execs().with_status(101).with_stderr("\
[ERROR] no bin target named `a.rs`
Did you mean `a`?"));
assert_that(p.cargo("build").arg("--example").arg("example.rs"),
execs().with_status(101).with_stderr("\
[ERROR] no example target named `example.rs`"));
assert_that(p.cargo("build").arg("--example").arg("a.rs"),
execs().with_status(101).with_stderr("\
[ERROR] no example target named `a.rs`
Did you mean `a`?"));
}
// Pure path dependencies need no registry access, so -Zoffline must work.
#[test]
fn cargo_compile_path_with_offline() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", "")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", "")
.build();
// -Z flags require the nightly-cargo masquerade in the test harness
assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(0));
}
// A registry dep already in the local cache (downloaded by a prior online
// build) must be usable by a different project under -Zoffline.
#[test]
fn cargo_compile_with_downloaded_dependency_with_offline() {
Package::new("present_dep", "1.2.3")
.file("Cargo.toml", r#"
[project]
name = "present_dep"
version = "1.2.3"
"#)
.file("src/lib.rs", "")
.publish();
{
// make package downloaded
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "1.2.3"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"),execs().with_status(0));
}
// second project, built offline, resolves from the warm cache
let p2 = project("bar")
.file("Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
present_dep = "1.2.3"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p2.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] present_dep v1.2.3
[COMPILING] bar v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")));
}
// With -Zoffline and an uncached dep, cargo must fail with the offline
// reminder instead of attempting a registry update.
#[test]
fn cargo_compile_offline_not_try_update() {
let p = project("bar")
.file("Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
not_cached_dep = "1.2.5"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(101)
.with_stderr("\
error: no matching package named `not_cached_dep` found (required by `bar`)
location searched: registry `[..]`
version required: ^1.2.5
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
without the offline flag."));
// NOTE(review): "may with to retry" above mirrors a typo in cargo's actual
// error text; only fix it here in lockstep with the source message.
}
// Offline resolution must pick the newest version that is actually cached
// (1.2.3), not the newest published version (1.2.5).
#[test]
fn compile_offline_without_maxvers_cached(){
Package::new("present_dep", "1.2.1").publish();
Package::new("present_dep", "1.2.2").publish();
Package::new("present_dep", "1.2.3")
.file("Cargo.toml", r#"
[project]
name = "present_dep"
version = "1.2.3"
"#)
.file("src/lib.rs", r#"pub fn get_version()->&'static str {"1.2.3"}"#)
.publish();
Package::new("present_dep", "1.2.5")
.file("Cargo.toml", r#"
[project]
name = "present_dep"
version = "1.2.5"
"#)
// NOTE(review): this body would not typecheck (no return type) —
// presumably intentional, since the test requires 1.2.5 is never built.
.file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#)
.publish();
{
// make package cached
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "=1.2.3"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"),execs().with_status(0));
}
// same dep requested as "1.2": only 1.2.3 is in the cache
let p2 = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "1.2"
"#)
.file("src/main.rs", "\
extern crate present_dep;
fn main(){
println!(\"{}\", present_dep::get_version());
}")
.build();
assert_that(p2.cargo("run").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] present_dep v1.2.3
[COMPILING] foo v0.1.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
Running `[..]`", url = p2.url()))
.with_stdout("1.2.3")
);
}
// Simulates a partially-populated cache: foo downloaded but its transitive
// dep bar truncated to zero bytes. Offline build must fail with the
// resolution error rather than trying to fetch bar.
#[test]
fn compile_offline_while_transitive_dep_not_cached() {
let bar = Package::new("bar", "1.0.0");
let bar_path = bar.archive_dst();
bar.publish();
// stash bar's archive bytes, then truncate the file in place
let mut content = Vec::new();
let mut file = File::open(bar_path.clone()).ok().unwrap();
let _ok = file.read_to_end(&mut content).ok().unwrap();
drop(file);
drop(File::create(bar_path.clone()).ok().unwrap() );
Package::new("foo", "0.1.0").dep("bar", "1.0.0").publish();
let p = project("transitive_load_test")
.file("Cargo.toml", r#"
[project]
name = "transitive_load_test"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
"#)
.file("src/main.rs", "fn main(){}")
.build();
// simulate download foo, but fail to download bar
let _out = p.cargo("build").exec_with_output();
// restore bar's archive so only the cache (not the registry) is stale
drop( File::create(bar_path).ok().unwrap().write_all(&content) );
assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(101)
.with_stderr("\
error: no matching package named `bar` found (required by `foo`)
location searched: registry `[..]`
version required: = 1.0.0
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
without the offline flag."));
// NOTE(review): "may with to retry" mirrors cargo's actual (typo'd) output.
}
// After a successful build, bumping a path dep's version out from under the
// lockfile must produce the "no matching version" error with a hint to run
// `cargo update`.
#[test]
fn compile_path_dep_then_change_version() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", "")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
// rewrite bar's manifest with a different version than the lockfile holds
File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#"
[package]
name = "bar"
version = "0.0.2"
authors = []
"#).unwrap();
assert_that(p.cargo("build"),
execs().with_status(101).with_stderr("\
[ERROR] no matching version `= 0.0.1` found for package `bar` (required by `foo`)
location searched: [..]
versions found: 0.0.2
consider running `cargo update` to update a path dependency's locked version
"));
}
// A lockfile rewritten with CRLF line endings must still parse and not
// force a rebuild failure on the next build.
#[test]
fn ignores_carriage_return_in_lockfile() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
"#)
.file("src/main.rs", r#"
mod a; fn main() {}
"#)
.file("src/a.rs", "")
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
// convert the generated lockfile to CRLF in place
let lockfile = p.root().join("Cargo.lock");
let mut lock = String::new();
File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap();
let lock = lock.replace("\n", "\r\n");
File::create(&lockfile).unwrap().write_all(lock.as_bytes()).unwrap();
assert_that(p.cargo("build"),
execs().with_status(0));
}
// Checks the __CARGO_DEFAULT_LIB_METADATA env var: without it a dylib path
// dep gets no -C metadata hash in its filename; with it the filename gains
// a hash (note `bar{suffix}` vs `bar-[..]{suffix}` in the two expectations).
#[test]
fn cargo_default_env_metadata_env_var() {
// Ensure that path dep + dylib + env_var get metadata
// (even though path_dep + dylib should not)
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", "// hi")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
crate_type = ["dylib"]
"#)
.file("bar/src/lib.rs", "// hello")
.build();
// No metadata on libbar since it's a dylib path dependency
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] bar v0.0.1 ({url}/bar)
[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \
--emit=dep-info,link \
-C prefer-dynamic -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps \
--extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
dir = p.root().display(),
url = p.url(),
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX,
)));
assert_that(p.cargo("clean"), execs().with_status(0));
// If you set the env-var, then we expect metadata on libbar
assert_that(p.cargo("build").arg("-v").env("__CARGO_DEFAULT_LIB_METADATA", "stable"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] bar v0.0.1 ({url}/bar)
[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \
--emit=dep-info,link \
-C prefer-dynamic -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps \
--extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar-[..]{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX,
)));
}
// Verifies all CARGO_PKG_* / CARGO_MANIFEST_DIR env vars are set at
// compile time and agree between the bin and lib targets of one package.
#[test]
fn crate_env_vars() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.1-alpha.1"
description = "This is foo"
homepage = "http://example.com"
authors = ["wycats@example.com"]
"#)
.file("src/main.rs", r#"
extern crate foo;
static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR");
static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR");
static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH");
static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE");
static VERSION: &'static str = env!("CARGO_PKG_VERSION");
static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR");
static PKG_NAME: &'static str = env!("CARGO_PKG_NAME");
static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE");
static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION");
fn main() {
let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR,
VERSION_MINOR, VERSION_PATCH, VERSION_PRE,
CARGO_MANIFEST_DIR);
assert_eq!(s, foo::version());
println!("{}", s);
assert_eq!("foo", PKG_NAME);
assert_eq!("http://example.com", HOMEPAGE);
assert_eq!("This is foo", DESCRIPTION);
let s = format!("{}.{}.{}-{}", VERSION_MAJOR,
VERSION_MINOR, VERSION_PATCH, VERSION_PRE);
assert_eq!(s, VERSION);
}
"#)
.file("src/lib.rs", r#"
pub fn version() -> String {
format!("{}-{}-{} @ {} in {}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_MINOR"),
env!("CARGO_PKG_VERSION_PATCH"),
env!("CARGO_PKG_VERSION_PRE"),
env!("CARGO_MANIFEST_DIR"))
}
"#)
.build();
println!("build");
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
println!("bin");
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout(&format!("0-5-1 @ alpha.1 in {}\n",
p.root().display())));
println!("test");
assert_that(p.cargo("test").arg("-v"),
execs().with_status(0));
}
// CARGO_PKG_AUTHORS must join multiple authors with ':' and be identical
// in both the bin and lib compilation units.
#[test]
fn crate_authors_env_vars() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.1-alpha.1"
authors = ["wycats@example.com", "neikos@example.com"]
"#)
.file("src/main.rs", r#"
extern crate foo;
static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS");
fn main() {
let s = "wycats@example.com:neikos@example.com";
assert_eq!(AUTHORS, foo::authors());
println!("{}", AUTHORS);
assert_eq!(s, AUTHORS);
}
"#)
.file("src/lib.rs", r#"
pub fn authors() -> String {
format!("{}", env!("CARGO_PKG_AUTHORS"))
}
"#)
.build();
println!("build");
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
println!("bin");
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("wycats@example.com:neikos@example.com"));
println!("test");
assert_that(p.cargo("test").arg("-v"),
execs().with_status(0));
}
// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error
// Test helper: strips empty entries out of the dynamic-library search path
// env var before running a cargo command, so a tester's pre-existing
// `LD_LIBRARY_PATH=::...` cannot cause a false positive in path tests.
fn setenv_for_removing_empty_component(mut p: ProcessBuilder) -> ProcessBuilder {
let v = dylib_path_envvar();
if let Ok(search_path) = env::var(v) {
let new_search_path =
env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty()))
.expect("join_paths");
p.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly
}
p
}
// Regression test for #4277
#[test]
fn crate_library_path_env_var() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", &format!(r##"
fn main() {{
let search_path = env!("{}");
let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
assert!(!paths.contains(&"".into()));
}}
"##, dylib_path_envvar()))
.build();
assert_that(setenv_for_removing_empty_component(p.cargo("run")),
execs().with_status(0));
}
// Regression test for #4277
#[test]
fn build_with_fake_libc_not_loading() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("src/lib.rs", r#" "#)
.file("libc.so.6", r#""#)
.build();
assert_that(setenv_for_removing_empty_component(p.cargo("build")),
execs().with_status(0));
}
// this is testing that src/<pkg-name>.rs still works (for now)
// Legacy src/<pkg-name>.rs lib location must still build both crate types,
// but with a deprecation warning pointing at src/lib.rs.
#[test]
fn many_crate_types_old_style_lib_location() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "foo"
crate_type = ["rlib", "dylib"]
"#)
.file("src/foo.rs", r#"
pub fn foo() {}
"#)
.build();
assert_that(p.cargo("build"), execs().with_status(0).with_stderr_contains("\
[WARNING] path `[..]src[/]foo.rs` was erroneously implicitly accepted for library `foo`,
please rename the file to `src/lib.rs` or set lib.path in Cargo.toml"));
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
// dylib filename is platform-dependent (lib*.so / *.dll / lib*.dylib)
let fname = format!("{}foo{}", env::consts::DLL_PREFIX,
env::consts::DLL_SUFFIX);
assert_that(&p.root().join("target/debug").join(&fname), existing_file());
}
// Same rlib+dylib combination with the standard src/lib.rs layout: both
// artifacts must be produced, with no warning expected.
#[test]
fn many_crate_types_correct() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "foo"
crate_type = ["rlib", "dylib"]
"#)
.file("src/lib.rs", r#"
pub fn foo() {}
"#)
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
let fname = format!("{}foo{}", env::consts::DLL_PREFIX,
env::consts::DLL_SUFFIX);
assert_that(&p.root().join("target/debug").join(&fname), existing_file());
}
// A package that path-depends on itself must be rejected as a cyclic
// dependency, not recurse forever.
#[test]
fn self_dependency() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[dependencies.test]
path = "."
[lib]
name = "test"
path = "src/test.rs"
"#)
.file("src/test.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr("\
[ERROR] cyclic package dependency: package `test v0.0.0 ([..])` depends on itself
"));
}
// A dangling symlink in the project root must not break the build.
#[test]
fn ignore_broken_symlinks() {
// windows and symlinks don't currently agree that well
if cfg!(windows) { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
// symlink to a target that does not exist
.symlink("Notafile", "bar")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("i am foo\n"));
}
// A package with no lib, no bin, and no inferable sources must fail with
// the "no targets specified" manifest error.
#[test]
fn missing_lib_and_bin() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
"#)
.build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]Cargo.toml`
Caused by:
no targets specified in the manifest
either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n"));
}
// `lto = true` in [profile.release] must translate into `-C lto` on the
// rustc command line for a release build.
#[test]
fn lto_build() {
// FIXME: currently this hits a linker bug on 32-bit MSVC
if cfg!(all(target_env = "msvc", target_pointer_width = "32")) {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[profile.release]
lto = true
"#)
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v").arg("--release"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]main.rs --crate-type bin \
--emit=dep-info,link \
-C opt-level=3 \
-C lto \
-C metadata=[..] \
--out-dir {dir}[/]target[/]release[/]deps \
-L dependency={dir}[/]target[/]release[/]deps`
[FINISHED] release [optimized] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
)));
}
// `cargo build -v` must echo the exact rustc invocation for a debug build.
#[test]
fn verbose_build() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
)));
}
// `cargo build -v --release` must show opt-level=3 and the release deps dir.
#[test]
fn verbose_release_build() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v").arg("--release"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]release[/]deps`
[FINISHED] release [optimized] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
)));
}
// Release build with a dylib+rlib dependency: verbose output must show the
// dep compiled with -C prefer-dynamic and the root crate linked against
// BOTH artifacts via two --extern flags.
#[test]
fn verbose_release_build_deps() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[dependencies.foo]
path = "foo"
"#)
.file("src/lib.rs", "")
.file("foo/Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[lib]
name = "foo"
crate_type = ["dylib", "rlib"]
"#)
.file("foo/src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v").arg("--release"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] foo v0.0.0 ({url}/foo)
[RUNNING] `rustc --crate-name foo foo[/]src[/]lib.rs \
--crate-type dylib --crate-type rlib \
--emit=dep-info,link \
-C prefer-dynamic \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]release[/]deps`
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]release[/]deps \
--extern foo={dir}[/]target[/]release[/]deps[/]{prefix}foo{suffix} \
--extern foo={dir}[/]target[/]release[/]deps[/]libfoo.rlib`
[FINISHED] release [optimized] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX)));
}
// [[example]] entries with explicit non-default paths must compile under
// `cargo test` and produce runnable binaries named after the example, not
// the source file.
#[test]
fn explicit_examples() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "world"
version = "1.0.0"
authors = []
[lib]
name = "world"
path = "src/lib.rs"
[[example]]
name = "hello"
path = "examples/ex-hello.rs"
[[example]]
name = "goodbye"
path = "examples/ex-goodbye.rs"
"#)
.file("src/lib.rs", r#"
pub fn get_hello() -> &'static str { "Hello" }
pub fn get_goodbye() -> &'static str { "Goodbye" }
pub fn get_world() -> &'static str { "World" }
"#)
.file("examples/ex-hello.rs", r#"
extern crate world;
fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); }
"#)
.file("examples/ex-goodbye.rs", r#"
extern crate world;
fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); }
"#)
.build();
assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
assert_that(process(&p.bin("examples/hello")),
execs().with_status(0).with_stdout("Hello, World!\n"));
assert_that(process(&p.bin("examples/goodbye")),
execs().with_status(0).with_stdout("Goodbye, World!\n"));
}
#[test]
fn non_existing_example() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "world"
version = "1.0.0"
authors = []
[lib]
name = "world"
path = "src/lib.rs"
[[example]]
name = "hello"
"#)
.file("src/lib.rs", "")
.file("examples/ehlo.rs", "")
.build();
assert_that(p.cargo("test").arg("-v"), execs().with_status(101).with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `hello` example, specify example.path"));
}
#[test]
fn non_existing_binary() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "world"
version = "1.0.0"
authors = []
[[bin]]
name = "hello"
"#)
.file("src/lib.rs", "")
.file("src/bin/ehlo.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(101).with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `hello` bin, specify bin.path"));
}
// Each of the three legacy implicit bin locations (src/main.rs,
// src/bin/main.rs, src/<bin-name>.rs) must still build a [[bin]] whose
// name doesn't match, but with a deprecation warning asking for bin.path.
//
// Fix: the function name was misspelled `legacy_binary_paths_warinigs`;
// renamed to `legacy_binary_paths_warnings`. Tests are discovered via the
// #[test] attribute, so no caller depends on the old name.
#[test]
fn legacy_binary_paths_warnings() {
// case 1: src/main.rs accepted for a differently-named bin
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
[WARNING] path `[..]src[/]main.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml"));
// case 2: src/bin/main.rs
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#)
.file("src/lib.rs", "")
.file("src/bin/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
[WARNING] path `[..]src[/]bin[/]main.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml"));
// case 3: src/<bin-name>.rs
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#)
.file("src/bar.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
[WARNING] path `[..]src[/]bar.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml"));
}
// Examples under examples/ are discovered implicitly (no [[example]]
// sections in the manifest), compiled by `cargo test`, and the resulting
// binaries are runnable with the expected output.
#[test]
fn implicit_examples() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "world"
version = "1.0.0"
authors = []
"#)
.file("src/lib.rs", r#"
pub fn get_hello() -> &'static str { "Hello" }
pub fn get_goodbye() -> &'static str { "Goodbye" }
pub fn get_world() -> &'static str { "World" }
"#)
.file("examples/hello.rs", r#"
extern crate world;
fn main() {
println!("{}, {}!", world::get_hello(), world::get_world());
}
"#)
.file("examples/goodbye.rs", r#"
extern crate world;
fn main() {
println!("{}, {}!", world::get_goodbye(), world::get_world());
}
"#)
.build();
// `cargo test` builds examples too; then run the produced example binaries.
assert_that(p.cargo("test"), execs().with_status(0));
assert_that(process(&p.bin("examples/hello")),
execs().with_status(0).with_stdout("Hello, World!\n"));
assert_that(process(&p.bin("examples/goodbye")),
execs().with_status(0).with_stdout("Goodbye, World!\n"));
}
// A default (dev-profile) build keeps debug_assertions enabled, so the
// cfg!(debug_assertions) branch prints "slow".
#[test]
fn standard_build_no_ndebug() {
let p = project("world")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
fn main() {
if cfg!(debug_assertions) {
println!("slow")
} else {
println!("fast")
}
}
"#)
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("slow\n"));
}
// A `--release` build disables debug_assertions, so the same program as
// `standard_build_no_ndebug` prints "fast" instead.
#[test]
fn release_build_ndebug() {
let p = project("world")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
fn main() {
if cfg!(debug_assertions) {
println!("slow")
} else {
println!("fast")
}
}
"#)
.build();
assert_that(p.cargo("build").arg("--release"),
execs().with_status(0));
// Note: the binary is looked up in the release output directory.
assert_that(process(&p.release_bin("foo")),
execs().with_status(0).with_stdout("fast\n"));
}
// A binary target named after the package is inferred from src/main.rs
// even though the manifest declares no [[bin]] section.
#[test]
fn inferred_main_bin() {
    let manifest = r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#;
    let main_src = r#"
fn main() {}
"#;
    let proj = project("world")
        .file("Cargo.toml", manifest)
        .file("src/main.rs", main_src)
        .build();
    // The build succeeds and the inferred `foo` binary runs cleanly.
    assert_that(proj.cargo("build"), execs().with_status(0));
    assert_that(process(&proj.bin("foo")), execs().with_status(0));
}
// After a successful build, removing the `bar` dependency from the manifest
// while src/main.rs still says `extern crate bar` must make the next build
// fail (status 101).
#[test]
fn deletion_causes_failure() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/main.rs", r#"
extern crate bar;
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
// Rewrite the manifest without the dependency; the source still needs it.
p.change_file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#);
assert_that(p.cargo("build"), execs().with_status(101));
}
// A garbage Cargo.toml sitting inside target/ must be ignored by cargo and
// must not break the build of the real package.
#[test]
fn bad_cargo_toml_in_target_dir() {
let p = project("world")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("target/Cargo.toml", "bad-toml")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(process(&p.bin("foo")), execs().with_status(0));
}
// A package named `syntax` with the default src/lib.rs + src/main.rs layout
// builds both targets in one compilation pass, with the expected stderr.
#[test]
fn lib_with_standard_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
extern crate syntax;
fn main() { syntax::foo() }
")
.build();
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
dir = p.url())));
}
// A lib with crate-type = ["staticlib"] builds successfully.
#[test]
fn simple_staticlib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
crate-type = ["staticlib"]
"#)
.file("src/lib.rs", "pub fn foo() {}")
.build();
// env var is a test for #1381
assert_that(p.cargo("build").env("RUST_LOG", "nekoneko=trace"),
execs().with_status(0));
}
// A lib built as both staticlib and rlib can still be linked by a binary
// in the same package (the bin links against the rlib).
#[test]
fn staticlib_rlib_and_bin() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
crate-type = ["staticlib", "rlib"]
"#)
.file("src/lib.rs", "pub fn foo() {}")
.file("src/main.rs", r#"
extern crate foo;
fn main() {
foo::foo();
}"#)
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
// Setting top-level `bin = []` opts out of binary auto-discovery: the
// syntactically invalid src/main.rs is never compiled, so the build passes.
#[test]
fn opt_out_of_bin() {
let p = project("foo")
.file("Cargo.toml", r#"
bin = []
[package]
name = "foo"
authors = []
version = "0.0.1"
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "bad syntax")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
}
// An explicit `[lib] path = "src/bar.rs"` overrides the default src/lib.rs
// location and the package builds from it.
#[test]
fn single_lib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
path = "src/bar.rs"
"#)
.file("src/bar.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
}
// Files matched by the manifest's `exclude` globs must not affect freshness:
// after a clean build, touching an excluded file (src/bar.rs, matched by
// "src/b*.rs") must not trigger a rebuild.
#[test]
fn freshness_ignores_excluded() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = "build.rs"
exclude = ["src/b*.rs"]
"#)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.build();
// Backdate mtimes so later file creation registers as "newer".
foo.root().move_into_the_past();
assert_that(foo.cargo("build"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
", url = foo.url())));
// Smoke test to make sure it doesn't compile again
println!("first pass");
assert_that(foo.cargo("build"),
execs().with_status(0)
.with_stdout(""));
// Modify an ignored file and make sure we don't rebuild
println!("second pass");
File::create(&foo.root().join("src/bar.rs")).unwrap();
assert_that(foo.cargo("build"),
execs().with_status(0)
.with_stdout(""));
}
// Verifies that a build script's OUT_DIR contents survive across rebuilds:
// the first build (FIRST set) creates OUT_DIR/foo; the second build (FIRST
// unset) must be able to *open* that same file, proving cargo did not wipe
// OUT_DIR between compilations.
//
// Bug fix: the original build script called `File::create` in BOTH branches,
// so the non-FIRST run would happily recreate the file and the test never
// actually checked preservation. The else-branch must be `File::open`, which
// panics (failing the build) if the file from the first run is gone.
#[test]
fn rebuild_preserves_out_dir() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = 'build.rs'
"#)
.file("build.rs", r#"
use std::env;
use std::fs::File;
use std::path::Path;
fn main() {
let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo");
if env::var_os("FIRST").is_some() {
File::create(&path).unwrap();
} else {
File::open(&path).unwrap();
}
}
"#)
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.build();
foo.root().move_into_the_past();
// First build: build script creates OUT_DIR/foo.
assert_that(foo.cargo("build").env("FIRST", "1"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
", url = foo.url())));
// Touch a new source file to force a recompile, then rebuild without FIRST:
// the build script now requires OUT_DIR/foo to still exist.
File::create(&foo.root().join("src/bar.rs")).unwrap();
assert_that(foo.cargo("build"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
", url = foo.url())));
}
// A path dependency that provides only a binary (no library target) is
// still accepted; the parent package builds fine.
#[test]
fn dep_no_libs() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.0"
authors = []
"#)
.file("bar/src/main.rs", "")
.build();
assert_that(foo.cargo("build"),
execs().with_status(0));
}
// A lib source path containing a space ("src/my lib.rs") builds, and the
// fingerprint machinery still recognizes it as fresh on the second build.
#[test]
fn recompile_space_in_name() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[lib]
name = "foo"
path = "src/my lib.rs"
"#)
.file("src/my lib.rs", "")
.build();
assert_that(foo.cargo("build"), execs().with_status(0));
foo.root().move_into_the_past();
// No output expected: nothing changed, so nothing recompiles.
assert_that(foo.cargo("build"),
execs().with_status(0).with_stdout(""));
}
// Unix-only: a directory in the project root that is unreadable/untraversable
// (mode 0o644, no execute bit) must not break the build. Permissions are
// restored afterwards so test cleanup can delete the tree.
#[cfg(unix)]
#[test]
fn ignore_bad_directories() {
use std::os::unix::prelude::*;
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.build();
let dir = foo.root().join("tmp");
fs::create_dir(&dir).unwrap();
let stat = fs::metadata(&dir).unwrap();
let mut perms = stat.permissions();
// 0o644 on a directory removes the execute (search) permission.
perms.set_mode(0o644);
fs::set_permissions(&dir, perms.clone()).unwrap();
assert_that(foo.cargo("build"),
execs().with_status(0));
// Restore traversal rights so the test harness can remove the directory.
perms.set_mode(0o755);
fs::set_permissions(&dir, perms).unwrap();
}
// An unparseable .cargo/config must fail the build with the full chain of
// "Caused by" messages identifying the TOML parse error.
#[test]
fn bad_cargo_config() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.file(".cargo/config", r#"
this is not valid toml
"#)
.build();
assert_that(foo.cargo("build").arg("-v"),
execs().with_status(101).with_stderr("\
[ERROR] Couldn't load Cargo configuration
Caused by:
could not parse TOML configuration in `[..]`
Caused by:
could not parse input as TOML
Caused by:
expected an equals, found an identifier at line 2
"));
}
// Platform-specific dependency tables keyed by the *current* host triple
// ([target.<host>.dependencies / build-dependencies / dev-dependencies])
// are all honored: the bin, build script, and test each link their crate.
#[test]
fn cargo_platform_specific_dependency() {
let host = rustc_host();
let p = project("foo")
.file("Cargo.toml", &format!(r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
build = "build.rs"
[target.{host}.dependencies]
dep = {{ path = "dep" }}
[target.{host}.build-dependencies]
build = {{ path = "build" }}
[target.{host}.dev-dependencies]
dev = {{ path = "dev" }}
"#, host = host))
.file("src/main.rs", r#"
extern crate dep;
fn main() { dep::dep() }
"#)
.file("tests/foo.rs", r#"
extern crate dev;
#[test]
fn foo() { dev::dev() }
"#)
.file("build.rs", r#"
extern crate build;
fn main() { build::build(); }
"#)
.file("dep/Cargo.toml", r#"
[project]
name = "dep"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("dep/src/lib.rs", "pub fn dep() {}")
.file("build/Cargo.toml", r#"
[project]
name = "build"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("build/src/lib.rs", "pub fn build() {}")
.file("dev/Cargo.toml", r#"
[project]
name = "dev"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("dev/src/lib.rs", "pub fn dev() {}")
.build();
// `build` pulls in dep + build-dep; `test` additionally needs the dev-dep.
assert_that(p.cargo("build"),
execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
assert_that(p.cargo("test"),
execs().with_status(0));
}
// A dependency under [target.wrong-target...] is never activated, so the
// `extern crate bar` in src/main.rs cannot resolve and the build fails.
#[test]
fn bad_platform_specific_dependency() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[target.wrong-target.dependencies.bar]
path = "bar"
"#)
.file("src/main.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("bar/src/lib.rs", r#"
extern crate baz;
pub fn gimme() -> String {
format!("")
}
"#)
.build();
assert_that(p.cargo("build"),
execs().with_status(101));
}
// A dependency gated on a non-matching target triple is never compiled
// (its source is intentionally invalid Rust), yet it still appears in the
// generated Cargo.lock.
#[test]
fn cargo_platform_specific_dependency_wrong_platform() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[target.non-existing-triplet.dependencies.bar]
path = "bar"
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("bar/src/lib.rs", r#"
invalid rust file, should not be compiled
"#)
.build();
p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_status(0));
// The inactive dependency must still be recorded in the lockfile.
let loc = p.root().join("Cargo.lock");
let mut lockfile = String::new();
File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap();
assert!(lockfile.contains("bar"))
}
// An [[example]] with crate-type = ["lib"] produces a library artifact
// instead of an example binary.
#[test]
fn example_as_lib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["lib"]
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
assert_that(&p.example_lib("ex", "lib"), existing_file());
}
// An [[example]] with crate-type = ["rlib"] produces an rlib artifact.
#[test]
fn example_as_rlib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["rlib"]
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
assert_that(&p.example_lib("ex", "rlib"), existing_file());
}
// An [[example]] with crate-type = ["dylib"] produces a dynamic library.
#[test]
fn example_as_dylib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["dylib"]
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
assert_that(&p.example_lib("ex", "dylib"), existing_file());
}
// An [[example]] with crate-type = ["proc-macro"] builds as a proc-macro
// artifact. Skipped on stable: proc_macro was nightly-only at the time.
#[test]
fn example_as_proc_macro() {
if !is_nightly() {
return;
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["proc-macro"]
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "#![feature(proc_macro)]")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
assert_that(&p.example_lib("ex", "proc-macro"), existing_file());
}
// A binary and an example sharing the name "foo" must not collide under
// `cargo test --no-run`: only the example artifact lands at its plain path,
// and the result is stable across a second invocation.
#[test]
fn example_bin_same_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.file("examples/foo.rs", "fn main() {}")
.build();
p.cargo("test").arg("--no-run").arg("-v")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), is_not(existing_file()));
// We expect a file of the form bin/foo-{metadata_hash}
assert_that(&p.bin("examples/foo"), existing_file());
// Repeat to confirm the layout is unchanged on a warm build.
p.cargo("test").arg("--no-run").arg("-v")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), is_not(existing_file()));
// We expect a file of the form bin/foo-{metadata_hash}
assert_that(&p.bin("examples/foo"), existing_file());
}
// Deleting the compiled binary after a successful `cargo run` must cause the
// next `cargo run` to relink/reproduce it rather than fail.
#[test]
fn compile_then_delete() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("run").arg("-v"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
if cfg!(windows) {
// On windows unlinking immediately after running often fails, so sleep
sleep_ms(100);
}
fs::remove_file(&p.bin("foo")).unwrap();
assert_that(p.cargo("run").arg("-v"),
execs().with_status(0));
}
// Transitive dependencies are not exposed to the root crate: foo depends on
// aaaaa, which depends on bbbbb, but foo's own `extern crate bbbbb` must
// fail with "can't find crate".
#[test]
fn transitive_dependencies_not_available() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.aaaaa]
path = "a"
"#)
.file("src/main.rs", "extern crate bbbbb; extern crate aaaaa; fn main() {}")
.file("a/Cargo.toml", r#"
[package]
name = "aaaaa"
version = "0.0.1"
authors = []
[dependencies.bbbbb]
path = "../b"
"#)
.file("a/src/lib.rs", "extern crate bbbbb;")
.file("b/Cargo.toml", r#"
[package]
name = "bbbbb"
version = "0.0.1"
authors = []
"#)
.file("b/src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(101)
.with_stderr_contains("\
[..] can't find crate for `bbbbb`[..]
"));
}
// A dependency cycle (foo -> a -> foo) is rejected with a dedicated
// "cyclic package dependency" error rather than looping forever.
#[test]
fn cyclic_deps_rejected() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.a]
path = "a"
"#)
.file("src/lib.rs", "")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[dependencies.foo]
path = ".."
"#)
.file("a/src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(101)
.with_stderr("\
[ERROR] cyclic package dependency: package `a v0.0.1 ([..])` depends on itself
"));
}
// Library artifacts use predictable, unhashed filenames: libfoo.rlib for the
// rlib and the platform's DLL prefix/suffix for the dylib.
#[test]
fn predictable_filenames() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate-type = ["dylib", "rlib"]
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0));
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
// Dylib name is platform-dependent (e.g. libfoo.so / foo.dll / libfoo.dylib).
let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX,
env::consts::DLL_SUFFIX);
assert_that(&p.root().join("target/debug").join(dylib_name),
existing_file());
}
// A package named "foo-bar" exposes its lib as `foo_bar` (dash mapped to
// underscore), while the produced binary keeps the dashed name.
#[test]
fn dashes_to_underscores() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo-bar"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo_bar; fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0));
assert_that(&p.bin("foo-bar"), existing_file());
}
// An explicit [lib] name containing a dash ("foo-bar") is not a valid crate
// name, so the build must fail.
#[test]
fn dashes_in_crate_name_bad() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo-bar"
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo_bar; fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(101));
}
// The RUSTC environment variable overrides the compiler: pointing it at a
// nonexistent executable makes the build fail while probing `rustc -vV`.
#[test]
fn rustc_env_var() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build")
.env("RUSTC", "rustc-that-does-not-exist").arg("-v"),
execs().with_status(101)
.with_stderr("\
[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..])
Caused by:
[..]
"));
// NOTE(review): the package declares no binary `a`; presumably this just
// asserts that no artifact was produced at all — confirm the intended name.
assert_that(&p.bin("a"), is_not(existing_file()));
}
// Target filtering flags: `--lib` builds only the library; then a combined
// `--bin=a --example=a` builds exactly those two targets and nothing else.
#[test]
fn filtering() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--lib"),
execs().with_status(0));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(p.cargo("build").arg("--bin=a").arg("--example=a"),
execs().with_status(0));
assert_that(&p.bin("a"), existing_file());
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/a"), existing_file());
assert_that(&p.bin("examples/b"), is_not(existing_file()));
}
// `cargo build --bins` builds all implicit binaries and skips all examples.
#[test]
fn filtering_implicit_bins() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--bins"),
execs().with_status(0));
assert_that(&p.bin("a"), existing_file());
assert_that(&p.bin("b"), existing_file());
assert_that(&p.bin("examples/a"), is_not(existing_file()));
assert_that(&p.bin("examples/b"), is_not(existing_file()));
}
// `cargo build --examples` builds all implicit examples and skips binaries.
#[test]
fn filtering_implicit_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--examples"),
execs().with_status(0));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/a"), existing_file());
assert_that(&p.bin("examples/b"), existing_file());
}
// Files whose names start with a dot (e.g. ".a.rs", as left behind by
// editors) inside src/bin/ must not be treated as binary targets.
#[test]
fn ignore_dotfile() {
    let manifest = r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#;
    let proj = project("foo")
        .file("Cargo.toml", manifest)
        .file("src/bin/.a.rs", "")
        .file("src/bin/a.rs", "fn main() {}")
        .build();
    // Only src/bin/a.rs is compiled; the dotfile is skipped silently.
    assert_that(proj.cargo("build"), execs().with_status(0));
}
// Cargo.toml files inside dot-directories (.git/, .pc/) must not be treated
// as package manifests during workspace/package discovery.
#[test]
fn ignore_dotdirs() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/bin/a.rs", "fn main() {}")
.file(".git/Cargo.toml", "")
.file(".pc/dummy-fix.patch/Cargo.toml", "")
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
}
// A project whose root directory itself is a dot-directory (".foo") still
// builds normally — only *contained* dot entries are skipped.
#[test]
fn dotdir_root() {
let p = ProjectBuilder::new("foo", root().join(".foo"))
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/bin/a.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
}
// Target-directory precedence: CARGO_TARGET_DIR redirects output; a plain
// build uses the default target/; and the env var overrides the
// `build.target-dir` setting in .cargo/config.
#[test]
fn custom_target_dir() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.build();
let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
// Env var alone: artifacts go under foo/target, not target/.
assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"),
execs().with_status(0));
assert_that(&p.root().join("foo/target/debug").join(&exe_name),
existing_file());
assert_that(&p.root().join("target/debug").join(&exe_name),
is_not(existing_file()));
// No env var: the default target/ is used (previous output remains).
assert_that(p.cargo("build"),
execs().with_status(0));
assert_that(&p.root().join("foo/target/debug").join(&exe_name),
existing_file());
assert_that(&p.root().join("target/debug").join(&exe_name),
existing_file());
// Config sets foo/target, but the env var (bar/target) takes precedence.
fs::create_dir(p.root().join(".cargo")).unwrap();
File::create(p.root().join(".cargo/config")).unwrap().write_all(br#"
[build]
target-dir = "foo/target"
"#).unwrap();
assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "bar/target"),
execs().with_status(0));
assert_that(&p.root().join("bar/target/debug").join(&exe_name),
existing_file());
assert_that(&p.root().join("foo/target/debug").join(&exe_name),
existing_file());
assert_that(&p.root().join("target/debug").join(&exe_name),
existing_file());
}
// Extra flags after `--` in `cargo rustc` are forwarded to rustc; -Zno-trans
// is nightly-only, hence the early return on stable.
#[test]
fn rustc_no_trans() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"),
execs().with_status(0));
}
// Multiple `-p` flags select several packages in one invocation: foo and
// both of its path dependencies d1/d2 produce runnable binaries.
#[test]
fn build_multiple_packages() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[dependencies.d2]
path = "d2"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.file("d1/Cargo.toml", r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
[[bin]]
name = "d1"
"#)
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
.file("d2/Cargo.toml", r#"
[package]
name = "d2"
version = "0.0.1"
authors = []
[[bin]]
name = "d2"
doctest = false
"#)
.file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
.build();
assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("d2")
.arg("-p").arg("foo"),
execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("i am foo\n"));
// Dependency binaries land directly in target/debug with the platform
// executable suffix.
let d1_path = &p.build_dir().join("debug")
.join(format!("d1{}", env::consts::EXE_SUFFIX));
let d2_path = &p.build_dir().join("debug")
.join(format!("d2{}", env::consts::EXE_SUFFIX));
assert_that(d1_path, existing_file());
assert_that(process(d1_path), execs().with_status(0).with_stdout("d1"));
assert_that(d2_path, existing_file());
assert_that(process(d2_path),
execs().with_status(0).with_stdout("d2"));
}
// A `-p` spec that matches no package fails with a clear error, both alone
// and when mixed with a valid spec.
#[test]
fn invalid_spec() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[[bin]]
name = "foo"
"#)
.file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[]))
.file("d1/Cargo.toml", r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
[[bin]]
name = "d1"
"#)
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
.build();
assert_that(p.cargo("build").arg("-p").arg("notAValidDep"),
execs().with_status(101).with_stderr("\
[ERROR] package id specification `notAValidDep` matched no packages
"));
assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("notAValidDep"),
execs().with_status(101).with_stderr("\
[ERROR] package id specification `notAValidDep` matched no packages
"));
}
// A Cargo.toml that starts with a UTF-8 byte-order mark (U+FEFF) must still
// parse and build.
#[test]
fn manifest_with_bom_is_ok() {
let p = project("foo")
.file("Cargo.toml", "\u{FEFF}
[package]
name = \"foo\"
version = \"0.0.1\"
authors = []
")
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0));
}
// `[profile.dev] panic = 'abort'` must translate into `-C panic=abort` on
// the rustc command line (visible with -v).
#[test]
fn panic_abort_compiles_with_panic_abort() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[profile.dev]
panic = 'abort'
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0)
.with_stderr_contains("[..] -C panic=abort [..]"));
}
// `cargo build --color always|never` must forward the corresponding
// `--color` flag to the underlying rustc invocation.
#[test]
fn explicit_color_config_is_propagated_to_rustc() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v").arg("--color").arg("always"),
execs().with_status(0).with_stderr_contains(
"[..]rustc [..] src[/]lib.rs --color always[..]"));
// Clean so the second build actually re-invokes rustc.
assert_that(p.cargo("clean"), execs().with_status(0));
assert_that(p.cargo("build").arg("-v").arg("--color").arg("never"),
execs().with_status(0).with_stderr("\
[COMPILING] test v0.0.0 ([..])
[RUNNING] `rustc [..] --color never [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
"));
}
// `--message-format json` emits one JSON object per compiler message and
// per produced artifact, in dependency order (bar before foo). The first
// (cold) build reports warnings and "fresh": false; a second (warm) build
// repeats the artifact records with "fresh": true and omits the warnings.
// Both crates contain deliberate dead code to provoke compiler messages.
#[test]
fn compiler_json_error_format() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[profile.dev]
debug = false # prevent the *.dSYM from affecting the test result
[dependencies.bar]
path = "bar"
"#)
.file("src/main.rs", "fn main() { let unused = 92; }")
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("bar/src/lib.rs", r#"fn dead() {}"#)
.build();
// Cold build: compiler-message + compiler-artifact for each crate.
assert_that(p.cargo("build").arg("-v")
.arg("--message-format").arg("json"),
execs().with_status(0).with_json(r#"
{
"reason":"compiler-message",
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"name":"bar",
"src_path":"[..]lib.rs"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"profile": {
"debug_assertions": true,
"debuginfo": null,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"name":"bar",
"src_path":"[..]lib.rs"
},
"filenames":["[..].rlib"],
"fresh": false
}
{
"reason":"compiler-message",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]main.rs"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]main.rs"
},
"profile": {
"debug_assertions": true,
"debuginfo": null,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"filenames": ["[..]"],
"fresh": false
}
"#));
// With fresh build, we should repeat the artifacts,
// but omit compiler warnings.
assert_that(p.cargo("build").arg("-v")
.arg("--message-format").arg("json"),
execs().with_status(0).with_json(r#"
{
"reason":"compiler-artifact",
"profile": {
"debug_assertions": true,
"debuginfo": null,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"name":"bar",
"src_path":"[..]lib.rs"
},
"filenames":["[..].rlib"],
"fresh": true
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]main.rs"
},
"profile": {
"debug_assertions": true,
"debuginfo": null,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"filenames": ["[..]"],
"fresh": true
}
"#));
}
// An unknown --message-format value is rejected during CLI parsing
// (status 1, not cargo's usual 101).
// NOTE(review): the error quotes 'xml' in lowercase although "XML" is
// passed — presumably the CLI layer lowercases the value before matching;
// confirm against the argument-parsing code.
#[test]
fn wrong_message_format_option() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--message-format").arg("XML"),
execs().with_status(1)
.with_stderr_contains(
r#"[ERROR] Could not match 'xml' with any of the allowed variants: ["Human", "Json"]"#));
}
// `cargo rustc --message-format JSON` (case-insensitive value) forwards
// compiler diagnostics as JSON; release profile shows opt_level "3" and
// debug_assertions false in the artifact record.
#[test]
fn message_format_json_forward_stderr() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/main.rs", "fn main() { let unused = 0; }")
.build();
assert_that(p.cargo("rustc").arg("--release").arg("--bin").arg("foo")
.arg("--message-format").arg("JSON"),
execs().with_status(0)
.with_json(r#"
{
"reason":"compiler-message",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]"
},
"profile":{
"debug_assertions":false,
"debuginfo":null,
"opt_level":"3",
"overflow_checks": false,
"test":false
},
"features":[],
"filenames":["[..]"],
"fresh": false
}
"#));
}
// The free-form [package.metadata] table (and nested tables under it) must
// not trigger "unused manifest key" warnings — stderr carries only the
// normal compile/finish lines.
#[test]
fn no_warn_about_package_metadata() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[package.metadata]
foo = "bar"
a = true
b = 3
[package.metadata.another]
bar = 3
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr("[..] foo v0.0.1 ([..])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
// `cargo build --target ""` (empty triple) must be rejected with an error
// mentioning that the target was empty.
#[test]
fn cargo_build_empty_target() {
    let proj = project("foo")
        .file("Cargo.toml", &basic_bin_manifest("foo"))
        .file("src/main.rs", "fn main() {}")
        .build();
    let build_cmd = proj.cargo("build").arg("--target").arg("");
    assert_that(build_cmd,
                execs().with_status(101)
                       .with_stderr_contains("[..] target was empty"));
}
// `cargo build --all` in a workspace root builds the member dependency
// first, then the root crate, in a single finished run.
#[test]
fn build_all_workspace() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
[workspace]
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.build();
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
.with_stderr("[..] Compiling bar v0.1.0 ([..])\n\
[..] Compiling foo v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
// `cargo build --all --exclude baz` builds every workspace member except
// baz; baz contains code that would fail to compile, so its absence from
// stderr proves it was genuinely skipped.
#[test]
fn build_all_exclude() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar", "baz"]
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.1.0"
"#)
.file("baz/src/lib.rs", r#"
pub fn baz() {
break_the_build();
}
"#)
.build();
assert_that(p.cargo("build")
.arg("--all")
.arg("--exclude")
.arg("baz"),
execs().with_status(0)
.with_stderr_contains("[..]Compiling foo v0.1.0 [..]")
.with_stderr_contains("[..]Compiling bar v0.1.0 [..]")
.with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]"));
}
// `cargo build --all --examples` across a workspace builds only the
// implicit examples of every member (c,d,g,h) and none of the binaries
// (a,b,e,f).
#[test]
fn build_all_workspace_implicit_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
[workspace]
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/c.rs", "fn main() {}")
.file("examples/d.rs", "fn main() {}")
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", "")
.file("bar/src/bin/e.rs", "fn main() {}")
.file("bar/src/bin/f.rs", "fn main() {}")
.file("bar/examples/g.rs", "fn main() {}")
.file("bar/examples/h.rs", "fn main() {}")
.build();
assert_that(p.cargo("build")
.arg("--all").arg("--examples"),
execs().with_status(0)
.with_stderr("[..] Compiling bar v0.1.0 ([..])\n\
[..] Compiling foo v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/c"), existing_file());
assert_that(&p.bin("examples/d"), existing_file());
assert_that(&p.bin("e"), is_not(existing_file()));
assert_that(&p.bin("f"), is_not(existing_file()));
assert_that(&p.bin("examples/g"), existing_file());
assert_that(&p.bin("examples/h"), existing_file());
}
// `cargo build --all` under a virtual manifest (workspace with no root
// package) builds every member. Member build order is unspecified, so the
// exact-stderr match uses [..] wildcards for the crate names while the
// contains-checks pin down that both foo and bar were compiled.
#[test]
fn build_all_virtual_manifest() {
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", r#"
pub fn foo() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.build();
// The order in which foo and bar are built is not guaranteed
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
.with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
// Plain `cargo build` under a virtual manifest implies `--all`: both
// members are compiled without passing the flag explicitly.
#[test]
fn build_virtual_manifest_all_implied() {
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", r#"
pub fn foo() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.build();
// The order in which foo and bar are built is not guaranteed
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
.with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
// `cargo build -p foo` in a virtual manifest builds only the selected member;
// the other member ("bar") must not appear anywhere in the output.
#[test]
fn build_virtual_manifest_one_project() {
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", r#"
pub fn foo() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.build();
assert_that(p.cargo("build")
.arg("-p").arg("foo"),
execs().with_status(0)
.with_stderr_does_not_contain("bar")
.with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
.with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
// `cargo build --all --examples` in a virtual manifest builds only the example
// targets of every member: examples end up under target/debug/examples while
// the regular binaries (a, b, e, f) are NOT produced.
#[test]
fn build_all_virtual_manifest_implicit_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", "")
.file("foo/src/bin/a.rs", "fn main() {}")
.file("foo/src/bin/b.rs", "fn main() {}")
.file("foo/examples/c.rs", "fn main() {}")
.file("foo/examples/d.rs", "fn main() {}")
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", "")
.file("bar/src/bin/e.rs", "fn main() {}")
.file("bar/src/bin/f.rs", "fn main() {}")
.file("bar/examples/g.rs", "fn main() {}")
.file("bar/examples/h.rs", "fn main() {}")
.build();
// The order in which foo and bar are built is not guaranteed
assert_that(p.cargo("build")
.arg("--all").arg("--examples"),
execs().with_status(0)
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
.with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/c"), existing_file());
assert_that(&p.bin("examples/d"), existing_file());
assert_that(&p.bin("e"), is_not(existing_file()));
assert_that(&p.bin("f"), is_not(existing_file()));
assert_that(&p.bin("examples/g"), existing_file());
assert_that(&p.bin("examples/h"), existing_file());
}
// A workspace member named "a" that depends on a registry crate also named "a"
// must not confuse `cargo build --all`: both the registry `a` and the member
// `a` are compiled (the member's line carries a path in parentheses).
#[test]
fn build_all_member_dependency_same_name() {
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["a"]
"#)
.file("a/Cargo.toml", r#"
[project]
name = "a"
version = "0.1.0"
[dependencies]
a = "0.1.0"
"#)
.file("a/src/lib.rs", r#"
pub fn a() {}
"#)
.build();
// Publish the registry crate with the colliding name before building.
Package::new("a", "0.1.0").publish();
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
.with_stderr("[..] Updating registry `[..]`\n\
[..] Downloading a v0.1.0 ([..])\n\
[..] Compiling a v0.1.0\n\
[..] Compiling a v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
// `cargo run --bin other` must execute the selected binary, not `main`;
// the `main` binary panics so accidentally running it would fail the test.
#[test]
fn run_proper_binary() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "main"
[[bin]]
name = "other"
"#)
.file("src/lib.rs", "")
.file("src/bin/main.rs", r#"
fn main() {
panic!("This should never be run.");
}
"#)
.file("src/bin/other.rs", r#"
fn main() {
}
"#)
.build();
assert_that(p.cargo("run").arg("--bin").arg("other"),
execs().with_status(0));
}
// `cargo run --bin foo` should pick the binary compiled from `src/bin/main.rs`
// when the manifest declares a single bin named "foo".
#[test]
fn run_proper_binary_main_rs() {
let manifest = r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
"#;
let bin_source = r#"
fn main() {
}
"#;
let builder = project("foo")
.file("Cargo.toml", manifest)
.file("src/lib.rs", "")
.file("src/bin/main.rs", bin_source);
let proj = builder.build();
assert_that(proj.cargo("run").arg("--bin").arg("foo"),
execs().with_status(0));
}
// Legacy layout: `src/foo.rs` and `src/bar.rs` act as the sources for the
// [[bin]] targets of the same name. Each produced binary must print its
// own name.
#[test]
fn run_proper_alias_binary_from_src() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#)
.file("src/foo.rs", r#"
fn main() {
println!("foo");
}
"#).file("src/bar.rs", r#"
fn main() {
println!("bar");
}
"#)
.build();
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
);
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("foo\n"));
assert_that(process(&p.bin("bar")),
execs().with_status(0).with_stdout("bar\n"));
}
// With only `src/main.rs` present, both declared [[bin]] targets alias the
// same source: the two produced binaries both print "main".
#[test]
fn run_proper_alias_binary_main_rs() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#)
.file("src/main.rs", r#"
fn main() {
println!("main");
}
"#)
.build();
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
);
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("main\n"));
assert_that(process(&p.bin("bar")),
execs().with_status(0).with_stdout("main\n"));
}
// When both `src/foo.rs` and `src/main.rs` exist, `src/main.rs` wins as the
// source of the "foo" bin; `src/foo.rs` panics so running the wrong one fails.
#[test]
fn run_proper_binary_main_rs_as_foo() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs", r#"
fn main() {
panic!("This should never be run.");
}
"#)
.file("src/main.rs", r#"
fn main() {
}
"#)
.build();
assert_that(p.cargo("run").arg("--bin").arg("foo"),
execs().with_status(0));
}
// RUSTC_WRAPPER must be prepended to the rustc invocation. `/usr/bin/env`
// is used as a pass-through wrapper, hence the Unix-only guard.
#[test]
fn rustc_wrapper() {
// We don't have /usr/bin/env on Windows.
if cfg!(windows) { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
assert_that(p.cargo("build").arg("-v").env("RUSTC_WRAPPER", "/usr/bin/env"),
execs().with_stderr_contains(
"[RUNNING] `/usr/bin/env rustc --crate-name foo [..]")
.with_status(0));
}
// A cdylib whose package name needs no hyphen-to-underscore translation is
// NOT uplifted out of target/debug/deps; the per-platform artifact names must
// still exist inside deps/.
#[test]
fn cdylib_not_lifted() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
[lib]
crate-type = ["cdylib"]
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
// Expected artifacts differ per platform (import lib + export file on MSVC).
let files = if cfg!(windows) {
vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"]
} else if cfg!(target_os = "macos") {
vec!["libfoo.dylib"]
} else {
vec!["libfoo.so"]
};
for file in files {
println!("checking: {}", file);
assert_that(&p.root().join("target/debug/deps").join(&file),
existing_file());
}
}
// A cdylib for a hyphenated package name ("foo-bar") is uplifted to
// target/debug with the hyphen translated to an underscore in the file name.
#[test]
fn cdylib_final_outputs() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo-bar"
authors = []
version = "0.1.0"
[lib]
crate-type = ["cdylib"]
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
let files = if cfg!(windows) {
vec!["foo_bar.dll.lib", "foo_bar.dll"]
} else if cfg!(target_os = "macos") {
vec!["libfoo_bar.dylib"]
} else {
vec!["libfoo_bar.so"]
};
for file in files {
println!("checking: {}", file);
assert_that(&p.root().join("target/debug").join(&file), existing_file());
}
}
// Verify the target file names Cargo would produce for the
// wasm32-unknown-emscripten target WITHOUT actually cross-compiling
// (Emscripten is not assumed to be installed). The test hand-assembles
// cargo's internal compilation Context and asks it for the file names.
// NOTE(review): this relies on internal cargo APIs (Context, Unit, profiles)
// and the Emscripten target spec; the accompanying commit message in this
// dump suggests it was slated for removal for exactly that reason.
#[test]
fn wasm32_final_outputs() {
use cargo::core::{Shell, Target, Workspace};
use cargo::ops::{self, BuildConfig, Context, CompileMode, CompileOptions, Kind, Unit};
use cargo::util::Config;
use cargo::util::important_paths::find_root_manifest_for_wd;
let target_triple = "wasm32-unknown-emscripten";
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo-bar"
authors = []
version = "0.1.0"
"#)
.file("src/main.rs", "fn main() {}")
.build();
// We can't cross-compile the project to wasm target unless we have emscripten installed.
// So here we will not run `cargo build`, but just create cargo_rustc::Context and ask it
// what the target file names would be.
// Create various stuff required to build cargo_rustc::Context.
let shell = Shell::new();
let config = Config::new(shell, p.root(), p.root());
let root = find_root_manifest_for_wd(None, config.cwd()).expect("Can't find the root manifest");
let ws = Workspace::new(&root, &config).expect("Can't create workspace");
let opts = CompileOptions {
target: Some(target_triple),
.. CompileOptions::default(&config, CompileMode::Build)
};
let specs = opts.spec.into_package_id_specs(&ws).expect("Can't create specs");
let (packages, resolve) = ops::resolve_ws_precisely(
&ws,
None,
opts.features,
opts.all_features,
opts.no_default_features,
&specs,
).expect("Can't create resolve");
let build_config = BuildConfig {
requested_target: Some(target_triple.to_string()),
jobs: 1,
.. BuildConfig::default()
};
// There is exactly one package id matching "foo-bar" in this project.
let pkgid = packages
.package_ids()
.filter(|id| id.name() == "foo-bar")
.collect::<Vec<_>>();
let pkg = packages.get(pkgid[0]).expect("Can't get package");
let target = Target::bin_target("foo-bar", p.root().join("src/main.rs"), None);
let unit = Unit {
pkg: &pkg,
target: &target,
profile: &ws.profiles().dev,
kind: Kind::Target,
};
let units = vec![unit];
// Finally, create the cargo_rustc::Context.
let mut ctx = Context::new(
&ws,
&resolve,
&packages,
&config,
build_config,
ws.profiles(),
).expect("Can't create context");
// Ask the context to resolve target file names.
ctx.probe_target_info(&units).expect("Can't probe target info");
let target_filenames = ctx.target_filenames(&unit).expect("Can't get target file names");
// Verify the result: emscripten output keeps the hyphen in the .js shim but
// underscores the .wasm artifact.
let mut expected = vec!["debug/foo-bar.js", "debug/foo_bar.wasm"];
assert_eq!(target_filenames.len(), expected.len());
let mut target_filenames = target_filenames
.iter()
.map(|&(_, ref link_dst, _)| link_dst.clone().unwrap())
.collect::<Vec<_>>();
// Sort both sides so the zip below compares like with like.
target_filenames.sort();
expected.sort();
for (expected, actual) in expected.iter().zip(target_filenames.iter()) {
assert!(
actual.ends_with(expected),
format!("{:?} does not end with {}", actual, expected)
);
}
}
// Feature `--cfg` flags and build-script `cargo:rustc-cfg` flags must be
// passed to rustc in a deterministic order across runs.
#[test]
fn deterministic_cfg_flags() {
// This bug is non-deterministic
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
build = "build.rs"
[features]
default = ["f_a", "f_b", "f_c", "f_d"]
f_a = []
f_b = []
f_c = []
f_d = []
"#)
.file("build.rs", r#"
fn main() {
println!("cargo:rustc-cfg=cfg_a");
println!("cargo:rustc-cfg=cfg_b");
println!("cargo:rustc-cfg=cfg_c");
println!("cargo:rustc-cfg=cfg_d");
println!("cargo:rustc-cfg=cfg_e");
}
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.build();
// Feature cfgs come first (declaration order), then build-script cfgs.
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.1.0 [..]
[RUNNING] [..]
[RUNNING] [..]
[RUNNING] `rustc --crate-name foo [..] \
--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\
--cfg[..]f_c[..]--cfg[..]f_d[..] \
--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"));
}
// [[bin]] sections with no `path` key are resolved by convention:
// "foo" from src/main.rs and "bar" from src/bin/bar.rs.
#[test]
fn explicit_bins_without_paths() {
let manifest = r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#;
let builder = project("foo")
.file("Cargo.toml", manifest)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}");
let proj = builder.build();
assert_that(proj.cargo("build"), execs().with_status(0));
}
// With a lib present, a [[bin]] named "foo" is NOT inferred from `src/foo.rs`;
// Cargo must fail with a manifest error asking for an explicit bin.path.
#[test]
fn no_bin_in_src_with_lib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[[bin]]
name = "foo"
"#)
.file("src/lib.rs", "")
.file("src/foo.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr_contains("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `foo` bin, specify bin.path"));
}
// Binaries are inferred without any [[bin]] sections: "foo" from src/main.rs,
// "bar" from src/bin/bar.rs, and "baz" from src/bin/baz/main.rs.
#[test]
fn inferred_bins() {
let manifest = r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#;
let proj = project("foo")
.file("Cargo.toml", manifest)
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
.file("src/bin/baz/main.rs", "fn main() {}")
.build();
assert_that(proj.cargo("build"), execs().with_status(0));
// One executable per inferred target.
for name in &["foo", "bar", "baz"] {
assert_that(&proj.bin(name), existing_file());
}
}
// src/bin/foo.rs and src/bin/foo/main.rs would both infer a bin named "foo";
// Cargo must reject the duplicate target name.
#[test]
fn inferred_bins_duplicate_name() {
// this should fail, because we have two binaries with the same name
let p = project("bar")
.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.file("src/bin/foo.rs", "fn main() {}")
.file("src/bin/foo/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr_contains("\
[..]found duplicate binary name foo, but all binary targets must have a unique name[..]
"));
}
// A [[bin]] declared without `path` still resolves to the directory form
// src/bin/<name>/main.rs.
#[test]
fn inferred_bin_path() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[[bin]]
name = "bar"
# Note, no `path` key!
"#)
.file("src/bin/bar/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("bar"), existing_file());
}
// Examples are inferred from both examples/<name>.rs and
// examples/<name>/main.rs; `cargo test` builds them as a side effect.
#[test]
fn inferred_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/lib.rs", "fn main() {}")
.file("examples/bar.rs", "fn main() {}")
.file("examples/baz/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("test"), execs().with_status(0));
assert_that(&p.bin("examples/bar"), existing_file());
assert_that(&p.bin("examples/baz"), existing_file());
}
// Integration tests are inferred from tests/<name>.rs and
// tests/<name>/main.rs; both must be selectable via `--test=<name>`.
#[test]
fn inferred_tests() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/lib.rs", "fn main() {}")
.file("tests/bar.rs", "fn main() {}")
.file("tests/baz/main.rs", "fn main() {}")
.build();
assert_that(
p.cargo("test").arg("--test=bar").arg("--test=baz"),
execs().with_status(0));
}
// Benchmarks are inferred from benches/<name>.rs and benches/<name>/main.rs;
// both must be selectable via `--bench=<name>`.
#[test]
fn inferred_benchmarks() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/lib.rs", "fn main() {}")
.file("benches/bar.rs", "fn main() {}")
.file("benches/baz/main.rs", "fn main() {}")
.build();
assert_that(
p.cargo("bench").arg("--bench=bar").arg("--bench=baz"),
execs().with_status(0));
}
// The -C metadata hash for a top-level crate must not depend on the directory
// it is built in: capture `metadata=` from one build and require it verbatim
// in a second build of the same crate elsewhere.
#[test]
fn same_metadata_different_directory() {
// A top-level crate built in two different workspaces should have the
// same metadata hash.
let p = project("foo1")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
let output = t!(String::from_utf8(
t!(p.cargo("build").arg("-v").exec_with_output())
.stderr,
));
// Grab the `metadata=<hash>` token from the verbose rustc invocation.
let metadata = output
.split_whitespace()
.find(|arg| arg.starts_with("metadata="))
.unwrap();
// Same crate, different directory — shadowing `p` is intentional.
let p = project("foo2")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
assert_that(
p.cargo("build").arg("-v"),
execs().with_status(0).with_stderr_contains(
format!("[..]{}[..]", metadata),
),
);
}
// A registry dependency that declares a [[bin]] with no matching source must
// fail the downstream build with the "specify bin.path" manifest error.
// NOTE(review): "witout" in the test name is a typo for "without"; kept as-is
// since renaming would change the test's public identifier.
#[test]
fn building_a_dependent_crate_witout_bin_should_fail() {
Package::new("testless", "0.1.0")
.file("Cargo.toml", r#"
[project]
name = "testless"
version = "0.1.0"
[[bin]]
name = "a_bin"
"#)
.file("src/lib.rs", "")
.publish();
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
testless = "0.1.0"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"),
execs().with_status(101).with_stderr_contains(
"[..]can't find `a_bin` bin, specify bin.path"
));
}
// On Apple platforms, .dSYM debug bundles for bins are uplifted (symlinked)
// into target/debug, but example and test artifacts are not.
#[test]
fn uplift_dsym_of_bin_on_mac() {
if !cfg!(any(target_os = "macos", target_os = "ios")) {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("src/main.rs", "fn main() { panic!(); }")
.file("src/bin/b.rs", "fn main() { panic!(); }")
.file("examples/c.rs", "fn main() { panic!(); }")
.file("tests/d.rs", "fn main() { panic!(); }")
.build();
assert_that(
p.cargo("build").arg("--bins").arg("--examples").arg("--tests"),
execs().with_status(0)
);
assert_that(&p.bin("foo.dSYM"), existing_dir());
assert_that(&p.bin("b.dSYM"), existing_dir());
// The uplifted bundle must be a symlink into deps/, not a copy —
// symlink_metadata (unlike metadata) does not follow the link.
assert!(
p.bin("b.dSYM")
.symlink_metadata()
.expect("read metadata from b.dSYM")
.file_type()
.is_symlink()
);
assert_that(&p.bin("c.dSYM"), is_not(existing_dir()));
assert_that(&p.bin("d.dSYM"), is_not(existing_dir()));
}
// Make sure that `cargo build` chooses the correct profile for building
// targets based on filters (assuming --profile is not specified).
// `--test` builds with the dev profile; `--bench` builds the bench target
// with the release profile (`-C opt-level=3`) while deps stay dev.
#[test]
fn build_filter_infer_profile() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/t1.rs", "")
.file("benches/b1.rs", "")
.file("examples/ex1.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
--emit=dep-info,link[..]")
);
// Wipe target/ so the next filtered build recompiles everything.
p.root().join("target").rm_rf();
assert_that(p.cargo("build").arg("-v").arg("--test=t1"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name t1 tests[/]t1.rs --emit=dep-info,link[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
--emit=dep-info,link[..]")
);
p.root().join("target").rm_rf();
assert_that(p.cargo("build").arg("-v").arg("--bench=b1"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name b1 benches[/]b1.rs --emit=dep-info,link \
-C opt-level=3[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
--emit=dep-info,link[..]")
);
}
// `--all-targets` on a bin-only package compiles src/main.rs three ways:
// as the bin, as a bench (opt-level=3 --test) and as a unit test
// (debuginfo=2 --test).
#[test]
fn all_targets_no_lib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v").arg("--all-targets"),
execs().with_status(0)
// bin
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
--emit=dep-info,link[..]")
// bench
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
-C opt-level=3 --test [..]")
// unit test
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
-C debuginfo=2 --test [..]")
);
}
// Depending on a crate whose only target is a staticlib (nothing linkable)
// is a warning, not an error — see issue 3169 / PR #4797.
#[test]
fn no_linkable_target() {
// Issue 3169. This is currently not an error as per discussion in PR #4797
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[dependencies]
the_lib = { path = "the_lib" }
"#)
.file("src/main.rs", "fn main() {}")
.file("the_lib/Cargo.toml", r#"
[package]
name = "the_lib"
version = "0.1.0"
[lib]
name = "the_lib"
crate-type = ["staticlib"]
"#)
.file("the_lib/src/lib.rs", "pub fn foo() {}")
.build();
assert_that(p.cargo("build"),
execs()
.with_status(0)
.with_stderr_contains("\
[WARNING] The package `the_lib` provides no linkable [..] \
while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]"));
}
Remove test that requires Emscripten

This behavior seems relatively unlikely to regress in the future, and otherwise
arranging for Cargo's test suite to have a rustc with Emscripten support will
become difficult in the near future in the Rust repo. All in all, the best
course of action seems to be to delete this test for now.
extern crate cargo;
#[macro_use]
extern crate cargotest;
extern crate hamcrest;
extern crate tempdir;
use std::env;
use std::fs::{self, File};
use std::io::prelude::*;
use cargo::util::paths::dylib_path_envvar;
use cargo::util::{process, ProcessBuilder};
use cargotest::{is_nightly, rustc_host, sleep_ms};
use cargotest::support::paths::{CargoPathExt,root};
use cargotest::support::{ProjectBuilder};
use cargotest::support::{project, execs, main_file, basic_bin_manifest};
use cargotest::support::registry::Package;
use cargotest::ChannelChanger;
use hamcrest::{assert_that, existing_file, existing_dir, is_not};
use tempdir::TempDir;
// Smoke test: a minimal bin package builds, produces an executable, and that
// executable prints its expected output.
#[test]
fn cargo_compile_simple() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("i am foo\n"));
}
// A compile failure under `--message-format=json` must not dump a raw
// "--- stderr" section; diagnostics are delivered as JSON messages instead.
#[test]
fn cargo_fail_with_no_stderr() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
// Deliberately invalid Rust source so the build fails.
// (`"refusal"` directly — the original `&String::from("refusal")`
// allocated a String only to immediately borrow it as &str.)
.file("src/foo.rs", "refusal")
.build();
assert_that(p.cargo("build").arg("--message-format=json"), execs().with_status(101)
.with_stderr_does_not_contain("--- stderr"));
}
/// Check that the `CARGO_INCREMENTAL` environment variable results in
/// `rustc` getting `-Zincremental` passed to it.
// Nightly-only: incremental compilation was unstable at this point.
#[test]
fn cargo_compile_incremental() {
if !is_nightly() {
return
}
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
// Both `build` and `test` invocations must pass the incremental flag.
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
execs().with_stderr_contains(
"[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n")
.with_status(0));
assert_that(
p.cargo("test").arg("-v").env("CARGO_INCREMENTAL", "1"),
execs().with_stderr_contains(
"[RUNNING] `rustc [..] -C incremental=[..][/]target[/]debug[/]incremental[..]`\n")
.with_status(0));
}
// Per-profile `incremental` manifest settings choose the default, and the
// CARGO_INCREMENTAL environment variable overrides them in both directions.
#[test]
fn incremental_profile() {
if !is_nightly() {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[profile.dev]
incremental = false
[profile.release]
incremental = true
"#)
.file("src/main.rs", "fn main() {}")
.build();
// dev profile: off by manifest, on via env override.
assert_that(
p.cargo("build").arg("-v").env_remove("CARGO_INCREMENTAL"),
execs().with_stderr_does_not_contain("[..]C incremental=[..]")
.with_status(0));
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
execs().with_stderr_contains("[..]C incremental=[..]")
.with_status(0));
// release profile: on by manifest, off via env override.
assert_that(
p.cargo("build").arg("--release").arg("-v").env_remove("CARGO_INCREMENTAL"),
execs().with_stderr_contains("[..]C incremental=[..]")
.with_status(0));
assert_that(
p.cargo("build").arg("--release").arg("-v").env("CARGO_INCREMENTAL", "0"),
execs().with_stderr_does_not_contain("[..]C incremental=[..]")
.with_status(0));
}
// `[build] incremental = false` in .cargo/config disables incremental, and
// the CARGO_INCREMENTAL environment variable still wins over the config.
#[test]
fn incremental_config() {
if !is_nightly() {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.file(".cargo/config", r#"
[build]
incremental = false
"#)
.build();
assert_that(
p.cargo("build").arg("-v").env_remove("CARGO_INCREMENTAL"),
execs().with_stderr_does_not_contain("[..]C incremental=[..]")
.with_status(0));
assert_that(
p.cargo("build").arg("-v").env("CARGO_INCREMENTAL", "1"),
execs().with_stderr_contains("[..]C incremental=[..]")
.with_status(0));
}
// `cargo build --manifest-path <path>` invoked from outside the package
// directory must still build the package and produce its binary.
#[test]
fn cargo_compile_manifest_path() {
let proj = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
// Run from the parent directory, pointing at the manifest explicitly.
let parent_dir = proj.root().parent().unwrap().to_path_buf();
assert_that(proj.cargo("build")
.arg("--manifest-path")
.arg("foo/Cargo.toml")
.cwd(parent_dir),
execs().with_status(0));
assert_that(&proj.bin("foo"), existing_file());
}
// An empty Cargo.toml is treated as a virtual manifest, which is invalid
// without a [workspace] table.
#[test]
fn cargo_compile_with_invalid_manifest() {
let p = project("foo")
.file("Cargo.toml", "")
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
virtual manifests must be configured with [workspace]
"))
}
// Syntactically invalid TOML (bare `bar` value) must surface the TOML
// parser's error with the offending line number.
#[test]
fn cargo_compile_with_invalid_manifest2() {
let p = project("foo")
.file("Cargo.toml", r"
[project]
foo = bar
")
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
could not parse input as TOML
Caused by:
invalid number at line 3
"))
}
// An invalid manifest selected explicitly via --manifest-path (here a stray
// src/Cargo.toml) must produce the same TOML parse error.
#[test]
fn cargo_compile_with_invalid_manifest3() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/Cargo.toml", "a = bar")
.build();
assert_that(p.cargo("build").arg("--manifest-path")
.arg("src/Cargo.toml"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
could not parse input as TOML
Caused by:
invalid number at line 1
"))
}
// src/main.rs serving as both the dylib [lib] source and the inferred bin
// produces a "file found to be present in multiple build targets" warning,
// but the build still succeeds.
#[test]
fn cargo_compile_duplicate_build_targets() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "main"
path = "src/main.rs"
crate-type = ["dylib"]
[dependencies]
"#)
.file("src/main.rs", r#"
#![allow(warnings)]
fn main() {}
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(0)
.with_stderr("\
warning: file found to be present in multiple build targets: [..]main.rs
[COMPILING] foo v0.0.1 ([..])
[FINISHED] [..]
"));
}
// A two-component version ("1.0") is not valid semver; the manifest parser
// must reject it pointing at `project.version`.
#[test]
fn cargo_compile_with_invalid_version() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "1.0"
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Expected dot for key `project.version`
"))
}
// An empty package name is rejected with a dedicated manifest error.
#[test]
fn cargo_compile_with_invalid_package_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = ""
authors = []
version = "0.0.0"
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
package name cannot be an empty string
"))
}
// An empty [[bin]] target name is rejected with a dedicated manifest error.
#[test]
fn cargo_compile_with_invalid_bin_target_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = ""
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
binary target names cannot be empty
"))
}
// "build" is a reserved binary target name (it would clash with build
// scripts); the manifest parser must reject it.
#[test]
fn cargo_compile_with_forbidden_bin_target_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "build"
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
the binary target name `build` is forbidden
"))
}
// An empty [lib] target name is rejected with a dedicated manifest error.
#[test]
fn cargo_compile_with_invalid_lib_target_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[lib]
name = ""
"#)
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
library target names cannot be empty
"))
}
// Running `cargo build` in a directory tree containing no Cargo.toml at all
// (a fresh tempdir) must fail with the "could not find `Cargo.toml`" error.
#[test]
fn cargo_compile_without_manifest() {
let tmpdir = TempDir::new("cargo").unwrap();
let p = ProjectBuilder::new("foo", tmpdir.path().to_path_buf()).build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr("\
[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory
"));
}
// A rustc failure surfaces as "Could not compile", and the lock file is
// still written even though compilation failed.
#[test]
fn cargo_compile_with_invalid_code() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", "invalid rust code!")
.build();
assert_that(p.cargo("build"),
execs()
.with_status(101)
.with_stderr_contains("\
[ERROR] Could not compile `foo`.
To learn more, run the command again with --verbose.\n"));
assert_that(&p.root().join("Cargo.lock"), existing_file());
}
// Invalid code in path dependencies must fail the overall build.
// The dep projects are built (on disk) but never directly invoked, hence
// the leading-underscore bindings.
#[test]
fn cargo_compile_with_invalid_code_in_deps() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "../bar"
[dependencies.baz]
path = "../baz"
"#)
.file("src/main.rs", "invalid rust code!")
.build();
let _bar = project("bar")
.file("Cargo.toml", &basic_bin_manifest("bar"))
.file("src/lib.rs", "invalid rust code!")
.build();
let _baz = project("baz")
.file("Cargo.toml", &basic_bin_manifest("baz"))
.file("src/lib.rs", "invalid rust code!")
.build();
assert_that(p.cargo("build"), execs().with_status(101));
}
// rustc warnings (dead code here) in the root package are forwarded to
// cargo's stderr without failing the build.
#[test]
fn cargo_compile_with_warnings_in_the_root_package() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", "fn main() {} fn dead() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(0).with_stderr_contains("\
[..]function is never used: `dead`[..]
"));
}
// Warnings from a path dependency are also forwarded, the build succeeds,
// and the produced binary runs the dependency's code correctly.
#[test]
fn cargo_compile_with_warnings_in_a_dep_package() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = "bar"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "bar"
"#)
.file("bar/src/bar.rs", r#"
pub fn gimme() -> &'static str {
"test passed"
}
fn dead() {}
"#)
.build();
assert_that(p.cargo("build"),
execs().with_status(0).with_stderr_contains("\
[..]function is never used: `dead`[..]
"));
assert_that(&p.bin("foo"), existing_file());
assert_that(
process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Transitive path deps (foo -> bar -> baz) with fully inferred lib targets:
// the final binary works and no rlib artifacts leak into the bin directory.
#[test]
fn cargo_compile_with_nested_deps_inferred() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = 'bar'
[[bin]]
name = "foo"
"#)
.file("src/foo.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.baz]
path = "../baz"
"#)
.file("bar/src/lib.rs", r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("baz/src/lib.rs", r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#)
.build();
p.cargo("build")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), existing_file());
// Library rlibs must stay in deps/, not the top-level bin dir.
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Same transitive-dep chain as above but with the bin source at src/main.rs;
// the [[bin]] "foo" must resolve to it and the produced binary must work.
#[test]
fn cargo_compile_with_nested_deps_correct_bin() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = "bar"
[[bin]]
name = "foo"
"#)
.file("src/main.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.baz]
path = "../baz"
"#)
.file("bar/src/lib.rs", r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("baz/src/lib.rs", r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#)
.build();
p.cargo("build")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Transitive path deps where each library uses the legacy [lib]-with-name
// layout (bar/src/bar.rs, baz/src/baz.rs); the final binary must still work.
#[test]
fn cargo_compile_with_nested_deps_shorthand() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = "bar"
"#)
.file("src/main.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.baz]
path = "../baz"
[lib]
name = "bar"
"#)
.file("bar/src/bar.rs", r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "baz"
"#)
.file("baz/src/baz.rs", r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#)
.build();
p.cargo("build")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(
process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Transitive path deps declared longhand (path + version for each dep);
// the final binary must build and run correctly.
#[test]
fn cargo_compile_with_nested_deps_longhand() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.bar]
path = "bar"
version = "0.5.0"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
[dependencies.baz]
path = "../baz"
version = "0.5.0"
[lib]
name = "bar"
"#)
.file("bar/src/bar.rs", r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "baz"
"#)
.file("baz/src/baz.rs", r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#)
.build();
// Fix: assert the build actually succeeds. The original used a bare
// `execs()` with no status check, unlike every sibling test, so a failed
// build would only be caught indirectly by the file assertions below.
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
// Library rlibs must stay in deps/, not the top-level bin dir.
assert_that(&p.bin("libbar.rlib"), is_not(existing_file()));
assert_that(&p.bin("libbaz.rlib"), is_not(existing_file()));
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("test passed\n"));
}
// Check that Cargo gives a sensible error if a dependency can't be found
// because of a name mismatch.
#[test]
// `foo` declares `[dependencies.notquitebar]` pointing at a path that actually
// contains a package named `bar`; the build must fail (status 101) with the
// "no matching package" error naming the searched location.
fn cargo_compile_with_dep_name_mismatch() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = ["wycats@example.com"]
[[bin]]
name = "foo"
[dependencies.notquitebar]
path = "bar"
"#)
.file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"]))
.file("bar/Cargo.toml", &basic_bin_manifest("bar"))
.file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[]))
.build();
assert_that(p.cargo("build"),
execs().with_status(101).with_stderr(&format!(
r#"[ERROR] no matching package named `notquitebar` found (required by `foo`)
location searched: {proj_dir}/bar
version required: *
"#, proj_dir = p.url())));
}
#[test]
// Passing a *file name* (e.g. `a.rs`) instead of a target name to `--bin` or
// `--example` must fail, and when the stem matches an existing target the
// error should suggest it ("Did you mean `a`?").
fn cargo_compile_with_filename() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", r#"
extern crate foo;
fn main() { println!("hello a.rs"); }
"#)
.file("examples/a.rs", r#"
fn main() { println!("example"); }
"#)
.build();
// No target stem matches "bin", so no suggestion is expected here.
assert_that(p.cargo("build").arg("--bin").arg("bin.rs"),
execs().with_status(101).with_stderr("\
[ERROR] no bin target named `bin.rs`"));
assert_that(p.cargo("build").arg("--bin").arg("a.rs"),
execs().with_status(101).with_stderr("\
[ERROR] no bin target named `a.rs`
Did you mean `a`?"));
assert_that(p.cargo("build").arg("--example").arg("example.rs"),
execs().with_status(101).with_stderr("\
[ERROR] no example target named `example.rs`"));
assert_that(p.cargo("build").arg("--example").arg("a.rs"),
execs().with_status(101).with_stderr("\
[ERROR] no example target named `a.rs`
Did you mean `a`?"));
}
#[test]
// A pure path dependency needs no network, so building with the nightly-only
// `-Z offline` flag must succeed.
fn cargo_compile_path_with_offline() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", "")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", "")
.build();
assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(0));
}
#[test]
// A registry dependency that is already in the local cache (downloaded by a
// prior online build of another project) must be usable under `-Z offline`.
fn cargo_compile_with_downloaded_dependency_with_offline() {
Package::new("present_dep", "1.2.3")
.file("Cargo.toml", r#"
[project]
name = "present_dep"
version = "1.2.3"
"#)
.file("src/lib.rs", "")
.publish();
{
// make package downloaded
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "1.2.3"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"),execs().with_status(0));
}
// Second project reuses the now-cached `present_dep` without network access.
let p2 = project("bar")
.file("Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
present_dep = "1.2.3"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p2.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] present_dep v1.2.3
[COMPILING] bar v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]")));
}
#[test]
// With `-Z offline` and a dependency that was never cached, cargo must fail
// with "no matching package" plus the offline-mode reminder instead of trying
// to hit the network. (The expected text, including the "may with to retry"
// wording, matches cargo's actual message of this era verbatim.)
fn cargo_compile_offline_not_try_update() {
let p = project("bar")
.file("Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
not_cached_dep = "1.2.5"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(101)
.with_stderr("\
error: no matching package named `not_cached_dep` found (required by `bar`)
location searched: registry `[..]`
version required: ^1.2.5
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
without the offline flag."));
}
#[test]
// Publishes present_dep 1.2.1/1.2.2/1.2.3/1.2.5, caches only 1.2.3 via an
// online build, then resolves `present_dep = "1.2"` under `-Z offline`.
// Offline resolution must pick the cached 1.2.3, not the (uncached) maximum
// matching version 1.2.5 — verified by running the binary and checking stdout.
fn compile_offline_without_maxvers_cached(){
Package::new("present_dep", "1.2.1").publish();
Package::new("present_dep", "1.2.2").publish();
Package::new("present_dep", "1.2.3")
.file("Cargo.toml", r#"
[project]
name = "present_dep"
version = "1.2.3"
"#)
.file("src/lib.rs", r#"pub fn get_version()->&'static str {"1.2.3"}"#)
.publish();
Package::new("present_dep", "1.2.5")
.file("Cargo.toml", r#"
[project]
name = "present_dep"
version = "1.2.5"
"#)
.file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#)
.publish();
{
// make package cached
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "=1.2.3"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"),execs().with_status(0));
}
let p2 = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "1.2"
"#)
.file("src/main.rs", "\
extern crate present_dep;
fn main(){
println!(\"{}\", present_dep::get_version());
}")
.build();
assert_that(p2.cargo("run").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(0)
.with_stderr(format!("\
[COMPILING] present_dep v1.2.3
[COMPILING] foo v0.1.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
Running `[..]`", url = p2.url()))
.with_stdout("1.2.3")
);
}
#[test]
// Simulates a partially-populated cache: `bar`'s registry archive is saved,
// then truncated (replaced by an empty file) so the online build downloads
// `foo` but fails on `bar`. After restoring the archive, an offline build must
// still report `bar` as missing (only direct downloads count as "cached").
fn compile_offline_while_transitive_dep_not_cached() {
let bar = Package::new("bar", "1.0.0");
let bar_path = bar.archive_dst();
bar.publish();
// Save the real archive bytes, then truncate the file on disk.
let mut content = Vec::new();
let mut file = File::open(bar_path.clone()).ok().unwrap();
let _ok = file.read_to_end(&mut content).ok().unwrap();
drop(file);
drop(File::create(bar_path.clone()).ok().unwrap() );
Package::new("foo", "0.1.0").dep("bar", "1.0.0").publish();
let p = project("transitive_load_test")
.file("Cargo.toml", r#"
[project]
name = "transitive_load_test"
version = "0.0.1"
[dependencies]
foo = "0.1.0"
"#)
.file("src/main.rs", "fn main(){}")
.build();
// simulate download foo, but fail to download bar
let _out = p.cargo("build").exec_with_output();
// Restore the archive so only the *download state* is incomplete.
drop( File::create(bar_path).ok().unwrap().write_all(&content) );
assert_that(p.cargo("build").masquerade_as_nightly_cargo().arg("-Zoffline"),
execs().with_status(101)
.with_stderr("\
error: no matching package named `bar` found (required by `foo`)
location searched: registry `[..]`
version required: = 1.0.0
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
without the offline flag."));
}
#[test]
// After a successful build locks path dep `bar` at 0.0.1, bumping bar's
// version on disk to 0.0.2 must make the next build fail with the
// "no matching version" error suggesting `cargo update`.
fn compile_path_dep_then_change_version() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", "")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
// Rewrite bar's manifest with a different version than the lockfile has.
File::create(&p.root().join("bar/Cargo.toml")).unwrap().write_all(br#"
[package]
name = "bar"
version = "0.0.2"
authors = []
"#).unwrap();
assert_that(p.cargo("build"),
execs().with_status(101).with_stderr("\
[ERROR] no matching version `= 0.0.1` found for package `bar` (required by `foo`)
location searched: [..]
versions found: 0.0.2
consider running `cargo update` to update a path dependency's locked version
"));
}
#[test]
// Rewrites Cargo.lock with CRLF line endings after a successful build; the
// follow-up build must still succeed (lockfile parsing tolerates \r\n).
fn ignores_carriage_return_in_lockfile() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
"#)
.file("src/main.rs", r#"
mod a; fn main() {}
"#)
.file("src/a.rs", "")
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
let lockfile = p.root().join("Cargo.lock");
let mut lock = String::new();
File::open(&lockfile).unwrap().read_to_string(&mut lock).unwrap();
// Convert every LF to CRLF and write the lockfile back.
let lock = lock.replace("\n", "\r\n");
File::create(&lockfile).unwrap().write_all(lock.as_bytes()).unwrap();
assert_that(p.cargo("build"),
execs().with_status(0));
}
#[test]
// Ensure that path dep + dylib + env_var get metadata
// (even though path_dep + dylib should not)
//
// Verified by matching the exact `rustc` invocations: without the env var the
// `--extern bar=` path has no `-[..]` hash suffix in the dylib file name; with
// `__CARGO_DEFAULT_LIB_METADATA=stable` set, it does (`{prefix}bar-[..]{suffix}`).
fn cargo_default_env_metadata_env_var() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", "// hi")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
crate_type = ["dylib"]
"#)
.file("bar/src/lib.rs", "// hello")
.build();
// No metadata on libbar since it's a dylib path dependency
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] bar v0.0.1 ({url}/bar)
[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \
--emit=dep-info,link \
-C prefer-dynamic -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps \
--extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
dir = p.root().display(),
url = p.url(),
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX,
)));
assert_that(p.cargo("clean"), execs().with_status(0));
// If you set the env-var, then we expect metadata on libbar
assert_that(p.cargo("build").arg("-v").env("__CARGO_DEFAULT_LIB_METADATA", "stable"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] bar v0.0.1 ({url}/bar)
[RUNNING] `rustc --crate-name bar bar[/]src[/]lib.rs --crate-type dylib \
--emit=dep-info,link \
-C prefer-dynamic -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
[COMPILING] foo v0.0.1 ({url})
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps \
--extern bar={dir}[/]target[/]debug[/]deps[/]{prefix}bar-[..]{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX,
)));
}
#[test]
// Checks the CARGO_PKG_* / CARGO_MANIFEST_DIR env vars cargo exposes at
// compile time: the generated bin and lib both read them via `env!` and the
// test asserts the expected values (including the `alpha.1` pre-release part)
// through build, run, and `cargo test`.
fn crate_env_vars() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.1-alpha.1"
description = "This is foo"
homepage = "http://example.com"
authors = ["wycats@example.com"]
"#)
.file("src/main.rs", r#"
extern crate foo;
static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR");
static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR");
static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH");
static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE");
static VERSION: &'static str = env!("CARGO_PKG_VERSION");
static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR");
static PKG_NAME: &'static str = env!("CARGO_PKG_NAME");
static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE");
static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION");
fn main() {
let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR,
VERSION_MINOR, VERSION_PATCH, VERSION_PRE,
CARGO_MANIFEST_DIR);
assert_eq!(s, foo::version());
println!("{}", s);
assert_eq!("foo", PKG_NAME);
assert_eq!("http://example.com", HOMEPAGE);
assert_eq!("This is foo", DESCRIPTION);
let s = format!("{}.{}.{}-{}", VERSION_MAJOR,
VERSION_MINOR, VERSION_PATCH, VERSION_PRE);
assert_eq!(s, VERSION);
}
"#)
.file("src/lib.rs", r#"
pub fn version() -> String {
format!("{}-{}-{} @ {} in {}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_MINOR"),
env!("CARGO_PKG_VERSION_PATCH"),
env!("CARGO_PKG_VERSION_PRE"),
env!("CARGO_MANIFEST_DIR"))
}
"#)
.build();
println!("build");
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
println!("bin");
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout(&format!("0-5-1 @ alpha.1 in {}\n",
p.root().display())));
println!("test");
assert_that(p.cargo("test").arg("-v"),
execs().with_status(0));
}
#[test]
// CARGO_PKG_AUTHORS must join multiple manifest authors with `:`; asserted
// both inside the compiled program and via the program's stdout.
fn crate_authors_env_vars() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.1-alpha.1"
authors = ["wycats@example.com", "neikos@example.com"]
"#)
.file("src/main.rs", r#"
extern crate foo;
static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS");
fn main() {
let s = "wycats@example.com:neikos@example.com";
assert_eq!(AUTHORS, foo::authors());
println!("{}", AUTHORS);
assert_eq!(s, AUTHORS);
}
"#)
.file("src/lib.rs", r#"
pub fn authors() -> String {
format!("{}", env!("CARGO_PKG_AUTHORS"))
}
"#)
.build();
println!("build");
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
println!("bin");
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("wycats@example.com:neikos@example.com"));
println!("test");
assert_that(p.cargo("test").arg("-v"),
execs().with_status(0));
}
// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error
//
// Strips empty entries out of the dynamic-library search-path variable on the
// given process builder, returning it for chaining. Leaves the variable alone
// when it is unset (or not valid unicode).
fn setenv_for_removing_empty_component(mut p: ProcessBuilder) -> ProcessBuilder {
    let var_name = dylib_path_envvar();
    if let Ok(current) = env::var(var_name) {
        let kept = env::split_paths(&current).filter(|entry| !entry.as_os_str().is_empty());
        let cleaned = env::join_paths(kept).expect("join_paths");
        // build_command() will override LD_LIBRARY_PATH accordingly
        p.env(var_name, cleaned);
    }
    p
}
// Regression test for #4277
#[test]
// The generated program inspects its own dylib search-path env var (name via
// `dylib_path_envvar()`) and asserts cargo did not inject an empty component.
fn crate_library_path_env_var() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", &format!(r##"
fn main() {{
let search_path = env!("{}");
let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
assert!(!paths.contains(&"".into()));
}}
"##, dylib_path_envvar()))
.build();
assert_that(setenv_for_removing_empty_component(p.cargo("run")),
execs().with_status(0));
}
// Regression test for #4277
#[test]
// Drops an empty fake `libc.so.6` in the project root; the build must still
// succeed (the bogus file must not be picked up by the dynamic loader).
fn build_with_fake_libc_not_loading() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("src/lib.rs", r#" "#)
.file("libc.so.6", r#""#)
.build();
assert_that(setenv_for_removing_empty_component(p.cargo("build")),
execs().with_status(0));
}
// this is testing that src/<pkg-name>.rs still works (for now)
#[test]
// Legacy `src/foo.rs` lib location with crate_type = ["rlib", "dylib"]: the
// build succeeds but must emit the deprecation warning, and both the .rlib and
// the platform dylib must exist in target/debug.
fn many_crate_types_old_style_lib_location() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "foo"
crate_type = ["rlib", "dylib"]
"#)
.file("src/foo.rs", r#"
pub fn foo() {}
"#)
.build();
assert_that(p.cargo("build"), execs().with_status(0).with_stderr_contains("\
[WARNING] path `[..]src[/]foo.rs` was erroneously implicitly accepted for library `foo`,
please rename the file to `src/lib.rs` or set lib.path in Cargo.toml"));
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
// Dylib name is platform dependent (lib*.so / lib*.dylib / *.dll).
let fname = format!("{}foo{}", env::consts::DLL_PREFIX,
env::consts::DLL_SUFFIX);
assert_that(&p.root().join("target/debug").join(&fname), existing_file());
}
#[test]
// Same rlib+dylib crate-type combination as the legacy test above but with the
// conventional `src/lib.rs` location — builds cleanly, both artifacts exist.
fn many_crate_types_correct() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[lib]
name = "foo"
crate_type = ["rlib", "dylib"]
"#)
.file("src/lib.rs", r#"
pub fn foo() {}
"#)
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
// Dylib name is platform dependent (lib*.so / lib*.dylib / *.dll).
let fname = format!("{}foo{}", env::consts::DLL_PREFIX,
env::consts::DLL_SUFFIX);
assert_that(&p.root().join("target/debug").join(&fname), existing_file());
}
#[test]
// A package that path-depends on itself must be rejected with the cyclic
// package dependency error.
fn self_dependency() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[dependencies.test]
path = "."
[lib]
name = "test"
path = "src/test.rs"
"#)
.file("src/test.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr("\
[ERROR] cyclic package dependency: package `test v0.0.0 ([..])` depends on itself
"));
}
#[test]
// A dangling symlink in the project root must not break the build; the
// produced binary still runs. Skipped on Windows.
fn ignore_broken_symlinks() {
// windows and symlinks don't currently agree that well
if cfg!(windows) { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.symlink("Notafile", "bar")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("i am foo\n"));
}
#[test]
// A manifest with no targets at all (no src/lib.rs, src/main.rs, [lib], or
// [[bin]]) must fail manifest parsing with the "no targets specified" error.
fn missing_lib_and_bin() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
"#)
.build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr("\
[ERROR] failed to parse manifest at `[..]Cargo.toml`
Caused by:
no targets specified in the manifest
either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n"));
}
#[test]
// `[profile.release] lto = true` must add `-C lto` to the release rustc
// invocation (checked against the full verbose command line).
fn lto_build() {
// FIXME: currently this hits a linker bug on 32-bit MSVC
if cfg!(all(target_env = "msvc", target_pointer_width = "32")) {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[profile.release]
lto = true
"#)
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v").arg("--release"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]main.rs --crate-type bin \
--emit=dep-info,link \
-C opt-level=3 \
-C lto \
-C metadata=[..] \
--out-dir {dir}[/]target[/]release[/]deps \
-L dependency={dir}[/]target[/]release[/]deps`
[FINISHED] release [optimized] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
)));
}
#[test]
// Baseline `cargo build -v` output check: a single-lib debug build emits one
// rustc invocation with `-C debuginfo=2` and no opt-level flag.
fn verbose_build() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]debug[/]deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
)));
}
#[test]
// Release counterpart of `verbose_build`: the rustc invocation must carry
// `-C opt-level=3` and target the release deps directory.
fn verbose_release_build() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v").arg("--release"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]release[/]deps`
[FINISHED] release [optimized] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
)));
}
#[test]
// Release build with a dylib+rlib path dependency: verbose output must show
// the dep compiled with `-C prefer-dynamic` and the top crate linked with two
// `--extern foo=` flags (the platform dylib and the .rlib).
fn verbose_release_build_deps() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[dependencies.foo]
path = "foo"
"#)
.file("src/lib.rs", "")
.file("foo/Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[lib]
name = "foo"
crate_type = ["dylib", "rlib"]
"#)
.file("foo/src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v").arg("--release"),
execs().with_status(0).with_stderr(&format!("\
[COMPILING] foo v0.0.0 ({url}/foo)
[RUNNING] `rustc --crate-name foo foo[/]src[/]lib.rs \
--crate-type dylib --crate-type rlib \
--emit=dep-info,link \
-C prefer-dynamic \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]release[/]deps`
[COMPILING] test v0.0.0 ({url})
[RUNNING] `rustc --crate-name test src[/]lib.rs --crate-type lib \
--emit=dep-info,link \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency={dir}[/]target[/]release[/]deps \
--extern foo={dir}[/]target[/]release[/]deps[/]{prefix}foo{suffix} \
--extern foo={dir}[/]target[/]release[/]deps[/]libfoo.rlib`
[FINISHED] release [optimized] target(s) in [..]
",
dir = p.root().display(),
url = p.url(),
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX)));
}
#[test]
// Examples declared explicitly via `[[example]]` with non-default paths
// (examples/ex-*.rs) are built by `cargo test` and runnable from
// target's examples directory under their declared names.
fn explicit_examples() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "world"
version = "1.0.0"
authors = []
[lib]
name = "world"
path = "src/lib.rs"
[[example]]
name = "hello"
path = "examples/ex-hello.rs"
[[example]]
name = "goodbye"
path = "examples/ex-goodbye.rs"
"#)
.file("src/lib.rs", r#"
pub fn get_hello() -> &'static str { "Hello" }
pub fn get_goodbye() -> &'static str { "Goodbye" }
pub fn get_world() -> &'static str { "World" }
"#)
.file("examples/ex-hello.rs", r#"
extern crate world;
fn main() { println!("{}, {}!", world::get_hello(), world::get_world()); }
"#)
.file("examples/ex-goodbye.rs", r#"
extern crate world;
fn main() { println!("{}, {}!", world::get_goodbye(), world::get_world()); }
"#)
.build();
assert_that(p.cargo("test").arg("-v"), execs().with_status(0));
assert_that(process(&p.bin("examples/hello")),
execs().with_status(0).with_stdout("Hello, World!\n"));
assert_that(process(&p.bin("examples/goodbye")),
execs().with_status(0).with_stdout("Goodbye, World!\n"));
}
#[test]
// An `[[example]]` entry whose default source file does not exist must fail
// manifest parsing with a hint to set `example.path`.
fn non_existing_example() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "world"
version = "1.0.0"
authors = []
[lib]
name = "world"
path = "src/lib.rs"
[[example]]
name = "hello"
"#)
.file("src/lib.rs", "")
.file("examples/ehlo.rs", "")
.build();
assert_that(p.cargo("test").arg("-v"), execs().with_status(101).with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `hello` example, specify example.path"));
}
#[test]
// A `[[bin]]` entry whose default source file does not exist must fail
// manifest parsing with a hint to set `bin.path`.
fn non_existing_binary() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "world"
version = "1.0.0"
authors = []
[[bin]]
name = "hello"
"#)
.file("src/lib.rs", "")
.file("src/bin/ehlo.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(101).with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `hello` bin, specify bin.path"));
}
#[test]
// Three legacy bin-path fallbacks (src/main.rs, src/bin/main.rs, src/<name>.rs)
// for a `[[bin]]` whose name doesn't match the file: each still builds but
// must emit the "erroneously implicitly accepted" warning.
// NOTE(review): fn name has a typo ("warinigs") but it is the test's public
// identifier, so it is left unchanged here.
fn legacy_binary_paths_warinigs() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
[WARNING] path `[..]src[/]main.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml"));
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#)
.file("src/lib.rs", "")
.file("src/bin/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
[WARNING] path `[..]src[/]bin[/]main.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml"));
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#)
.file("src/bar.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0).with_stderr_contains("\
[WARNING] path `[..]src[/]bar.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml"));
}
#[test]
// Files under examples/ are discovered without any `[[example]]` sections;
// `cargo test` builds them and both run with the expected output.
fn implicit_examples() {
let p = project("world")
.file("Cargo.toml", r#"
[package]
name = "world"
version = "1.0.0"
authors = []
"#)
.file("src/lib.rs", r#"
pub fn get_hello() -> &'static str { "Hello" }
pub fn get_goodbye() -> &'static str { "Goodbye" }
pub fn get_world() -> &'static str { "World" }
"#)
.file("examples/hello.rs", r#"
extern crate world;
fn main() {
println!("{}, {}!", world::get_hello(), world::get_world());
}
"#)
.file("examples/goodbye.rs", r#"
extern crate world;
fn main() {
println!("{}, {}!", world::get_goodbye(), world::get_world());
}
"#)
.build();
assert_that(p.cargo("test"), execs().with_status(0));
assert_that(process(&p.bin("examples/hello")),
execs().with_status(0).with_stdout("Hello, World!\n"));
assert_that(process(&p.bin("examples/goodbye")),
execs().with_status(0).with_stdout("Goodbye, World!\n"));
}
#[test]
// Default (dev) build keeps debug_assertions enabled: the binary prints
// "slow".
fn standard_build_no_ndebug() {
let p = project("world")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
fn main() {
if cfg!(debug_assertions) {
println!("slow")
} else {
println!("fast")
}
}
"#)
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("slow\n"));
}
#[test]
// Release build disables debug_assertions: the same program prints "fast".
fn release_build_ndebug() {
let p = project("world")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", r#"
fn main() {
if cfg!(debug_assertions) {
println!("slow")
} else {
println!("fast")
}
}
"#)
.build();
assert_that(p.cargo("build").arg("--release"),
execs().with_status(0));
assert_that(process(&p.release_bin("foo")),
execs().with_status(0).with_stdout("fast\n"));
}
#[test]
// With no `[[bin]]` section, `src/main.rs` is inferred as a binary named
// after the package.
fn inferred_main_bin() {
let p = project("world")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(process(&p.bin("foo")), execs().with_status(0));
}
#[test]
// Removing the `bar` dependency from the manifest while src/main.rs still has
// `extern crate bar;` must turn a passing build into a failing one.
fn deletion_causes_failure() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/main.rs", r#"
extern crate bar;
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
"#)
.file("bar/src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
// Rewrite the manifest without the [dependencies.bar] section.
p.change_file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#);
assert_that(p.cargo("build"), execs().with_status(101));
}
#[test]
// An unparseable Cargo.toml inside target/ must be ignored — the build and
// the produced binary both succeed.
fn bad_cargo_toml_in_target_dir() {
let p = project("world")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("target/Cargo.toml", "bad-toml")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(process(&p.bin("foo")), execs().with_status(0));
}
#[test]
// A package named `syntax` with both src/lib.rs and src/main.rs builds the lib
// and the bin in one pass (single [COMPILING] line in stderr).
fn lib_with_standard_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "syntax"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "
pub fn foo() {}
")
.file("src/main.rs", "
extern crate syntax;
fn main() { syntax::foo() }
")
.build();
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] syntax v0.0.1 ({dir})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
dir = p.url())));
}
#[test]
// Smoke test for `crate-type = ["staticlib"]`; the RUST_LOG env var exercises
// regression #1381.
fn simple_staticlib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
crate-type = ["staticlib"]
"#)
.file("src/lib.rs", "pub fn foo() {}")
.build();
// env var is a test for #1381
assert_that(p.cargo("build").env("RUST_LOG", "nekoneko=trace"),
execs().with_status(0));
}
#[test]
// A lib with crate-type ["staticlib", "rlib"] plus a bin that links the rlib
// must build cleanly — the staticlib variant must not break bin linkage.
fn staticlib_rlib_and_bin() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
crate-type = ["staticlib", "rlib"]
"#)
.file("src/lib.rs", "pub fn foo() {}")
.file("src/main.rs", r#"
extern crate foo;
fn main() {
foo::foo();
}"#)
.build();
assert_that(p.cargo("build").arg("-v"), execs().with_status(0));
}
#[test]
// Top-level `bin = []` opts out of bin discovery entirely, so the
// syntactically-invalid src/main.rs is never compiled and the build succeeds.
fn opt_out_of_bin() {
let p = project("foo")
.file("Cargo.toml", r#"
bin = []
[package]
name = "foo"
authors = []
version = "0.0.1"
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "bad syntax")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
// A lib with an explicit non-default `path = "src/bar.rs"` builds fine.
fn single_lib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
path = "src/bar.rs"
"#)
.file("src/bar.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
// Files matched by the manifest's `exclude` globs must not count toward
// freshness: after the first build, creating an excluded file (src/bar.rs,
// matched by "src/b*.rs") must not trigger a rebuild.
fn freshness_ignores_excluded() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = "build.rs"
exclude = ["src/b*.rs"]
"#)
.file("build.rs", "fn main() {}")
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.build();
// Backdate mtimes so subsequent writes register as "newer".
foo.root().move_into_the_past();
assert_that(foo.cargo("build"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
", url = foo.url())));
// Smoke test to make sure it doesn't compile again
println!("first pass");
assert_that(foo.cargo("build"),
execs().with_status(0)
.with_stdout(""));
// Modify an ignored file and make sure we don't rebuild
println!("second pass");
File::create(&foo.root().join("src/bar.rs")).unwrap();
assert_that(foo.cargo("build"),
execs().with_status(0)
.with_stdout(""));
}
#[test]
// The build script writes a file into OUT_DIR; after touching a source file
// the rebuild must recompile (second [COMPILING] line) while the build script
// continues to work against the same OUT_DIR.
fn rebuild_preserves_out_dir() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = 'build.rs'
"#)
.file("build.rs", r#"
use std::env;
use std::fs::File;
use std::path::Path;
fn main() {
let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo");
if env::var_os("FIRST").is_some() {
File::create(&path).unwrap();
} else {
File::create(&path).unwrap();
}
}
"#)
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.build();
// Backdate mtimes so the later File::create counts as a modification.
foo.root().move_into_the_past();
assert_that(foo.cargo("build").env("FIRST", "1"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
", url = foo.url())));
File::create(&foo.root().join("src/bar.rs")).unwrap();
assert_that(foo.cargo("build"),
execs().with_status(0)
.with_stderr(&format!("\
[COMPILING] foo v0.0.0 ({url})
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
", url = foo.url())));
}
#[test]
// Depending on a package that only provides a binary (bar/src/main.rs, no lib)
// must not fail the dependent's build.
fn dep_no_libs() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[dependencies.bar]
path = "bar"
"#)
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.file("bar/Cargo.toml", r#"
[package]
name = "bar"
version = "0.0.0"
authors = []
"#)
.file("bar/src/main.rs", "")
.build();
assert_that(foo.cargo("build"),
execs().with_status(0));
}
#[test]
// A lib source path containing a space ("src/my lib.rs") builds, and after
// backdating mtimes the second build is a no-op (empty stdout).
fn recompile_space_in_name() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[lib]
name = "foo"
path = "src/my lib.rs"
"#)
.file("src/my lib.rs", "")
.build();
assert_that(foo.cargo("build"), execs().with_status(0));
foo.root().move_into_the_past();
assert_that(foo.cargo("build"),
execs().with_status(0).with_stdout(""));
}
#[cfg(unix)]
#[test]
// An unreadable/untraversable directory (mode 0o644, no execute bit) inside
// the project must not break the build; permissions are restored afterwards
// so the test dir can be cleaned up.
fn ignore_bad_directories() {
use std::os::unix::prelude::*;
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.build();
let dir = foo.root().join("tmp");
fs::create_dir(&dir).unwrap();
let stat = fs::metadata(&dir).unwrap();
let mut perms = stat.permissions();
// 0o644 on a directory removes the execute (search) permission.
perms.set_mode(0o644);
fs::set_permissions(&dir, perms.clone()).unwrap();
assert_that(foo.cargo("build"),
execs().with_status(0));
perms.set_mode(0o755);
fs::set_permissions(&dir, perms).unwrap();
}
/// An unparseable `.cargo/config` makes the build fail (exit 101) with a
/// chained error explaining that the TOML configuration could not be parsed.
#[test]
fn bad_cargo_config() {
let foo = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.file(".cargo/config", r#"
this is not valid toml
"#)
.build();
assert_that(foo.cargo("build").arg("-v"),
execs().with_status(101).with_stderr("\
[ERROR] Couldn't load Cargo configuration
Caused by:
could not parse TOML configuration in `[..]`
Caused by:
could not parse input as TOML
Caused by:
expected an equals, found an identifier at line 2
"));
}
/// `[target.<triple>.dependencies]`, `.build-dependencies`, and
/// `.dev-dependencies` keyed on the current host triple are all resolved
/// and usable from main code, build script, and tests respectively.
#[test]
fn cargo_platform_specific_dependency() {
let host = rustc_host();
let p = project("foo")
.file("Cargo.toml", &format!(r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
build = "build.rs"
[target.{host}.dependencies]
dep = {{ path = "dep" }}
[target.{host}.build-dependencies]
build = {{ path = "build" }}
[target.{host}.dev-dependencies]
dev = {{ path = "dev" }}
"#, host = host))
.file("src/main.rs", r#"
extern crate dep;
fn main() { dep::dep() }
"#)
.file("tests/foo.rs", r#"
extern crate dev;
#[test]
fn foo() { dev::dev() }
"#)
.file("build.rs", r#"
extern crate build;
fn main() { build::build(); }
"#)
.file("dep/Cargo.toml", r#"
[project]
name = "dep"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("dep/src/lib.rs", "pub fn dep() {}")
.file("build/Cargo.toml", r#"
[project]
name = "build"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("build/src/lib.rs", "pub fn build() {}")
.file("dev/Cargo.toml", r#"
[project]
name = "dev"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("dev/src/lib.rs", "pub fn dev() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
// `cargo test` additionally exercises the dev-dependency.
assert_that(p.cargo("test"),
execs().with_status(0));
}
/// A dependency declared only under a non-matching target
/// (`wrong-target`) is not supplied to the build, so code that uses it
/// fails to compile and `cargo build` exits with 101.
#[test]
fn bad_platform_specific_dependency() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[target.wrong-target.dependencies.bar]
path = "bar"
"#)
.file("src/main.rs",
&main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("bar/src/lib.rs", r#"
extern crate baz;
pub fn gimme() -> String {
format!("")
}
"#)
.build();
assert_that(p.cargo("build"),
execs().with_status(101));
}
/// A dependency keyed on a non-existing target triple is never compiled
/// (its source is deliberately invalid Rust), yet it is still recorded in
/// `Cargo.lock`, and the main binary builds and runs fine.
#[test]
fn cargo_platform_specific_dependency_wrong_platform() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[target.non-existing-triplet.dependencies.bar]
path = "bar"
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("bar/src/lib.rs", r#"
invalid rust file, should not be compiled
"#)
.build();
p.cargo("build").exec_with_output().unwrap();
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_status(0));
// The unused platform dependency must still appear in the lockfile.
let loc = p.root().join("Cargo.lock");
let mut lockfile = String::new();
File::open(&loc).unwrap().read_to_string(&mut lockfile).unwrap();
assert!(lockfile.contains("bar"))
}
/// An example with `crate-type = ["lib"]` builds as a library artifact
/// rather than an executable.
#[test]
fn example_as_lib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["lib"]
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
assert_that(&p.example_lib("ex", "lib"), existing_file());
}
/// An example with `crate-type = ["rlib"]` produces an rlib artifact.
#[test]
fn example_as_rlib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["rlib"]
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
assert_that(&p.example_lib("ex", "rlib"), existing_file());
}
/// An example with `crate-type = ["dylib"]` produces a dynamic library.
#[test]
fn example_as_dylib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["dylib"]
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
assert_that(&p.example_lib("ex", "dylib"), existing_file());
}
/// An example with `crate-type = ["proc-macro"]` builds as a proc-macro
/// artifact. Skipped on non-nightly toolchains since the example source
/// enables `#![feature(proc_macro)]`.
#[test]
fn example_as_proc_macro() {
if !is_nightly() {
return;
}
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["proc-macro"]
"#)
.file("src/lib.rs", "")
.file("examples/ex.rs", "#![feature(proc_macro)]")
.build();
assert_that(p.cargo("build").arg("--example=ex"), execs().with_status(0));
assert_that(&p.example_lib("ex", "proc-macro"), existing_file());
}
/// A binary and an example sharing the name "foo" must not collide:
/// `cargo test --no-run` leaves no plain `bin/foo`, while the example is
/// built under `examples/`. Running the command twice checks the second
/// (fresh) build behaves the same.
#[test]
fn example_bin_same_name() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.file("examples/foo.rs", "fn main() {}")
.build();
p.cargo("test").arg("--no-run").arg("-v")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), is_not(existing_file()));
// We expect a file of the form bin/foo-{metadata_hash}
assert_that(&p.bin("examples/foo"), existing_file());
p.cargo("test").arg("--no-run").arg("-v")
.exec_with_output()
.unwrap();
assert_that(&p.bin("foo"), is_not(existing_file()));
// We expect a file of the form bin/foo-{metadata_hash}
assert_that(&p.bin("examples/foo"), existing_file());
}
/// Deleting the produced binary after a successful `cargo run` must cause
/// the next `cargo run` to relink/rebuild it rather than fail.
#[test]
fn compile_then_delete() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("run").arg("-v"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
if cfg!(windows) {
// On windows unlinking immediately after running often fails, so sleep
sleep_ms(100);
}
fs::remove_file(&p.bin("foo")).unwrap();
assert_that(p.cargo("run").arg("-v"),
execs().with_status(0));
}
/// A transitive dependency (bbbbb, pulled in via aaaaa) is not directly
/// linkable from the root crate: `extern crate bbbbb` in src/main.rs
/// fails with "can't find crate".
#[test]
fn transitive_dependencies_not_available() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.aaaaa]
path = "a"
"#)
.file("src/main.rs", "extern crate bbbbb; extern crate aaaaa; fn main() {}")
.file("a/Cargo.toml", r#"
[package]
name = "aaaaa"
version = "0.0.1"
authors = []
[dependencies.bbbbb]
path = "../b"
"#)
.file("a/src/lib.rs", "extern crate bbbbb;")
.file("b/Cargo.toml", r#"
[package]
name = "bbbbb"
version = "0.0.1"
authors = []
"#)
.file("b/src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(101)
.with_stderr_contains("\
[..] can't find crate for `bbbbb`[..]
"));
}
/// A dependency cycle (foo -> a -> foo) is rejected with a
/// "cyclic package dependency" error rather than looping or crashing.
#[test]
fn cyclic_deps_rejected() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.a]
path = "a"
"#)
.file("src/lib.rs", "")
.file("a/Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[dependencies.foo]
path = ".."
"#)
.file("a/src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(101)
.with_stderr("\
[ERROR] cyclic package dependency: package `a v0.0.1 ([..])` depends on itself
"));
}
/// A lib with `crate-type = ["dylib", "rlib"]` produces artifacts with
/// predictable, platform-appropriate names (`libfoo.rlib` plus the
/// platform's DLL prefix/suffix around "foo") in target/debug.
#[test]
fn predictable_filenames() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate-type = ["dylib", "rlib"]
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0));
assert_that(&p.root().join("target/debug/libfoo.rlib"), existing_file());
// DLL_PREFIX/DLL_SUFFIX make the check portable across platforms.
let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX,
env::consts::DLL_SUFFIX);
assert_that(&p.root().join("target/debug").join(dylib_name),
existing_file());
}
/// A package named "foo-bar" exposes its library as `foo_bar` (dashes
/// mapped to underscores for the crate name) while the produced binary
/// keeps the dashed name.
#[test]
fn dashes_to_underscores() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo-bar"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo_bar; fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0));
assert_that(&p.bin("foo-bar"), existing_file());
}
/// An explicit `[lib] name` containing a dash ("foo-bar") is invalid as a
/// crate name and makes the build fail with exit 101.
#[test]
fn dashes_in_crate_name_bad() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo-bar"
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo_bar; fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(101));
}
/// Setting RUSTC to a nonexistent program makes the build fail up front
/// (cargo probes the compiler with `-vV`) with a clear "could not execute
/// process" error, and nothing is produced.
#[test]
fn rustc_env_var() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build")
.env("RUSTC", "rustc-that-does-not-exist").arg("-v"),
execs().with_status(101)
.with_stderr("\
[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..])
Caused by:
[..]
"));
assert_that(&p.bin("a"), is_not(existing_file()));
}
/// Target filtering flags: `--lib` builds no binaries, while
/// `--bin=a --example=a` builds exactly the named bin and example,
/// leaving the other bin/example untouched.
#[test]
fn filtering() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--lib"),
execs().with_status(0));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(p.cargo("build").arg("--bin=a").arg("--example=a"),
execs().with_status(0));
assert_that(&p.bin("a"), existing_file());
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/a"), existing_file());
assert_that(&p.bin("examples/b"), is_not(existing_file()));
}
/// `cargo build --bins` builds every binary target but none of the
/// examples.
#[test]
fn filtering_implicit_bins() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--bins"),
execs().with_status(0));
assert_that(&p.bin("a"), existing_file());
assert_that(&p.bin("b"), existing_file());
assert_that(&p.bin("examples/a"), is_not(existing_file()));
assert_that(&p.bin("examples/b"), is_not(existing_file()));
}
/// `cargo build --examples` builds every example target but none of the
/// binaries.
#[test]
fn filtering_implicit_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--examples"),
execs().with_status(0));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/a"), existing_file());
assert_that(&p.bin("examples/b"), existing_file());
}
/// A dotfile in src/bin (".a.rs") is not treated as a binary target and
/// does not break target discovery.
#[test]
fn ignore_dotfile() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/bin/.a.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
}
/// Stray Cargo.toml files inside dot-directories (.git, .pc) are ignored
/// during package discovery.
#[test]
fn ignore_dotdirs() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/bin/a.rs", "fn main() {}")
.file(".git/Cargo.toml", "")
.file(".pc/dummy-fix.patch/Cargo.toml", "")
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
}
/// A project whose root directory itself is a dot-directory (".foo")
/// still builds normally.
#[test]
fn dotdir_root() {
let p = ProjectBuilder::new("foo", root().join(".foo"))
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/bin/a.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(0));
}
/// Target-directory overrides: CARGO_TARGET_DIR redirects output; a plain
/// build then uses the default `target/`; and when both the env var and
/// the `build.target-dir` config are set, the env var wins.
#[test]
fn custom_target_dir() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.build();
let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
// Env var alone: output only under foo/target.
assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "foo/target"),
execs().with_status(0));
assert_that(&p.root().join("foo/target/debug").join(&exe_name),
existing_file());
assert_that(&p.root().join("target/debug").join(&exe_name),
is_not(existing_file()));
// No override: default target/ is populated, earlier output remains.
assert_that(p.cargo("build"),
execs().with_status(0));
assert_that(&p.root().join("foo/target/debug").join(&exe_name),
existing_file());
assert_that(&p.root().join("target/debug").join(&exe_name),
existing_file());
// Config sets foo/target, but the env var (bar/target) takes precedence.
fs::create_dir(p.root().join(".cargo")).unwrap();
File::create(p.root().join(".cargo/config")).unwrap().write_all(br#"
[build]
target-dir = "foo/target"
"#).unwrap();
assert_that(p.cargo("build").env("CARGO_TARGET_DIR", "bar/target"),
execs().with_status(0));
assert_that(&p.root().join("bar/target/debug").join(&exe_name),
existing_file());
assert_that(&p.root().join("foo/target/debug").join(&exe_name),
existing_file());
assert_that(&p.root().join("target/debug").join(&exe_name),
existing_file());
}
/// `cargo rustc -- -Zno-trans` forwards the unstable flag to rustc;
/// nightly-only because -Z flags require a nightly compiler.
#[test]
fn rustc_no_trans() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"),
execs().with_status(0));
}
/// Multiple `-p` flags (dependencies d1, d2 plus the root foo) build all
/// named packages, and each produced binary runs and prints its expected
/// output.
#[test]
fn build_multiple_packages() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[dependencies.d2]
path = "d2"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.file("d1/Cargo.toml", r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
[[bin]]
name = "d1"
"#)
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
.file("d2/Cargo.toml", r#"
[package]
name = "d2"
version = "0.0.1"
authors = []
[[bin]]
name = "d2"
doctest = false
"#)
.file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
.build();
assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("d2")
.arg("-p").arg("foo"),
execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("i am foo\n"));
// Dependency binaries land in the shared build dir with the
// platform-specific executable suffix.
let d1_path = &p.build_dir().join("debug")
.join(format!("d1{}", env::consts::EXE_SUFFIX));
let d2_path = &p.build_dir().join("debug")
.join(format!("d2{}", env::consts::EXE_SUFFIX));
assert_that(d1_path, existing_file());
assert_that(process(d1_path), execs().with_status(0).with_stdout("d1"));
assert_that(d2_path, existing_file());
assert_that(process(d2_path),
execs().with_status(0).with_stdout("d2"));
}
/// `-p` with a spec matching no package fails with "matched no packages",
/// both when it is the only spec and when combined with a valid one.
#[test]
fn invalid_spec() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[[bin]]
name = "foo"
"#)
.file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[]))
.file("d1/Cargo.toml", r#"
[package]
name = "d1"
version = "0.0.1"
authors = []
[[bin]]
name = "d1"
"#)
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
.build();
assert_that(p.cargo("build").arg("-p").arg("notAValidDep"),
execs().with_status(101).with_stderr("\
[ERROR] package id specification `notAValidDep` matched no packages
"));
assert_that(p.cargo("build").arg("-p").arg("d1").arg("-p").arg("notAValidDep"),
execs().with_status(101).with_stderr("\
[ERROR] package id specification `notAValidDep` matched no packages
"));
}
/// A Cargo.toml starting with a UTF-8 byte-order mark (U+FEFF) still
/// parses and builds.
#[test]
fn manifest_with_bom_is_ok() {
let p = project("foo")
.file("Cargo.toml", "\u{FEFF}
[package]
name = \"foo\"
version = \"0.0.1\"
authors = []
")
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0));
}
/// `panic = 'abort'` in [profile.dev] is forwarded to rustc as
/// `-C panic=abort`.
#[test]
fn panic_abort_compiles_with_panic_abort() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[profile.dev]
panic = 'abort'
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0)
.with_stderr_contains("[..] -C panic=abort [..]"));
}
#[test]
fn explicit_color_config_is_propagated_to_rustc() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "test"
version = "0.0.0"
authors = []
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build").arg("-v").arg("--color").arg("always"),
execs().with_status(0).with_stderr_contains(
"[..]rustc [..] src[/]lib.rs --color always[..]"));
assert_that(p.cargo("clean"), execs().with_status(0));
assert_that(p.cargo("build").arg("-v").arg("--color").arg("never"),
execs().with_status(0).with_stderr("\
[COMPILING] test v0.0.0 ([..])
[RUNNING] `rustc [..] --color never [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
"));
}
/// `--message-format json` emits compiler-message and compiler-artifact
/// JSON records for each crate. A second (fresh) build repeats the
/// artifact records with `"fresh": true` and omits compiler warnings.
#[test]
fn compiler_json_error_format() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["wycats@example.com"]
[profile.dev]
debug = false # prevent the *.dSYM from affecting the test result
[dependencies.bar]
path = "bar"
"#)
.file("src/main.rs", "fn main() { let unused = 92; }")
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
.file("bar/src/lib.rs", r#"fn dead() {}"#)
.build();
assert_that(p.cargo("build").arg("-v")
.arg("--message-format").arg("json"),
execs().with_status(0).with_json(r#"
{
"reason":"compiler-message",
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"name":"bar",
"src_path":"[..]lib.rs"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"profile": {
"debug_assertions": true,
"debuginfo": null,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"name":"bar",
"src_path":"[..]lib.rs"
},
"filenames":["[..].rlib"],
"fresh": false
}
{
"reason":"compiler-message",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]main.rs"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]main.rs"
},
"profile": {
"debug_assertions": true,
"debuginfo": null,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"filenames": ["[..]"],
"fresh": false
}
"#));
// With fresh build, we should repeat the artifacts,
// but omit compiler warnings.
assert_that(p.cargo("build").arg("-v")
.arg("--message-format").arg("json"),
execs().with_status(0).with_json(r#"
{
"reason":"compiler-artifact",
"profile": {
"debug_assertions": true,
"debuginfo": null,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"name":"bar",
"src_path":"[..]lib.rs"
},
"filenames":["[..].rlib"],
"fresh": true
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]main.rs"
},
"profile": {
"debug_assertions": true,
"debuginfo": null,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"filenames": ["[..]"],
"fresh": true
}
"#));
}
/// An unrecognized `--message-format` value exits with status 1 and an
/// error listing the allowed variants.
#[test]
fn wrong_message_format_option() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--message-format").arg("XML"),
execs().with_status(1)
.with_stderr_contains(
r#"[ERROR] Could not match 'xml' with any of the allowed variants: ["Human", "Json"]"#));
}
/// `--message-format JSON` (case-insensitive) works with `cargo rustc` in
/// release mode: compiler messages and the artifact record (with
/// release-profile values: opt_level "3", no debug assertions) are
/// forwarded as JSON.
#[test]
fn message_format_json_forward_stderr() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/main.rs", "fn main() { let unused = 0; }")
.build();
assert_that(p.cargo("rustc").arg("--release").arg("--bin").arg("foo")
.arg("--message-format").arg("JSON"),
execs().with_status(0)
.with_json(r#"
{
"reason":"compiler-message",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"name":"foo",
"src_path":"[..]"
},
"profile":{
"debug_assertions":false,
"debuginfo":null,
"opt_level":"3",
"overflow_checks": false,
"test":false
},
"features":[],
"filenames":["[..]"],
"fresh": false
}
"#));
}
/// Arbitrary keys under `[package.metadata]` (including nested tables)
/// produce no warnings: stderr contains only the normal compile/finish
/// lines.
#[test]
fn no_warn_about_package_metadata() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[package.metadata]
foo = "bar"
a = true
b = 3
[package.metadata.another]
bar = 3
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr("[..] foo v0.0.1 ([..])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
/// An empty string passed to `--target` is rejected with a
/// "target was empty" error (exit 101).
#[test]
fn cargo_build_empty_target() {
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("--target").arg(""),
execs().with_status(101)
.with_stderr_contains("[..] target was empty"));
}
/// `cargo build --all` in a workspace with a root package and a path
/// dependency member compiles both crates.
#[test]
fn build_all_workspace() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
[workspace]
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.build();
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
.with_stderr("[..] Compiling bar v0.1.0 ([..])\n\
[..] Compiling foo v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
/// `--all --exclude baz` skips the excluded member entirely — baz's
/// source is deliberately uncompilable, so the build succeeding proves it
/// was never compiled.
#[test]
fn build_all_exclude() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar", "baz"]
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.file("baz/Cargo.toml", r#"
[project]
name = "baz"
version = "0.1.0"
"#)
.file("baz/src/lib.rs", r#"
pub fn baz() {
break_the_build();
}
"#)
.build();
assert_that(p.cargo("build")
.arg("--all")
.arg("--exclude")
.arg("baz"),
execs().with_status(0)
.with_stderr_contains("[..]Compiling foo v0.1.0 [..]")
.with_stderr_contains("[..]Compiling bar v0.1.0 [..]")
.with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]"));
}
/// `--all --examples` in a workspace builds the examples of every member
/// (root and `bar`) while leaving all binary targets unbuilt.
#[test]
fn build_all_workspace_implicit_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
[workspace]
"#)
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/c.rs", "fn main() {}")
.file("examples/d.rs", "fn main() {}")
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", "")
.file("bar/src/bin/e.rs", "fn main() {}")
.file("bar/src/bin/f.rs", "fn main() {}")
.file("bar/examples/g.rs", "fn main() {}")
.file("bar/examples/h.rs", "fn main() {}")
.build();
assert_that(p.cargo("build")
.arg("--all").arg("--examples"),
execs().with_status(0)
.with_stderr("[..] Compiling bar v0.1.0 ([..])\n\
[..] Compiling foo v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/c"), existing_file());
assert_that(&p.bin("examples/d"), existing_file());
assert_that(&p.bin("e"), is_not(existing_file()));
assert_that(&p.bin("f"), is_not(existing_file()));
assert_that(&p.bin("examples/g"), existing_file());
assert_that(&p.bin("examples/h"), existing_file());
}
/// `--all` in a virtual workspace (manifest with only [workspace])
/// compiles every member; stderr matchers are order-insensitive because
/// member build order is not guaranteed.
#[test]
fn build_all_virtual_manifest() {
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", r#"
pub fn foo() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.build();
// The order in which foo and bar are built is not guaranteed
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
.with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
/// In a virtual workspace a plain `cargo build` (no `--all`) implicitly
/// builds all members.
#[test]
fn build_virtual_manifest_all_implied() {
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", r#"
pub fn foo() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.build();
// The order in which foo and bar are built is not guaranteed
assert_that(p.cargo("build"),
execs().with_status(0)
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
.with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
/// `-p foo` in a virtual workspace builds only the named member; the
/// other member ("bar") must not appear in the output at all.
#[test]
fn build_virtual_manifest_one_project() {
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", r#"
pub fn foo() {}
"#)
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", r#"
pub fn bar() {}
"#)
.build();
assert_that(p.cargo("build")
.arg("-p").arg("foo"),
execs().with_status(0)
.with_stderr_does_not_contain("bar")
.with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
.with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
/// `--all --examples` in a virtual workspace builds every member's
/// examples and none of the binaries.
#[test]
fn build_all_virtual_manifest_implicit_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[workspace]
members = ["foo", "bar"]
"#)
.file("foo/Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("foo/src/lib.rs", "")
.file("foo/src/bin/a.rs", "fn main() {}")
.file("foo/src/bin/b.rs", "fn main() {}")
.file("foo/examples/c.rs", "fn main() {}")
.file("foo/examples/d.rs", "fn main() {}")
.file("bar/Cargo.toml", r#"
[project]
name = "bar"
version = "0.1.0"
"#)
.file("bar/src/lib.rs", "")
.file("bar/src/bin/e.rs", "fn main() {}")
.file("bar/src/bin/f.rs", "fn main() {}")
.file("bar/examples/g.rs", "fn main() {}")
.file("bar/examples/h.rs", "fn main() {}")
.build();
// The order in which foo and bar are built is not guaranteed
assert_that(p.cargo("build")
.arg("--all").arg("--examples"),
execs().with_status(0)
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling foo v0.1.0 ([..])")
.with_stderr("[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
assert_that(&p.bin("a"), is_not(existing_file()));
assert_that(&p.bin("b"), is_not(existing_file()));
assert_that(&p.bin("examples/c"), existing_file());
assert_that(&p.bin("examples/d"), existing_file());
assert_that(&p.bin("e"), is_not(existing_file()));
assert_that(&p.bin("f"), is_not(existing_file()));
assert_that(&p.bin("examples/g"), existing_file());
assert_that(&p.bin("examples/h"), existing_file());
}
/// A workspace member `a` that depends on a registry crate also named
/// `a` (same version) builds with `--all`: the registry copy is
/// downloaded and both crates are compiled without a name collision.
#[test]
fn build_all_member_dependency_same_name() {
let p = project("workspace")
.file("Cargo.toml", r#"
[workspace]
members = ["a"]
"#)
.file("a/Cargo.toml", r#"
[project]
name = "a"
version = "0.1.0"
[dependencies]
a = "0.1.0"
"#)
.file("a/src/lib.rs", r#"
pub fn a() {}
"#)
.build();
// Publish the same-named crate to the test registry.
Package::new("a", "0.1.0").publish();
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
.with_stderr("[..] Updating registry `[..]`\n\
[..] Downloading a v0.1.0 ([..])\n\
[..] Compiling a v0.1.0\n\
[..] Compiling a v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n"));
}
/// `cargo run --bin other` runs only the selected binary; the "main"
/// binary panics if executed, so success proves the right one ran.
#[test]
fn run_proper_binary() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "main"
[[bin]]
name = "other"
"#)
.file("src/lib.rs", "")
.file("src/bin/main.rs", r#"
fn main() {
panic!("This should never be run.");
}
"#)
.file("src/bin/other.rs", r#"
fn main() {
}
"#)
.build();
assert_that(p.cargo("run").arg("--bin").arg("other"),
execs().with_status(0));
}
/// A binary declared as "foo" whose source lives at src/bin/main.rs is
/// still found and runnable via `cargo run --bin foo`.
#[test]
fn run_proper_binary_main_rs() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
"#)
.file("src/lib.rs", "")
.file("src/bin/main.rs", r#"
fn main() {
}
"#)
.build();
assert_that(p.cargo("run").arg("--bin").arg("foo"),
execs().with_status(0));
}
/// Two [[bin]] entries without explicit paths resolve to src/<name>.rs:
/// bin "foo" comes from src/foo.rs and bin "bar" from src/bar.rs, each
/// printing its own name.
#[test]
fn run_proper_alias_binary_from_src() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#)
.file("src/foo.rs", r#"
fn main() {
println!("foo");
}
"#).file("src/bar.rs", r#"
fn main() {
println!("bar");
}
"#)
.build();
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
);
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("foo\n"));
assert_that(process(&p.bin("bar")),
execs().with_status(0).with_stdout("bar\n"));
}
/// Two [[bin]] entries with only src/main.rs present both fall back to
/// that file: the "foo" and "bar" binaries each print "main".
#[test]
fn run_proper_alias_binary_main_rs() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#)
.file("src/main.rs", r#"
fn main() {
println!("main");
}
"#)
.build();
assert_that(p.cargo("build")
.arg("--all"),
execs().with_status(0)
);
assert_that(process(&p.bin("foo")),
execs().with_status(0).with_stdout("main\n"));
assert_that(process(&p.bin("bar")),
execs().with_status(0).with_stdout("main\n"));
}
/// When both src/foo.rs and src/main.rs exist, the bin named "foo" is
/// taken from src/main.rs — src/foo.rs panics if it were ever run.
#[test]
fn run_proper_binary_main_rs_as_foo() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
"#)
.file("src/foo.rs", r#"
fn main() {
panic!("This should never be run.");
}
"#)
.file("src/main.rs", r#"
fn main() {
}
"#)
.build();
assert_that(p.cargo("run").arg("--bin").arg("foo"),
execs().with_status(0));
}
/// RUSTC_WRAPPER prefixes every rustc invocation: with /usr/bin/env as
/// the wrapper, the verbose output shows `/usr/bin/env rustc …`.
/// Skipped on Windows, which has no /usr/bin/env.
#[test]
fn rustc_wrapper() {
// We don't have /usr/bin/env on Windows.
if cfg!(windows) { return }
let p = project("foo")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
assert_that(p.cargo("build").arg("-v").env("RUSTC_WRAPPER", "/usr/bin/env"),
execs().with_stderr_contains(
"[RUNNING] `/usr/bin/env rustc --crate-name foo [..]")
.with_status(0));
}
/// cdylib build artifacts (per-platform file names) remain in
/// target/debug/deps — they are not lifted to target/debug.
#[test]
fn cdylib_not_lifted() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
[lib]
crate-type = ["cdylib"]
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
// Expected file set differs per platform.
let files = if cfg!(windows) {
vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"]
} else if cfg!(target_os = "macos") {
vec!["libfoo.dylib"]
} else {
vec!["libfoo.so"]
};
for file in files {
println!("checking: {}", file);
assert_that(&p.root().join("target/debug/deps").join(&file),
existing_file());
}
}
/// For a hyphenated package ("foo-bar") the final cdylib outputs in
/// target/debug use underscored names (foo_bar / libfoo_bar) per
/// platform.
#[test]
fn cdylib_final_outputs() {
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo-bar"
authors = []
version = "0.1.0"
[lib]
crate-type = ["cdylib"]
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
// Expected file set differs per platform.
let files = if cfg!(windows) {
vec!["foo_bar.dll.lib", "foo_bar.dll"]
} else if cfg!(target_os = "macos") {
vec!["libfoo_bar.dylib"]
} else {
vec!["libfoo_bar.so"]
};
for file in files {
println!("checking: {}", file);
assert_that(&p.root().join("target/debug").join(&file), existing_file());
}
}
/// Feature `--cfg` flags and build-script-emitted `cargo:rustc-cfg`
/// flags must be passed to rustc in a deterministic order — the exact
/// ordering is pinned by the expected rustc command line.
#[test]
fn deterministic_cfg_flags() {
// This bug is non-deterministic
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
build = "build.rs"
[features]
default = ["f_a", "f_b", "f_c", "f_d"]
f_a = []
f_b = []
f_c = []
f_d = []
"#)
.file("build.rs", r#"
fn main() {
println!("cargo:rustc-cfg=cfg_a");
println!("cargo:rustc-cfg=cfg_b");
println!("cargo:rustc-cfg=cfg_c");
println!("cargo:rustc-cfg=cfg_d");
println!("cargo:rustc-cfg=cfg_e");
}
"#)
.file("src/main.rs", r#"
fn main() {}
"#)
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0)
.with_stderr("\
[COMPILING] foo v0.1.0 [..]
[RUNNING] [..]
[RUNNING] [..]
[RUNNING] `rustc --crate-name foo [..] \
--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\
--cfg[..]f_c[..]--cfg[..]f_d[..] \
--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]"));
}
#[test]
fn explicit_bins_without_paths() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
}
#[test]
fn no_bin_in_src_with_lib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[[bin]]
name = "foo"
"#)
.file("src/lib.rs", "")
.file("src/foo.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr_contains("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `foo` bin, specify bin.path"));
}
#[test]
fn inferred_bins() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
.file("src/bin/baz/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("foo"), existing_file());
assert_that(&p.bin("bar"), existing_file());
assert_that(&p.bin("baz"), existing_file());
}
#[test]
fn inferred_bins_duplicate_name() {
// this should fail, because we have two binaries with the same name
let p = project("bar")
.file("Cargo.toml", r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.file("src/bin/foo.rs", "fn main() {}")
.file("src/bin/foo/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"),
execs().with_status(101)
.with_stderr_contains("\
[..]found duplicate binary name foo, but all binary targets must have a unique name[..]
"));
}
#[test]
fn inferred_bin_path() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[[bin]]
name = "bar"
# Note, no `path` key!
"#)
.file("src/bin/bar/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build"), execs().with_status(0));
assert_that(&p.bin("bar"), existing_file());
}
#[test]
fn inferred_examples() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/lib.rs", "fn main() {}")
.file("examples/bar.rs", "fn main() {}")
.file("examples/baz/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("test"), execs().with_status(0));
assert_that(&p.bin("examples/bar"), existing_file());
assert_that(&p.bin("examples/baz"), existing_file());
}
#[test]
fn inferred_tests() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/lib.rs", "fn main() {}")
.file("tests/bar.rs", "fn main() {}")
.file("tests/baz/main.rs", "fn main() {}")
.build();
assert_that(
p.cargo("test").arg("--test=bar").arg("--test=baz"),
execs().with_status(0));
}
#[test]
fn inferred_benchmarks() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/lib.rs", "fn main() {}")
.file("benches/bar.rs", "fn main() {}")
.file("benches/baz/main.rs", "fn main() {}")
.build();
assert_that(
p.cargo("bench").arg("--bench=bar").arg("--bench=baz"),
execs().with_status(0));
}
#[test]
fn same_metadata_different_directory() {
// A top-level crate built in two different workspaces should have the
// same metadata hash.
let p = project("foo1")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
let output = t!(String::from_utf8(
t!(p.cargo("build").arg("-v").exec_with_output())
.stderr,
));
let metadata = output
.split_whitespace()
.find(|arg| arg.starts_with("metadata="))
.unwrap();
let p = project("foo2")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
assert_that(
p.cargo("build").arg("-v"),
execs().with_status(0).with_stderr_contains(
format!("[..]{}[..]", metadata),
),
);
}
#[test]
fn building_a_dependent_crate_witout_bin_should_fail() {
Package::new("testless", "0.1.0")
.file("Cargo.toml", r#"
[project]
name = "testless"
version = "0.1.0"
[[bin]]
name = "a_bin"
"#)
.file("src/lib.rs", "")
.publish();
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
testless = "0.1.0"
"#)
.file("src/lib.rs", "")
.build();
assert_that(p.cargo("build"),
execs().with_status(101).with_stderr_contains(
"[..]can't find `a_bin` bin, specify bin.path"
));
}
#[test]
fn uplift_dsym_of_bin_on_mac() {
if !cfg!(any(target_os = "macos", target_os = "ios")) {
return
}
let p = project("foo")
.file("Cargo.toml", r#"
[project]
name = "foo"
version = "0.1.0"
"#)
.file("src/main.rs", "fn main() { panic!(); }")
.file("src/bin/b.rs", "fn main() { panic!(); }")
.file("examples/c.rs", "fn main() { panic!(); }")
.file("tests/d.rs", "fn main() { panic!(); }")
.build();
assert_that(
p.cargo("build").arg("--bins").arg("--examples").arg("--tests"),
execs().with_status(0)
);
assert_that(&p.bin("foo.dSYM"), existing_dir());
assert_that(&p.bin("b.dSYM"), existing_dir());
assert!(
p.bin("b.dSYM")
.symlink_metadata()
.expect("read metadata from b.dSYM")
.file_type()
.is_symlink()
);
assert_that(&p.bin("c.dSYM"), is_not(existing_dir()));
assert_that(&p.bin("d.dSYM"), is_not(existing_dir()));
}
// Make sure that `cargo build` chooses the correct profile for building
// targets based on filters (assuming --profile is not specified).
#[test]
fn build_filter_infer_profile() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/t1.rs", "")
.file("benches/b1.rs", "")
.file("examples/ex1.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
--emit=dep-info,link[..]")
);
p.root().join("target").rm_rf();
assert_that(p.cargo("build").arg("-v").arg("--test=t1"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name t1 tests[/]t1.rs --emit=dep-info,link[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
--emit=dep-info,link[..]")
);
p.root().join("target").rm_rf();
assert_that(p.cargo("build").arg("-v").arg("--bench=b1"),
execs().with_status(0)
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]lib.rs --crate-type lib \
--emit=dep-info,link[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name b1 benches[/]b1.rs --emit=dep-info,link \
-C opt-level=3[..]")
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
--emit=dep-info,link[..]")
);
}
#[test]
fn all_targets_no_lib() {
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
"#)
.file("src/main.rs", "fn main() {}")
.build();
assert_that(p.cargo("build").arg("-v").arg("--all-targets"),
execs().with_status(0)
// bin
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --crate-type bin \
--emit=dep-info,link[..]")
// bench
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
-C opt-level=3 --test [..]")
// unit test
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src[/]main.rs --emit=dep-info,link \
-C debuginfo=2 --test [..]")
);
}
#[test]
fn no_linkable_target() {
// Issue 3169. This is currently not an error as per discussion in PR #4797
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[dependencies]
the_lib = { path = "the_lib" }
"#)
.file("src/main.rs", "fn main() {}")
.file("the_lib/Cargo.toml", r#"
[package]
name = "the_lib"
version = "0.1.0"
[lib]
name = "the_lib"
crate-type = ["staticlib"]
"#)
.file("the_lib/src/lib.rs", "pub fn foo() {}")
.build();
assert_that(p.cargo("build"),
execs()
.with_status(0)
.with_stderr_contains("\
[WARNING] The package `the_lib` provides no linkable [..] \
while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]"));
}
|
extern crate ktensor as k;
use std::path::{Path};
use std::fs::{File};
use std::io::{self, BufReader, Read};
use std::error::{Error};
use std::cmp::{min};
fn read_u8(reader: &mut Read) -> u8 {
use std::mem;
let mut buf: [u8; 1] = [0];
match reader.read_exact(&mut buf).map(|_| {
let data: u8 = unsafe { mem::transmute(buf) };
data
}) {
Err(reason) => panic!("failed to read u8: {}", Error::description(&reason)),
Ok(file) => file,
}
}
fn read_u32(reader: &mut Read) -> u32 {
use std::mem;
let mut buf: [u8; 4] = [0, 0, 0, 0];
match reader.read_exact(&mut buf).map(|_| {
let data: u32 = unsafe { mem::transmute(buf) };
data
}) {
Err(reason) => panic!("failed to read u8: {}", Error::description(&reason)),
Ok(file) => file,
}
}
fn read_mnist(labels_path: &Path, labels_checknum: u32, data_path: &Path, data_checknum: u32, samples: usize) {
let mut labels_file = match File::open(&labels_path) {
Err(reason) => panic!("failed to open {}: {}", labels_path.display(), Error::description(&reason)),
Ok(file) => file,
};
let mut data_file = match File::open(&data_path) {
Err(reason) => panic!("failed to open {}: {}", data_path.display(), Error::description(&reason)),
Ok(file) => file,
};
let mut labels_reader = BufReader::new(labels_file);
let mut data_reader = BufReader::new(data_file);
let labels_magic = u32::from_be(read_u32(&mut labels_reader));
let data_magic = u32::from_be(read_u32(&mut data_reader));
println!("{} {}", labels_magic, data_magic);
assert_eq!(labels_magic, labels_checknum);
assert_eq!(data_magic, data_checknum);
}
#[test]
#[ignore]
fn mnist(){
let train_labels_path = Path::new("data/train-labels-idx1-ubyte");
let train_data_path = Path::new("data/train-images-idx3-ubyte");
read_mnist(&train_labels_path, 2049, &train_data_path, 2051, 1);
}
added loading of sample data
extern crate ktensor as k;
use k::{Arc, Vec2, Tensor, Context, Graph, State, Variable};
use std::path::{Path};
use std::fs::{File};
use std::io::{BufReader, Read};
use std::error::{Error};
use std::cmp;
fn read_u8(reader: &mut Read) -> u8 {
use std::mem;
let mut buf: [u8; 1] = [0];
match reader.read_exact(&mut buf).map(|_| {
let data: u8 = unsafe { mem::transmute(buf) };
data
}) {
Err(reason) => panic!("failed to read u8: {}", Error::description(&reason)),
Ok(byte) => byte,
}
}
fn read_u32(reader: &mut Read) -> u32 {
use std::mem;
let mut buf: [u8; 4] = [0, 0, 0, 0];
match reader.read_exact(&mut buf).map(|_| {
let data: u32 = unsafe { mem::transmute(buf) };
data
}) {
Err(reason) => panic!("failed to read u32: {}", Error::description(&reason)),
Ok(byte) => byte,
}
}
fn read_mnist(labels_path: &Path, labels_checknum: u32, data_path: &Path, data_checknum: u32, batch_size: usize, samples: Option<usize>) {
let labels_file = match File::open(&labels_path) {
Err(reason) => panic!("failed to open {}: {}", labels_path.display(), Error::description(&reason)),
Ok(file) => file,
};
let data_file = match File::open(&data_path) {
Err(reason) => panic!("failed to open {}: {}", data_path.display(), Error::description(&reason)),
Ok(file) => file,
};
let ref mut labels_reader = BufReader::new(labels_file);
let ref mut data_reader = BufReader::new(data_file);
let labels_magic = u32::from_be(read_u32(labels_reader));
let data_magic = u32::from_be(read_u32(data_reader));
assert_eq!(labels_magic, labels_checknum);
assert_eq!(data_magic, data_checknum);
let labels_count = u32::from_be(read_u32(labels_reader)) as usize;
let data_count = u32::from_be(read_u32(data_reader)) as usize;
let sample_count = cmp::min(labels_count, data_count);
let sample_count = cmp::min(sample_count, samples.unwrap_or(sample_count));
let rows = u32::from_be(read_u32(data_reader)) as usize;
let columns = u32::from_be(read_u32(data_reader)) as usize;
let mut sample_vec: Vec<(Vec<u8>, Vec<u8>)> = Vec::with_capacity(sample_count/batch_size);
for _ in 0..sample_count/batch_size {
let mut sample_data = vec![0u8; batch_size*rows*columns];
match data_reader.read_exact(sample_data.as_mut()) {
Err(reason) => panic!("failed to read data byte array: {}", Error::description(&reason)),
Ok(_) => (),
}
let mut sample_labels = vec![0u8; batch_size];
match labels_reader.read_exact(sample_labels.as_mut()) {
Err(reason) => panic!("failed to read labels byte array: {}", Error::description(&reason)),
Ok(_) => (),
}
sample_vec.push((sample_data, sample_labels));
}
}
#[test]
#[ignore]
fn mnist(){
let train_labels_path = Path::new("data/train-labels-idx1-ubyte");
let train_data_path = Path::new("data/train-images-idx3-ubyte");
read_mnist(&train_labels_path, 2049, &train_data_path, 2051, 16, Some(4096));
}
|
#![feature(macro_rules)]
extern crate existent;
use existent::{When, Unless};
macro_rules! basic_tests(
($($name:ident - $exp:expr),+) => (
mod test_unless {
use existent::Unless;
$(
#[test]
fn $name() {
assert_eq!(Some($exp), $exp.unless(false));
assert_eq!(None, $exp.unless(true));
}
)+
}
mod test_when {
use existent::When;
$(
#[test]
fn $name() {
assert_eq!(Some($exp), $exp.when(true));
assert_eq!(None, $exp.when(false));
}
)+
}
)
)
basic_tests!(
ints - 4u,
vecs - vec![false, true, false],
tuples - ("Hello", 42i32, 79.4f64),
strings - "World".to_string(),
static_slices - "!!!!!one1on1"
)
#[test]
fn filter_map() {
let xs = vec!["", "This", "", "Has", "Blanks"];
let filtered = xs.into_iter()
.filter_map(|s| s.unless(s.is_empty()))
.collect::<Vec<&str>>();
assert_eq!(filtered, vec!["This", "Has", "Blanks"])
}
#[test]
fn lifetimes_work() {
fn unless<'a>(s: &'a str) -> Option<&'a str> {
s.unless(false)
}
fn when<'a>(s: &'a str) -> Option<&'a str> {
s.when(true)
}
let x = "Abandoning".to_string();
let ys = {
let y = x.as_slice();
unless(y)
};
assert_eq!(Some(x.as_slice()), ys);
let ys = {
let y = x.as_slice();
when(y)
};
assert_eq!(Some(x.as_slice()), ys);
}
#[test]
fn ufcs() {
#[deriving(PartialEq, Show)]
struct Foo(uint);
impl Foo {
fn when(self, _: bool) -> Option<Foo> {
None
}
fn unless(self, _: bool) -> Option<Foo> {
Some(Foo(52))
}
}
assert_eq!(Some(Foo(4)), When::when(Foo(4), true))
assert_eq!(None, When::when(Foo(5), false))
assert_eq!(None, Unless::unless(Foo(6), true))
assert_eq!(Some(Foo(7)), Unless::unless(Foo(7), false))
assert_eq!(None, Foo(8).when(true))
assert_eq!(None, Foo(3).when(false))
assert_eq!(Some(Foo(52)), Foo(2).unless(true))
assert_eq!(Some(Foo(52)), Foo(1).unless(false))
}
#[test]
fn lazy() {
fn expensive_computation(bar: uint) -> uint {
42 * bar
}
let mut bar = 1;
assert_eq!(Some(42), (|| expensive_computation(bar)).unless(false))
assert_eq!(None::<uint>, (|| expensive_computation(bar)).unless(true))
bar = 2;
assert_eq!(Some(84), (|| expensive_computation(bar)).when(true))
assert_eq!(None::<uint>, (|| expensive_computation(bar)).when(false))
}
Macros now require semicolons
#![feature(macro_rules)]
extern crate existent;
use existent::{When, Unless};
macro_rules! basic_tests(
($($name:ident - $exp:expr),+) => (
mod test_unless {
use existent::Unless;
$(
#[test]
fn $name() {
assert_eq!(Some($exp), $exp.unless(false));
assert_eq!(None, $exp.unless(true));
}
)+
}
mod test_when {
use existent::When;
$(
#[test]
fn $name() {
assert_eq!(Some($exp), $exp.when(true));
assert_eq!(None, $exp.when(false));
}
)+
}
)
);
basic_tests!(
ints - 4u,
vecs - vec![false, true, false],
tuples - ("Hello", 42i32, 79.4f64),
strings - "World".to_string(),
static_slices - "!!!!!one1on1"
);
#[test]
fn filter_map() {
let xs = vec!["", "This", "", "Has", "Blanks"];
let filtered = xs.into_iter()
.filter_map(|s| s.unless(s.is_empty()))
.collect::<Vec<&str>>();
assert_eq!(filtered, vec!["This", "Has", "Blanks"]);
}
#[test]
fn lifetimes_work() {
fn unless<'a>(s: &'a str) -> Option<&'a str> {
s.unless(false)
}
fn when<'a>(s: &'a str) -> Option<&'a str> {
s.when(true)
}
let x = "Abandoning".to_string();
let ys = {
let y = x.as_slice();
unless(y)
};
assert_eq!(Some(x.as_slice()), ys);
let ys = {
let y = x.as_slice();
when(y)
};
assert_eq!(Some(x.as_slice()), ys);
}
#[test]
fn ufcs() {
#[deriving(PartialEq, Show)]
struct Foo(uint);
impl Foo {
fn when(self, _: bool) -> Option<Foo> {
None
}
fn unless(self, _: bool) -> Option<Foo> {
Some(Foo(52))
}
}
assert_eq!(Some(Foo(4)), When::when(Foo(4), true));
assert_eq!(None, When::when(Foo(5), false));
assert_eq!(None, Unless::unless(Foo(6), true));
assert_eq!(Some(Foo(7)), Unless::unless(Foo(7), false));
assert_eq!(None, Foo(8).when(true));
assert_eq!(None, Foo(3).when(false));
assert_eq!(Some(Foo(52)), Foo(2).unless(true));
assert_eq!(Some(Foo(52)), Foo(1).unless(false));
}
#[test]
fn lazy() {
fn expensive_computation(bar: uint) -> uint {
42 * bar
}
let mut bar = 1;
assert_eq!(Some(42), (|| expensive_computation(bar)).unless(false));
assert_eq!(None::<uint>, (|| expensive_computation(bar)).unless(true));
bar = 2;
assert_eq!(Some(84), (|| expensive_computation(bar)).when(true));
assert_eq!(None::<uint>, (|| expensive_computation(bar)).when(false));
}
|
mod testenv;
use std::fs;
use std::io::Write;
use std::path::Path;
use std::time::{Duration, SystemTime};
use normpath::PathExt;
use regex::escape;
use crate::testenv::TestEnv;
static DEFAULT_DIRS: &[&str] = &["one/two/three", "one/two/three/directory_foo"];
static DEFAULT_FILES: &[&str] = &[
"a.foo",
"one/b.foo",
"one/two/c.foo",
"one/two/C.Foo2",
"one/two/three/d.foo",
"fdignored.foo",
"gitignored.foo",
".hidden.foo",
"e1 e2",
];
fn get_absolute_root_path(env: &TestEnv) -> String {
let path = env
.test_root()
.normalize()
.expect("absolute path")
.as_path()
.to_str()
.expect("string")
.to_string();
#[cfg(windows)]
let path = path.trim_start_matches(r"\\?\").to_string();
path
}
#[cfg(test)]
fn get_test_env_with_abs_path(dirs: &[&'static str], files: &[&'static str]) -> (TestEnv, String) {
let env = TestEnv::new(dirs, files);
let root_path = get_absolute_root_path(&env);
(env, root_path)
}
#[cfg(test)]
fn create_file_with_size<P: AsRef<Path>>(path: P, size_in_bytes: usize) {
let content = "#".repeat(size_in_bytes);
let mut f = fs::File::create::<P>(path).unwrap();
f.write_all(content.as_bytes()).unwrap();
}
/// Simple test
#[test]
fn test_simple() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(&["a.foo"], "a.foo");
te.assert_output(&["b.foo"], "one/b.foo");
te.assert_output(&["d.foo"], "one/two/three/d.foo");
te.assert_output(
&["foo"],
"a.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// Test each pattern type with an empty pattern.
#[test]
fn test_empty_pattern() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
let expected = "a.foo
e1 e2
one
one/b.foo
one/two
one/two/c.foo
one/two/C.Foo2
one/two/three
one/two/three/d.foo
one/two/three/directory_foo
symlink";
te.assert_output(&["--regex"], expected);
te.assert_output(&["--fixed-strings"], expected);
te.assert_output(&["--glob"], expected);
}
/// Test multiple directory searches
#[test]
fn test_multi_file() {
let dirs = &["test1", "test2"];
let files = &["test1/a.foo", "test1/b.foo", "test2/a.foo"];
let te = TestEnv::new(dirs, files);
te.assert_output(
&["a.foo", "test1", "test2"],
"test1/a.foo
test2/a.foo",
);
te.assert_output(
&["", "test1", "test2"],
"test1/a.foo
test2/a.foo
test1/b.foo",
);
te.assert_output(&["a.foo", "test1"], "test1/a.foo");
te.assert_output(&["b.foo", "test1", "test2"], "test1/b.foo");
}
/// Test search over multiple directory with missing
#[test]
fn test_multi_file_with_missing() {
let dirs = &["real"];
let files = &["real/a.foo", "real/b.foo"];
let te = TestEnv::new(dirs, files);
te.assert_output(&["a.foo", "real", "fake"], "real/a.foo");
te.assert_error(
&["a.foo", "real", "fake"],
"[fd error]: Search path 'fake' is not a directory.",
);
te.assert_output(
&["", "real", "fake"],
"real/a.foo
real/b.foo",
);
te.assert_output(
&["", "real", "fake1", "fake2"],
"real/a.foo
real/b.foo",
);
te.assert_error(
&["", "real", "fake1", "fake2"],
"[fd error]: Search path 'fake1' is not a directory.
[fd error]: Search path 'fake2' is not a directory.",
);
te.assert_failure_with_error(
&["", "fake1", "fake2"],
"[fd error]: Search path 'fake1' is not a directory.
[fd error]: Search path 'fake2' is not a directory.
[fd error]: No valid search paths given.",
);
}
/// Explicit root path
#[test]
fn test_explicit_root_path() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["foo", "one"],
"one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
te.assert_output(
&["foo", "one/two/three"],
"one/two/three/d.foo
one/two/three/directory_foo",
);
te.assert_output_subdirectory(
"one/two",
&["foo", "../../"],
"../../a.foo
../../one/b.foo
../../one/two/c.foo
../../one/two/C.Foo2
../../one/two/three/d.foo
../../one/two/three/directory_foo",
);
te.assert_output_subdirectory(
"one/two/three",
&["", ".."],
"../c.foo
../C.Foo2
../three
../three/d.foo
../three/directory_foo",
);
}
/// Regex searches
#[test]
fn test_regex_searches() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["[a-c].foo"],
"a.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2",
);
te.assert_output(
&["--case-sensitive", "[a-c].foo"],
"a.foo
one/b.foo
one/two/c.foo",
);
}
/// Smart case
#[test]
fn test_smart_case() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["c.foo"],
"one/two/c.foo
one/two/C.Foo2",
);
te.assert_output(&["C.Foo"], "one/two/C.Foo2");
te.assert_output(&["Foo"], "one/two/C.Foo2");
// Only literal uppercase chars should trigger case sensitivity.
te.assert_output(
&["\\Ac"],
"one/two/c.foo
one/two/C.Foo2",
);
te.assert_output(&["\\AC"], "one/two/C.Foo2");
}
/// Case sensitivity (--case-sensitive)
#[test]
fn test_case_sensitive() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(&["--case-sensitive", "c.foo"], "one/two/c.foo");
te.assert_output(&["--case-sensitive", "C.Foo"], "one/two/C.Foo2");
te.assert_output(
&["--ignore-case", "--case-sensitive", "C.Foo"],
"one/two/C.Foo2",
);
}
/// Case insensitivity (--ignore-case)
#[test]
fn test_case_insensitive() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--ignore-case", "C.Foo"],
"one/two/c.foo
one/two/C.Foo2",
);
te.assert_output(
&["--case-sensitive", "--ignore-case", "C.Foo"],
"one/two/c.foo
one/two/C.Foo2",
);
}
/// Glob-based searches (--glob)
#[test]
fn test_glob_searches() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--glob", "*.foo"],
"a.foo
one/b.foo
one/two/c.foo
one/two/three/d.foo",
);
te.assert_output(
&["--glob", "[a-c].foo"],
"a.foo
one/b.foo
one/two/c.foo",
);
te.assert_output(
&["--glob", "[a-c].foo*"],
"a.foo
one/b.foo
one/two/C.Foo2
one/two/c.foo",
);
}
/// Glob-based searches (--glob) in combination with full path searches (--full-path)
#[cfg(not(windows))] // TODO: make this work on Windows
#[test]
fn test_full_path_glob_searches() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--glob", "--full-path", "**/one/**/*.foo"],
"one/b.foo
one/two/c.foo
one/two/three/d.foo",
);
te.assert_output(
&["--glob", "--full-path", "**/one/*/*.foo"],
" one/two/c.foo",
);
te.assert_output(
&["--glob", "--full-path", "**/one/*/*/*.foo"],
" one/two/three/d.foo",
);
}
#[test]
fn test_smart_case_glob_searches() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--glob", "c.foo*"],
"one/two/C.Foo2
one/two/c.foo",
);
te.assert_output(&["--glob", "C.Foo*"], "one/two/C.Foo2");
}
/// Glob-based searches (--glob) in combination with --case-sensitive
#[test]
fn test_case_sensitive_glob_searches() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(&["--glob", "--case-sensitive", "c.foo*"], "one/two/c.foo");
}
/// Glob-based searches (--glob) in combination with --extension
#[test]
fn test_glob_searches_with_extension() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--glob", "--extension", "foo2", "[a-z].*"],
"one/two/C.Foo2",
);
}
/// Make sure that --regex overrides --glob
#[test]
fn test_regex_overrides_glob() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(&["--glob", "--regex", "Foo2$"], "one/two/C.Foo2");
}
/// Full path search (--full-path)
#[test]
fn test_full_path() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
let root = te.system_root();
let prefix = escape(&root.to_string_lossy());
te.assert_output(
&[
"--full-path",
&format!("^{prefix}.*three.*foo$", prefix = prefix),
],
"one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// Hidden files (--hidden)
#[test]
fn test_hidden() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--hidden", "foo"],
".hidden.foo
a.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// Hidden file attribute on Windows
#[cfg(windows)]
#[test]
fn test_hidden_file_attribute() {
use std::os::windows::fs::OpenOptionsExt;
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-setfileattributesa
const FILE_ATTRIBUTE_HIDDEN: u32 = 2;
fs::OpenOptions::new()
.create(true)
.write(true)
.attributes(FILE_ATTRIBUTE_HIDDEN)
.open(te.test_root().join("hidden-file.txt"))
.unwrap();
te.assert_output(&["--hidden", "hidden-file.txt"], "hidden-file.txt");
te.assert_output(&["hidden-file.txt"], "");
}
/// Ignored files (--no-ignore)
#[test]
fn test_no_ignore() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--no-ignore", "foo"],
"a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
te.assert_output(
&["--hidden", "--no-ignore", "foo"],
".hidden.foo
a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// .gitignore and .fdignore
#[test]
fn test_gitignore_and_fdignore() {
let files = &[
"ignored-by-nothing",
"ignored-by-fdignore",
"ignored-by-gitignore",
"ignored-by-both",
];
let te = TestEnv::new(&[], files);
fs::File::create(te.test_root().join(".fdignore"))
.unwrap()
.write_all(b"ignored-by-fdignore\nignored-by-both")
.unwrap();
fs::File::create(te.test_root().join(".gitignore"))
.unwrap()
.write_all(b"ignored-by-gitignore\nignored-by-both")
.unwrap();
te.assert_output(&["ignored"], "ignored-by-nothing");
te.assert_output(
&["--no-ignore-vcs", "ignored"],
"ignored-by-nothing
ignored-by-gitignore",
);
te.assert_output(
&["--no-ignore", "ignored"],
"ignored-by-nothing
ignored-by-fdignore
ignored-by-gitignore
ignored-by-both",
);
}
/// Precedence of .fdignore files
#[test]
fn test_custom_ignore_precedence() {
let dirs = &["inner"];
let files = &["inner/foo"];
let te = TestEnv::new(dirs, files);
// Ignore 'foo' via .gitignore
fs::File::create(te.test_root().join("inner/.gitignore"))
.unwrap()
.write_all(b"foo")
.unwrap();
// Whitelist 'foo' via .fdignore
fs::File::create(te.test_root().join(".fdignore"))
.unwrap()
.write_all(b"!foo")
.unwrap();
te.assert_output(&["foo"], "inner/foo");
te.assert_output(&["--no-ignore-vcs", "foo"], "inner/foo");
te.assert_output(&["--no-ignore", "foo"], "inner/foo");
}
/// VCS ignored files (--no-ignore-vcs)
#[test]
fn test_no_ignore_vcs() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--no-ignore-vcs", "foo"],
"a.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// Custom ignore files (--ignore-file)
#[test]
fn test_custom_ignore_files() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Ignore 'C.Foo2' and everything in 'three'.
fs::File::create(te.test_root().join("custom.ignore"))
.unwrap()
.write_all(b"C.Foo2\nthree")
.unwrap();
te.assert_output(
&["--ignore-file", "custom.ignore", "foo"],
"a.foo
one/b.foo
one/two/c.foo",
);
}
/// Ignored files with ripgrep aliases (-u / -uu)
#[test]
fn test_no_ignore_aliases() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["-u", "foo"],
"a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
te.assert_output(
&["-uu", "foo"],
".hidden.foo
a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// Symlinks (--follow)
#[test]
fn test_follow() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--follow", "c.foo"],
"one/two/c.foo
one/two/C.Foo2
symlink/c.foo
symlink/C.Foo2",
);
}
// File system boundaries (--one-file-system)
// Limited to Unix because, to the best of my knowledge, there is no easy way to test a use case
// file systems mounted into the tree on Windows.
// Not limiting depth causes massive delay under Darwin, see BurntSushi/ripgrep#1429
#[test]
#[cfg(unix)]
fn test_file_system_boundaries() {
// Helper function to get the device ID for a given path
// Inspired by https://github.com/BurntSushi/ripgrep/blob/8892bf648cfec111e6e7ddd9f30e932b0371db68/ignore/src/walk.rs#L1693
fn device_num(path: impl AsRef<Path>) -> u64 {
use std::os::unix::fs::MetadataExt;
path.as_ref().metadata().map(|md| md.dev()).unwrap()
}
// Can't simulate file system boundaries
let te = TestEnv::new(&[], &[]);
let dev_null = Path::new("/dev/null");
// /dev/null should exist in all sane Unixes. Skip if it doesn't exist for some reason.
// Also skip should it be on the same device as the root partition for some reason.
if !dev_null.is_file() || device_num(dev_null) == device_num("/") {
return;
}
te.assert_output(
&["--full-path", "--max-depth", "2", "^/dev/null$", "/"],
"/dev/null",
);
te.assert_output(
&[
"--one-file-system",
"--full-path",
"--max-depth",
"2",
"^/dev/null$",
"/",
],
"",
);
}
#[test]
fn test_follow_broken_symlink() {
let mut te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.create_broken_symlink("broken_symlink")
.expect("Failed to create broken symlink.");
te.assert_output(
&["symlink"],
"broken_symlink
symlink",
);
te.assert_output(
&["--type", "symlink", "symlink"],
"broken_symlink
symlink",
);
te.assert_output(&["--type", "file", "symlink"], "");
te.assert_output(
&["--follow", "--type", "symlink", "symlink"],
"broken_symlink",
);
te.assert_output(&["--follow", "--type", "file", "symlink"], "");
}
/// Null separator (--print0)
#[test]
fn test_print0() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--print0", "foo"],
"a.fooNULL
one/b.fooNULL
one/two/C.Foo2NULL
one/two/c.fooNULL
one/two/three/d.fooNULL
one/two/three/directory_fooNULL",
);
}
/// Maximum depth (--max-depth)
#[test]
fn test_max_depth() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--max-depth", "3"],
"a.foo
e1 e2
one
one/b.foo
one/two
one/two/c.foo
one/two/C.Foo2
one/two/three
symlink",
);
te.assert_output(
&["--max-depth", "2"],
"a.foo
e1 e2
one
one/b.foo
one/two
symlink",
);
te.assert_output(
&["--max-depth", "1"],
"a.foo
e1 e2
one
symlink",
);
}
/// Minimum depth (--min-depth): entries shallower than the limit are omitted.
#[test]
fn test_min_depth() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["--min-depth", "3"],
        "one/two/c.foo
one/two/C.Foo2
one/two/three
one/two/three/d.foo
one/two/three/directory_foo",
    );
    env.assert_output(
        &["--min-depth", "4"],
        "one/two/three/d.foo
one/two/three/directory_foo",
    );
}
/// Exact depth (--exact-depth): only entries at exactly the given depth match.
#[test]
fn test_exact_depth() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["--exact-depth", "3"],
        "one/two/c.foo
one/two/C.Foo2
one/two/three",
    );
}
/// Pruning (--prune): matching directories are reported but not descended into.
#[test]
fn test_prune() {
    let dirs = &["foo/bar", "bar/foo", "baz"];
    let files = &[
        "foo/foo.file",
        "foo/bar/foo.file",
        "bar/foo.file",
        "bar/foo/foo.file",
        "baz/foo.file",
    ];
    let env = TestEnv::new(dirs, files);

    // Without --prune everything under matching directories is listed too.
    env.assert_output(
        &["foo"],
        "foo
foo/foo.file
foo/bar/foo.file
bar/foo.file
bar/foo
bar/foo/foo.file
baz/foo.file",
    );

    // With --prune, the contents of matching directories are skipped.
    env.assert_output(
        &["--prune", "foo"],
        "foo
bar/foo
bar/foo.file
baz/foo.file",
    );
}
/// Absolute paths (--absolute-path): every result is prefixed with the
/// absolute path of the search root.
#[test]
fn test_absolute_path() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);

    env.assert_output(
        &["--absolute-path"],
        &format!(
            "{abs_path}/a.foo
{abs_path}/e1 e2
{abs_path}/one
{abs_path}/one/b.foo
{abs_path}/one/two
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo
{abs_path}/symlink",
            abs_path = &abs_path
        ),
    );

    env.assert_output(
        &["--absolute-path", "foo"],
        &format!(
            "{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
            abs_path = &abs_path
        ),
    );
}
/// Passing an absolute search path implies absolute-path output, even without
/// the --absolute-path flag.
#[test]
fn test_implicit_absolute_path() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["foo", &abs_path],
        &format!(
            "{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
            abs_path = &abs_path
        ),
    );
}
/// Absolute paths are normalized: searching ".." from a subdirectory must not
/// leave ".." components in the reported absolute paths.
#[test]
fn test_normalized_absolute_path() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output_subdirectory(
        "one",
        &["--absolute-path", "foo", ".."],
        &format!(
            "{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
            abs_path = &abs_path
        ),
    );
}
/// File type filter (--type): f(ile), d(irectory), l(symlink), and
/// combinations thereof are additive.
#[test]
fn test_type() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    // Regular files only.
    env.assert_output(
        &["--type", "f"],
        "a.foo
e1 e2
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo",
    );
    env.assert_output(&["--type", "f", "e1"], "e1 e2");

    // Directories only.
    env.assert_output(
        &["--type", "d"],
        "one
one/two
one/two/three
one/two/three/directory_foo",
    );

    // Multiple --type flags are OR-ed together.
    env.assert_output(
        &["--type", "d", "--type", "l"],
        "one
one/two
one/two/three
one/two/three/directory_foo
symlink",
    );

    // Symlinks only.
    env.assert_output(&["--type", "l"], "symlink");
}
/// Test `--type executable` (Unix only): a file created with mode 0o777 is
/// reported as executable.
#[cfg(unix)]
#[test]
fn test_type_executable() {
    use std::os::unix::fs::OpenOptionsExt;

    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    // Create an executable file inside the test root.
    fs::OpenOptions::new()
        .create(true)
        .write(true)
        .mode(0o777)
        .open(env.test_root().join("executable-file.sh"))
        .unwrap();

    env.assert_output(&["--type", "executable"], "executable-file.sh");

    // Combining with --type directory adds all directories to the result.
    env.assert_output(
        &["--type", "executable", "--type", "directory"],
        "executable-file.sh
one
one/two
one/two/three
one/two/three/directory_foo",
    );
}
/// Test `--type empty`: matches zero-byte files and directories without entries.
#[test]
fn test_type_empty() {
    let env = TestEnv::new(&["dir_empty", "dir_nonempty"], &[]);

    create_file_with_size(env.test_root().join("0_bytes.foo"), 0);
    create_file_with_size(env.test_root().join("5_bytes.foo"), 5);
    create_file_with_size(env.test_root().join("dir_nonempty").join("2_bytes.foo"), 2);

    env.assert_output(
        &["--type", "empty"],
        "0_bytes.foo
dir_empty",
    );
    // Explicitly allowing both files and directories is equivalent.
    env.assert_output(
        &["--type", "empty", "--type", "file", "--type", "directory"],
        "0_bytes.foo
dir_empty",
    );
    // Restricting to a single kind narrows the match accordingly.
    env.assert_output(&["--type", "empty", "--type", "file"], "0_bytes.foo");
    env.assert_output(&["--type", "empty", "--type", "directory"], "dir_empty");
}
/// File extension (--extension): leading dot optional, multiple flags additive,
/// multi-part and non-ASCII extensions supported.
#[test]
fn test_extension() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    env.assert_output(
        &["--extension", "foo"],
        "a.foo
one/b.foo
one/two/c.foo
one/two/three/d.foo",
    );
    // A leading dot is accepted and behaves identically.
    env.assert_output(
        &["--extension", ".foo"],
        "a.foo
one/b.foo
one/two/c.foo
one/two/three/d.foo",
    );
    // Multiple extensions are OR-ed.
    env.assert_output(
        &["--extension", ".foo", "--extension", "foo2"],
        "a.foo
one/b.foo
one/two/c.foo
one/two/three/d.foo
one/two/C.Foo2",
    );
    env.assert_output(&["--extension", ".foo", "a"], "a.foo");
    env.assert_output(&["--extension", "foo2"], "one/two/C.Foo2");

    // Multi-part extensions such as ".bar.baz".
    let env2 = TestEnv::new(&[], &["spam.bar.baz", "egg.bar.baz", "yolk.bar.baz.sig"]);
    env2.assert_output(
        &["--extension", ".bar.baz"],
        "spam.bar.baz
egg.bar.baz",
    );
    env2.assert_output(&["--extension", "sig"], "yolk.bar.baz.sig");
    env2.assert_output(&["--extension", "bar.baz.sig"], "yolk.bar.baz.sig");

    // Non-ASCII extensions (combining characters, symbols).
    let env3 = TestEnv::new(&[], &["latin1.e\u{301}xt", "smiley.☻"]);
    env3.assert_output(&["--extension", "☻"], "smiley.☻");
    env3.assert_output(&["--extension", ".e\u{301}xt"], "latin1.e\u{301}xt");

    // A hidden file's leading dot is not an extension separator.
    let env4 = TestEnv::new(&[], &[".hidden", "test.hidden"]);
    env4.assert_output(&["--hidden", "--extension", ".hidden"], "test.hidden");
}
/// No file extension (the regex suggested in the --help text): `^[^.]+$`
/// matches names without any dot.
#[test]
fn test_no_extension() {
    let env = TestEnv::new(
        DEFAULT_DIRS,
        &["a.foo", "aa", "one/b.foo", "one/bb", "one/two/three/d"],
    );

    env.assert_output(
        &["^[^.]+$"],
        "aa
one
one/bb
one/two
one/two/three
one/two/three/d
one/two/three/directory_foo
symlink",
    );
    // Restricted to regular files.
    env.assert_output(
        &["^[^.]+$", "--type", "file"],
        "aa
one/bb
one/two/three/d",
    );
}
/// Symlink as search directory: searching "../.." (Unix) or ".." (Windows)
/// from inside a symlinked directory lists the whole test root.
#[test]
fn test_symlink_as_root() {
    let mut env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.create_broken_symlink("broken_symlink")
        .expect("Failed to create broken symlink.");

    // From: http://pubs.opengroup.org/onlinepubs/9699919799/functions/getcwd.html
    // The getcwd() function shall place an absolute pathname of the current working directory in
    // the array pointed to by buf, and return buf. The pathname shall contain no components that
    // are dot or dot-dot, or are symbolic links.
    //
    // Key points:
    // 1. The path of the current working directory of a Unix process cannot contain symlinks.
    // 2. The path of the current working directory of a Windows process can contain symlinks.
    //
    // More:
    // 1. On Windows, symlinks are resolved after the ".." component.
    // 2. On Unix, symlinks are resolved immediately as encountered.
    let parent_parent = if cfg!(windows) { ".." } else { "../.." };
    env.assert_output_subdirectory(
        "symlink",
        &["", parent_parent],
        &format!(
            "{dir}/a.foo
{dir}/broken_symlink
{dir}/e1 e2
{dir}/one
{dir}/one/b.foo
{dir}/one/two
{dir}/one/two/c.foo
{dir}/one/two/C.Foo2
{dir}/one/two/three
{dir}/one/two/three/d.foo
{dir}/one/two/three/directory_foo
{dir}/symlink",
            dir = &parent_parent
        ),
    );
}
/// --absolute-path from inside a symlinked directory: Unix resolves the
/// symlink (one/two), Windows keeps the symlink component in the path.
#[test]
fn test_symlink_and_absolute_path() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    let expected_path = if cfg!(windows) { "symlink" } else { "one/two" };
    env.assert_output_subdirectory(
        "symlink",
        &["--absolute-path"],
        &format!(
            "{abs_path}/{expected_path}/c.foo
{abs_path}/{expected_path}/C.Foo2
{abs_path}/{expected_path}/three
{abs_path}/{expected_path}/three/d.foo
{abs_path}/{expected_path}/three/directory_foo",
            abs_path = &abs_path,
            expected_path = expected_path
        ),
    );
}
/// An absolute symlink path given as the search root is kept verbatim in the
/// output (not resolved to its target).
#[test]
fn test_symlink_as_absolute_root() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["", &format!("{abs_path}/symlink", abs_path = abs_path)],
        &format!(
            "{abs_path}/symlink/c.foo
{abs_path}/symlink/C.Foo2
{abs_path}/symlink/three
{abs_path}/symlink/three/d.foo
{abs_path}/symlink/three/directory_foo",
            abs_path = &abs_path
        ),
    );
}
/// --full-path matching combined with --absolute-path when searching from a
/// symlinked subdirectory.
#[test]
fn test_symlink_and_full_path() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    let root = env.system_root();
    // Escape the system root so it can be embedded in a regex literally.
    let prefix = escape(&root.to_string_lossy());
    let expected_path = if cfg!(windows) { "symlink" } else { "one/two" };
    env.assert_output_subdirectory(
        "symlink",
        &[
            "--absolute-path",
            "--full-path",
            &format!("^{prefix}.*three", prefix = prefix),
        ],
        &format!(
            "{abs_path}/{expected_path}/three
{abs_path}/{expected_path}/three/d.foo
{abs_path}/{expected_path}/three/directory_foo",
            abs_path = &abs_path,
            expected_path = expected_path
        ),
    );
}
/// --full-path matching against an absolute symlink search root.
#[test]
fn test_symlink_and_full_path_abs_path() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    let root = env.system_root();
    // Escape the system root so it can be embedded in a regex literally.
    let prefix = escape(&root.to_string_lossy());
    env.assert_output(
        &[
            "--full-path",
            &format!("^{prefix}.*symlink.*three", prefix = prefix),
            &format!("{abs_path}/symlink", abs_path = abs_path),
        ],
        &format!(
            "{abs_path}/symlink/three
{abs_path}/symlink/three/d.foo
{abs_path}/symlink/three/directory_foo",
            abs_path = &abs_path
        ),
    );
}
/// Exclude patterns (--exclude): glob-based exclusions, repeatable, applied to
/// both plain listings and pattern searches.
#[test]
fn test_excludes() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    env.assert_output(
        &["--exclude", "*.foo"],
        "one
one/two
one/two/C.Foo2
one/two/three
one/two/three/directory_foo
e1 e2
symlink",
    );

    // Multiple --exclude flags stack.
    env.assert_output(
        &["--exclude", "*.foo", "--exclude", "*.Foo2"],
        "one
one/two
one/two/three
one/two/three/directory_foo
e1 e2
symlink",
    );

    // Exclusions combine with a search pattern.
    env.assert_output(
        &["--exclude", "*.foo", "--exclude", "*.Foo2", "foo"],
        "one/two/three/directory_foo",
    );

    // Excluding a directory prunes everything beneath it.
    env.assert_output(
        &["--exclude", "one/two", "foo"],
        "a.foo
one/b.foo",
    );

    // Recursive glob exclusion.
    env.assert_output(
        &["--exclude", "one/**/*.foo"],
        "a.foo
e1 e2
one
one/two
one/two/C.Foo2
one/two/three
one/two/three/directory_foo
symlink",
    );
}
/// Command execution (--exec): each placeholder token ({}, {.}, {/}, {/.},
/// {//}) expands per result.
#[test]
fn test_exec() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    // TODO Windows tests: D:file.txt \file.txt \\server\share\file.txt ...
    if !cfg!(windows) {
        // No placeholder: the path is appended as the last argument.
        env.assert_output(
            &["--absolute-path", "foo", "--exec", "echo"],
            &format!(
                "{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/c.foo
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
                abs_path = &abs_path
            ),
        );
        // {} — the full path.
        env.assert_output(
            &["foo", "--exec", "echo", "{}"],
            "a.foo
one/b.foo
one/two/C.Foo2
one/two/c.foo
one/two/three/d.foo
one/two/three/directory_foo",
        );
        // {.} — path without extension.
        env.assert_output(
            &["foo", "--exec", "echo", "{.}"],
            "a
one/b
one/two/C
one/two/c
one/two/three/d
one/two/three/directory_foo",
        );
        // {/} — basename.
        env.assert_output(
            &["foo", "--exec", "echo", "{/}"],
            "a.foo
b.foo
C.Foo2
c.foo
d.foo
directory_foo",
        );
        // {/.} — basename without extension.
        env.assert_output(
            &["foo", "--exec", "echo", "{/.}"],
            "a
b
C
c
d
directory_foo",
        );
        // {//} — parent directory.
        env.assert_output(
            &["foo", "--exec", "echo", "{//}"],
            ".
one
one/two
one/two
one/two/three
one/two/three",
        );
        // Filenames containing spaces are passed as a single argument.
        env.assert_output(&["e1", "--exec", "printf", "%s.%s\n"], "e1 e2.");
    }
}
/// Batched command execution (--exec-batch): all results are passed to a
/// single command invocation; misuse of placeholders is rejected.
#[test]
fn test_exec_batch() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    let env = env.normalize_line(true);
    // TODO Test for windows
    if !cfg!(windows) {
        // No placeholder: all paths are appended to one invocation.
        env.assert_output(
            &["--absolute-path", "foo", "--exec-batch", "echo"],
            &format!(
                "{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/c.foo {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo",
                abs_path = &abs_path
            ),
        );
        env.assert_output(
            &["foo", "--exec-batch", "echo", "{}"],
            "a.foo one/b.foo one/two/C.Foo2 one/two/c.foo one/two/three/d.foo one/two/three/directory_foo",
        );
        env.assert_output(
            &["foo", "--exec-batch", "echo", "{/}"],
            "a.foo b.foo C.Foo2 c.foo d.foo directory_foo",
        );
        // No matches: the command is never run.
        env.assert_output(
            &["no_match", "--exec-batch", "echo", "Matched: ", "{/}"],
            "",
        );
        // Error cases: duplicate placeholders, conflicting flags, missing or
        // non-executable command.
        env.assert_failure_with_error(
            &["foo", "--exec-batch", "echo", "{}", "{}"],
            "[fd error]: Only one placeholder allowed for batch commands",
        );
        env.assert_failure_with_error(
            &["foo", "--exec-batch", "echo", "{/}", ";", "-x", "echo"],
            "error: The argument '--exec <cmd>' cannot be used with '--exec-batch <cmd>'",
        );
        env.assert_failure_with_error(
            &["foo", "--exec-batch"],
            "error: The argument '--exec-batch <cmd>' requires a value but none was supplied",
        );
        env.assert_failure_with_error(
            &["foo", "--exec-batch", "echo {}"],
            "[fd error]: First argument of exec-batch is expected to be a fixed executable",
        );
    }
}
/// Command execution (--exec) combined with a custom --path-separator: all
/// placeholder expansions use the custom separator.
#[test]
fn test_exec_with_separator() {
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);

    env.assert_output(
        &[
            "--path-separator=#",
            "--absolute-path",
            "foo",
            "--exec",
            "echo",
        ],
        &format!(
            "{abs_path}#a.foo
{abs_path}#one#b.foo
{abs_path}#one#two#C.Foo2
{abs_path}#one#two#c.foo
{abs_path}#one#two#three#d.foo
{abs_path}#one#two#three#directory_foo",
            // The root itself must also use the custom separator.
            abs_path = abs_path.replace(std::path::MAIN_SEPARATOR, "#"),
        ),
    );
    env.assert_output(
        &["--path-separator=#", "foo", "--exec", "echo", "{}"],
        "a.foo
one#b.foo
one#two#C.Foo2
one#two#c.foo
one#two#three#d.foo
one#two#three#directory_foo",
    );
    env.assert_output(
        &["--path-separator=#", "foo", "--exec", "echo", "{.}"],
        "a
one#b
one#two#C
one#two#c
one#two#three#d
one#two#three#directory_foo",
    );
    env.assert_output(
        &["--path-separator=#", "foo", "--exec", "echo", "{/}"],
        "a.foo
b.foo
C.Foo2
c.foo
d.foo
directory_foo",
    );
    env.assert_output(
        &["--path-separator=#", "foo", "--exec", "echo", "{/.}"],
        "a
b
C
c
d
directory_foo",
    );
    env.assert_output(
        &["--path-separator=#", "foo", "--exec", "echo", "{//}"],
        ".
one
one#two
one#two
one#two#three
one#two#three",
    );
    env.assert_output(
        &["--path-separator=#", "e1", "--exec", "printf", "%s.%s\n"],
        "e1 e2.",
    );
}
/// Literal search (--fixed-strings): regex metacharacters in the pattern are
/// taken verbatim.
#[test]
fn test_fixed_strings() {
    let dirs = &["test1", "test2"];
    let files = &["test1/a.foo", "test1/a_foo", "test2/Download (1).tar.gz"];
    let env = TestEnv::new(dirs, files);

    // Regex search: "." matches any character.
    env.assert_output(
        &["a.foo"],
        "test1/a.foo
test1/a_foo",
    );
    // Literal search: "." only matches a dot.
    env.assert_output(&["--fixed-strings", "a.foo"], "test1/a.foo");

    // Regex search: parens form a group and match nothing here.
    env.assert_output(&["download (1)"], "");
    // Literal search: parens are plain characters.
    env.assert_output(
        &["--fixed-strings", "download (1)"],
        "test2/Download (1).tar.gz",
    );

    // Literal search combined with --case-sensitive.
    env.assert_output(&["--fixed-strings", "--case-sensitive", "download (1)"], "");
}
/// Filenames containing invalid UTF-8 byte sequences (Linux only): they are
/// found and printed with the Unicode replacement character.
#[cfg(target_os = "linux")]
#[test]
fn test_invalid_utf8() {
    use std::ffi::OsStr;
    use std::os::unix::ffi::OsStrExt;

    let dirs = &["test1"];
    let files = &[];
    let env = TestEnv::new(dirs, files);

    // 0xFE is never valid in UTF-8.
    fs::File::create(
        env.test_root()
            .join(OsStr::from_bytes(b"test1/test_\xFEinvalid.txt")),
    )
    .unwrap();

    env.assert_output(&["", "test1/"], "test1/test_�invalid.txt");
    env.assert_output(&["invalid", "test1/"], "test1/test_�invalid.txt");
    // Should not be found under a different extension
    env.assert_output(&["-e", "zip", "", "test1/"], "");
}
/// Filtering for file size (--size): +N (at least), -N (at most), N (exactly),
/// with both decimal (k) and binary (ki) unit suffixes.
#[test]
fn test_size() {
    let env = TestEnv::new(&[], &[]);
    create_file_with_size(env.test_root().join("0_bytes.foo"), 0);
    create_file_with_size(env.test_root().join("11_bytes.foo"), 11);
    create_file_with_size(env.test_root().join("30_bytes.foo"), 30);
    create_file_with_size(env.test_root().join("3_kilobytes.foo"), 3 * 1000);
    create_file_with_size(env.test_root().join("4_kibibytes.foo"), 4 * 1024);

    // Zero and non-zero sized files.
    env.assert_output(
        &["", "--size", "+0B"],
        "0_bytes.foo
11_bytes.foo
30_bytes.foo
3_kilobytes.foo
4_kibibytes.foo",
    );

    // Zero sized files, via every equivalent spelling of the flag.
    env.assert_output(&["", "--size", "-0B"], "0_bytes.foo");
    env.assert_output(&["", "--size", "0B"], "0_bytes.foo");
    env.assert_output(&["", "--size=0B"], "0_bytes.foo");
    env.assert_output(&["", "-S", "0B"], "0_bytes.foo");

    // Files with 2 bytes or more.
    env.assert_output(
        &["", "--size", "+2B"],
        "11_bytes.foo
30_bytes.foo
3_kilobytes.foo
4_kibibytes.foo",
    );
    // Files with 2 bytes or less.
    env.assert_output(&["", "--size", "-2B"], "0_bytes.foo");
    // Files with size between 1 byte and 11 bytes.
    env.assert_output(&["", "--size", "+1B", "--size", "-11B"], "11_bytes.foo");
    // Files with size equal 11 bytes.
    env.assert_output(&["", "--size", "11B"], "11_bytes.foo");
    // Files with size between 1 byte and 30 bytes.
    env.assert_output(
        &["", "--size", "+1B", "--size", "-30B"],
        "11_bytes.foo
30_bytes.foo",
    );
    // Combine with a search pattern
    env.assert_output(&["^11_", "--size", "+1B", "--size", "-30B"], "11_bytes.foo");
    // Files with size between 12 and 30 bytes.
    env.assert_output(&["", "--size", "+12B", "--size", "-30B"], "30_bytes.foo");
    // Files with size between 31 and 100 bytes.
    env.assert_output(&["", "--size", "+31B", "--size", "-100B"], "");
    // Files with size between 3 kibibytes and 5 kibibytes.
    env.assert_output(&["", "--size", "+3ki", "--size", "-5ki"], "4_kibibytes.foo");
    // Files with size between 3 kilobytes and 5 kilobytes.
    env.assert_output(
        &["", "--size", "+3k", "--size", "-5k"],
        "3_kilobytes.foo
4_kibibytes.foo",
    );
    // Files with size greater than 3 kilobytes and less than 3 kibibytes.
    env.assert_output(&["", "--size", "+3k", "--size", "-3ki"], "3_kilobytes.foo");
    // Files with size equal 4 kibibytes.
    env.assert_output(&["", "--size", "+4ki", "--size", "-4ki"], "4_kibibytes.foo");
    env.assert_output(&["", "--size", "4ki"], "4_kibibytes.foo");
}
/// Create an empty file whose access/modification time lies
/// `duration_in_secs` seconds in the past.
#[cfg(test)]
fn create_file_with_modified<P: AsRef<Path>>(path: P, duration_in_secs: u64) {
    let timestamp = SystemTime::now() - Duration::from_secs(duration_in_secs);
    let mtime = filetime::FileTime::from_system_time(timestamp);
    fs::File::create(&path).expect("creation failed");
    // Set both atime and mtime to the computed timestamp.
    filetime::set_file_times(&path, mtime, mtime).expect("time modification failed");
}
/// Relative modification-time filters (--changed-within / --change-older-than).
#[test]
fn test_modified_relative() {
    let env = TestEnv::new(&[], &[]);
    create_file_with_modified(env.test_root().join("foo_0_now"), 0);
    create_file_with_modified(env.test_root().join("bar_1_min"), 60);
    create_file_with_modified(env.test_root().join("foo_10_min"), 600);
    create_file_with_modified(env.test_root().join("bar_1_h"), 60 * 60);
    create_file_with_modified(env.test_root().join("foo_2_h"), 2 * 60 * 60);
    create_file_with_modified(env.test_root().join("bar_1_day"), 24 * 60 * 60);

    env.assert_output(
        &["", "--changed-within", "15min"],
        "foo_0_now
bar_1_min
foo_10_min",
    );
    env.assert_output(
        &["", "--change-older-than", "15min"],
        "bar_1_h
foo_2_h
bar_1_day",
    );
    // Time filters compose with a name pattern.
    env.assert_output(
        &["foo", "--changed-within", "12h"],
        "foo_0_now
foo_10_min
foo_2_h",
    );
}
/// Set the access and modification time of `path` to the given RFC 3339 date.
///
/// Panics if the date cannot be parsed or the file times cannot be changed.
#[cfg(test)]
fn change_file_modified<P: AsRef<Path>>(path: P, iso_date: &str) {
    let st = humantime::parse_rfc3339(iso_date).expect("invalid date");
    let ft = filetime::FileTime::from_system_time(st);
    // Bug fix: the panic message previously read "time modification failde".
    filetime::set_file_times(path, ft, ft).expect("time modification failed");
}
/// Absolute modification-time filters (--change-newer-than / --changed-before).
/// NOTE(review): "asolute" in the test name is a typo for "absolute"; the name
/// is kept to avoid churn.
#[test]
fn test_modified_asolute() {
    let env = TestEnv::new(&[], &["15mar2018", "30dec2017"]);
    change_file_modified(env.test_root().join("15mar2018"), "2018-03-15T12:00:00Z");
    change_file_modified(env.test_root().join("30dec2017"), "2017-12-30T23:59:00Z");

    env.assert_output(
        &["", "--change-newer-than", "2018-01-01 00:00:00"],
        "15mar2018",
    );
    env.assert_output(
        &["", "--changed-before", "2018-01-01 00:00:00"],
        "30dec2017",
    );
}
/// Custom path separator (--path-separator) replaces the platform separator in
/// printed paths.
#[test]
fn test_custom_path_separator() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["foo", "one", "--path-separator", "="],
        "one=b.foo
one=two=c.foo
one=two=C.Foo2
one=two=three=d.foo
one=two=three=directory_foo",
    );
}
/// Base directory (--base-directory): results are reported relative to the
/// given base, unless an absolute search path overrides it.
#[test]
fn test_base_directory() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["--base-directory", "one"],
        "b.foo
two
two/c.foo
two/C.Foo2
two/three
two/three/d.foo
two/three/directory_foo",
    );
    env.assert_output(
        &["--base-directory", "one/two", "foo"],
        "c.foo
C.Foo2
three/d.foo
three/directory_foo",
    );

    // Explicit root path relative to the base directory.
    env.assert_output(
        &["--base-directory", "one", "foo", "two"],
        "two/c.foo
two/C.Foo2
two/three/d.foo
two/three/directory_foo",
    );

    // An absolute search path makes the base directory irrelevant.
    let (env, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
    let abs_base_dir = &format!("{abs_path}/one/two", abs_path = &abs_path);
    env.assert_output(
        &["--base-directory", abs_base_dir, "foo", &abs_path],
        &format!(
            "{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
            abs_path = &abs_path
        ),
    );
}
/// Result limiting (--max-results / -1): 0 means unlimited; with a limit of
/// one either candidate may win the race.
#[test]
fn test_max_results() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    // Unrestricted (0 disables the limit).
    env.assert_output(
        &["--max-results=0", "c.foo"],
        "one/two/C.Foo2
one/two/c.foo",
    );
    // Limited to two results — both matches fit.
    env.assert_output(
        &["--max-results=2", "c.foo"],
        "one/two/C.Foo2
one/two/c.foo",
    );

    // Limited to one result: traversal order is nondeterministic, so accept
    // either match.
    let assert_just_one_result_with_option = |option| {
        let output = env.assert_success_and_get_output(".", &[option, "c.foo"]);
        let stdout = String::from_utf8_lossy(&output.stdout)
            .trim()
            .replace(&std::path::MAIN_SEPARATOR.to_string(), "/");
        assert!(stdout == "one/two/C.Foo2" || stdout == "one/two/c.foo");
    };
    assert_just_one_result_with_option("--max-results=1");
    assert_just_one_result_with_option("-1");
}
/// Filenames with non-utf8 paths are passed to the executed program unchanged
///
/// Note:
/// - the test is disabled on Darwin/OSX, since it coerces file names to UTF-8,
///   even when the requested file name is not valid UTF-8.
/// - the test is currently disabled on Windows because I'm not sure how to create
///   invalid UTF-8 files on Windows
#[cfg(all(unix, not(target_os = "macos")))]
#[test]
fn test_exec_invalid_utf8() {
    use std::ffi::OsStr;
    use std::os::unix::ffi::OsStrExt;

    let dirs = &["test1"];
    let files = &[];
    let env = TestEnv::new(dirs, files);

    // 0xFE is never valid UTF-8.
    fs::File::create(
        env.test_root()
            .join(OsStr::from_bytes(b"test1/test_\xFEinvalid.txt")),
    )
    .unwrap();

    // Each placeholder must forward the raw bytes untouched.
    env.assert_output_raw(
        &["", "test1/", "--exec", "echo", "{}"],
        b"test1/test_\xFEinvalid.txt\n",
    );
    env.assert_output_raw(
        &["", "test1/", "--exec", "echo", "{/}"],
        b"test_\xFEinvalid.txt\n",
    );
    env.assert_output_raw(&["", "test1/", "--exec", "echo", "{//}"], b"test1\n");
    env.assert_output_raw(
        &["", "test1/", "--exec", "echo", "{.}"],
        b"test1/test_\xFEinvalid\n",
    );
    env.assert_output_raw(
        &["", "test1/", "--exec", "echo", "{/.}"],
        b"test_\xFEinvalid\n",
    );
}
/// Smoke test for --list-details: only checks that the invocation succeeds.
#[test]
fn test_list_details() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    // Make sure we can execute 'fd --list-details' without any errors.
    env.assert_success_and_get_output(".", &["--list-details"]);
}
/// Numeric flags must reject non-numeric (and, for --threads, zero) values.
#[test]
fn test_number_parsing_errors() {
    let env = TestEnv::new(&[], &[]);

    env.assert_failure(&["--threads=a"]);
    env.assert_failure(&["-j", ""]);
    env.assert_failure(&["--threads=0"]);

    env.assert_failure(&["--min-depth=a"]);
    env.assert_failure(&["--max-depth=a"]);
    env.assert_failure(&["--maxdepth=a"]);
    env.assert_failure(&["--exact-depth=a"]);

    env.assert_failure(&["--max-buffer-time=a"]);
    env.assert_failure(&["--max-results=a"]);
}
/// Print error if search pattern starts with a dot and --hidden is not set
/// (Unix only, hidden files on Windows work differently)
#[test]
#[cfg(unix)]
fn test_error_if_hidden_not_set_and_pattern_starts_with_dot() {
    let env = TestEnv::new(&[], &[".gitignore", ".whatever", "non-hidden"]);

    // A leading-dot pattern without --hidden is rejected outright.
    env.assert_failure(&["^\\.gitignore"]);
    env.assert_failure(&["--glob", ".gitignore"]);

    // Adding --hidden makes the same patterns work.
    env.assert_output(&["--hidden", "^\\.gitignore"], ".gitignore");
    env.assert_output(&["--hidden", "--glob", ".gitignore"], ".gitignore");

    // A dot that is not the first character is fine but matches nothing here.
    env.assert_output(&[".gitignore"], "");
}
// Silence a wrong clippy warning (see the #[allow(clippy::let_and_return)] on get_absolute_root_path).
mod testenv;
use std::fs;
use std::io::Write;
use std::path::Path;
use std::time::{Duration, SystemTime};
use normpath::PathExt;
use regex::escape;
use crate::testenv::TestEnv;
// Directory tree shared by most tests: three nested levels plus a directory
// whose name matches the common "foo" pattern.
static DEFAULT_DIRS: &[&str] = &["one/two/three", "one/two/three/directory_foo"];

// Default file set; deliberately includes entries that exercise ignore rules
// (fdignored.foo, gitignored.foo), hidden files (.hidden.foo), and a filename
// containing a space ("e1 e2").
static DEFAULT_FILES: &[&str] = &[
    "a.foo",
    "one/b.foo",
    "one/two/c.foo",
    "one/two/C.Foo2",
    "one/two/three/d.foo",
    "fdignored.foo",
    "gitignored.foo",
    ".hidden.foo",
    "e1 e2",
];
// Return the normalized absolute path of the test root as a String.
// The let-and-return shape is intentional: on Windows the binding is shadowed
// below, so clippy's warning is a false positive here.
#[allow(clippy::let_and_return)]
fn get_absolute_root_path(env: &TestEnv) -> String {
    let path = env
        .test_root()
        .normalize()
        .expect("absolute path")
        .as_path()
        .to_str()
        .expect("string")
        .to_string();

    // On Windows, normalize() yields a verbatim path (\\?\C:\...); strip the
    // prefix so expected outputs stay readable.
    #[cfg(windows)]
    let path = path.trim_start_matches(r"\\?\").to_string();

    path
}
/// Build a `TestEnv` and return it together with the absolute path of its root.
#[cfg(test)]
fn get_test_env_with_abs_path(dirs: &[&'static str], files: &[&'static str]) -> (TestEnv, String) {
    let env = TestEnv::new(dirs, files);
    let root = get_absolute_root_path(&env);
    (env, root)
}
/// Create a file at `path` filled with exactly `size_in_bytes` '#' characters.
#[cfg(test)]
fn create_file_with_size<P: AsRef<Path>>(path: P, size_in_bytes: usize) {
    // The redundant `::<P>` turbofish of the original is dropped; inference
    // resolves the type from the argument.
    let mut file = fs::File::create(path).unwrap();
    file.write_all("#".repeat(size_in_bytes).as_bytes()).unwrap();
}
/// Basic substring/regex matching against the default tree.
#[test]
fn test_simple() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    // Single-match patterns.
    env.assert_output(&["a.foo"], "a.foo");
    env.assert_output(&["b.foo"], "one/b.foo");
    env.assert_output(&["d.foo"], "one/two/three/d.foo");

    // Multi-match pattern.
    env.assert_output(
        &["foo"],
        "a.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
    );
}
/// An empty pattern lists everything, regardless of the pattern mode.
#[test]
fn test_empty_pattern() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    let expected = "a.foo
e1 e2
one
one/b.foo
one/two
one/two/c.foo
one/two/C.Foo2
one/two/three
one/two/three/d.foo
one/two/three/directory_foo
symlink";

    // All three pattern modes behave identically for the empty pattern.
    env.assert_output(&["--regex"], expected);
    env.assert_output(&["--fixed-strings"], expected);
    env.assert_output(&["--glob"], expected);
}
/// Searching several root directories at once.
#[test]
fn test_multi_file() {
    let dirs = &["test1", "test2"];
    let files = &["test1/a.foo", "test1/b.foo", "test2/a.foo"];
    let env = TestEnv::new(dirs, files);

    env.assert_output(
        &["a.foo", "test1", "test2"],
        "test1/a.foo
test2/a.foo",
    );
    env.assert_output(
        &["", "test1", "test2"],
        "test1/a.foo
test2/a.foo
test1/b.foo",
    );
    env.assert_output(&["a.foo", "test1"], "test1/a.foo");
    env.assert_output(&["b.foo", "test1", "test2"], "test1/b.foo");
}
/// Searching multiple roots when some of them do not exist: valid roots are
/// searched, each missing one produces a warning, and only an all-invalid set
/// is a hard failure.
#[test]
fn test_multi_file_with_missing() {
    let dirs = &["real"];
    let files = &["real/a.foo", "real/b.foo"];
    let env = TestEnv::new(dirs, files);

    env.assert_output(&["a.foo", "real", "fake"], "real/a.foo");
    env.assert_error(
        &["a.foo", "real", "fake"],
        "[fd error]: Search path 'fake' is not a directory.",
    );
    env.assert_output(
        &["", "real", "fake"],
        "real/a.foo
real/b.foo",
    );
    env.assert_output(
        &["", "real", "fake1", "fake2"],
        "real/a.foo
real/b.foo",
    );
    // One warning per missing root.
    env.assert_error(
        &["", "real", "fake1", "fake2"],
        "[fd error]: Search path 'fake1' is not a directory.
[fd error]: Search path 'fake2' is not a directory.",
    );
    // With no valid roots at all, fd exits with an error.
    env.assert_failure_with_error(
        &["", "fake1", "fake2"],
        "[fd error]: Search path 'fake1' is not a directory.
[fd error]: Search path 'fake2' is not a directory.
[fd error]: No valid search paths given.",
    );
}
/// Explicit root paths, including relative ones containing "..": results are
/// reported relative to the given root.
#[test]
fn test_explicit_root_path() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    env.assert_output(
        &["foo", "one"],
        "one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
    );
    env.assert_output(
        &["foo", "one/two/three"],
        "one/two/three/d.foo
one/two/three/directory_foo",
    );

    // Relative roots with ".." keep their prefix in the output.
    env.assert_output_subdirectory(
        "one/two",
        &["foo", "../../"],
        "../../a.foo
../../one/b.foo
../../one/two/c.foo
../../one/two/C.Foo2
../../one/two/three/d.foo
../../one/two/three/directory_foo",
    );
    env.assert_output_subdirectory(
        "one/two/three",
        &["", ".."],
        "../c.foo
../C.Foo2
../three
../three/d.foo
../three/directory_foo",
    );
}
/// Regex searches: character classes, with and without --case-sensitive.
#[test]
fn test_regex_searches() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    // Smart case: the lowercase class also matches C.Foo2.
    env.assert_output(
        &["[a-c].foo"],
        "a.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2",
    );

    // --case-sensitive restricts to the literal case.
    env.assert_output(
        &["--case-sensitive", "[a-c].foo"],
        "a.foo
one/b.foo
one/two/c.foo",
    );
}
/// Smart case: a lowercase pattern matches case-insensitively; any literal
/// uppercase character switches to case-sensitive matching.
#[test]
fn test_smart_case() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    env.assert_output(
        &["c.foo"],
        "one/two/c.foo
one/two/C.Foo2",
    );
    env.assert_output(&["C.Foo"], "one/two/C.Foo2");
    env.assert_output(&["Foo"], "one/two/C.Foo2");

    // Only literal uppercase chars should trigger case sensitivity — regex
    // escapes like \A do not count.
    env.assert_output(
        &["\\Ac"],
        "one/two/c.foo
one/two/C.Foo2",
    );
    env.assert_output(&["\\AC"], "one/two/C.Foo2");
}
/// Case sensitivity (--case-sensitive); the last of conflicting case flags wins.
#[test]
fn test_case_sensitive() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(&["--case-sensitive", "c.foo"], "one/two/c.foo");
    env.assert_output(&["--case-sensitive", "C.Foo"], "one/two/C.Foo2");

    // --case-sensitive overrides a preceding --ignore-case.
    env.assert_output(
        &["--ignore-case", "--case-sensitive", "C.Foo"],
        "one/two/C.Foo2",
    );
}
/// Case insensitivity (--ignore-case); the last of conflicting case flags wins.
#[test]
fn test_case_insensitive() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["--ignore-case", "C.Foo"],
        "one/two/c.foo
one/two/C.Foo2",
    );

    // --ignore-case overrides a preceding --case-sensitive.
    env.assert_output(
        &["--case-sensitive", "--ignore-case", "C.Foo"],
        "one/two/c.foo
one/two/C.Foo2",
    );
}
/// Glob-based searches (--glob): wildcards and character classes.
#[test]
fn test_glob_searches() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    env.assert_output(
        &["--glob", "*.foo"],
        "a.foo
one/b.foo
one/two/c.foo
one/two/three/d.foo",
    );
    env.assert_output(
        &["--glob", "[a-c].foo"],
        "a.foo
one/b.foo
one/two/c.foo",
    );
    // A trailing wildcard additionally matches the Foo2 suffix.
    env.assert_output(
        &["--glob", "[a-c].foo*"],
        "a.foo
one/b.foo
one/two/C.Foo2
one/two/c.foo",
    );
}
/// Glob-based searches (--glob) combined with full-path matching (--full-path).
#[cfg(not(windows))] // TODO: make this work on Windows
#[test]
fn test_full_path_glob_searches() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    env.assert_output(
        &["--glob", "--full-path", "**/one/**/*.foo"],
        "one/b.foo
one/two/c.foo
one/two/three/d.foo",
    );
    env.assert_output(
        &["--glob", "--full-path", "**/one/*/*.foo"],
        " one/two/c.foo",
    );
    env.assert_output(
        &["--glob", "--full-path", "**/one/*/*/*.foo"],
        " one/two/three/d.foo",
    );
}
/// Smart case also applies to glob patterns.
#[test]
fn test_smart_case_glob_searches() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    // Lowercase glob matches both cases.
    env.assert_output(
        &["--glob", "c.foo*"],
        "one/two/C.Foo2
one/two/c.foo",
    );
    // An uppercase character makes the glob case-sensitive.
    env.assert_output(&["--glob", "C.Foo*"], "one/two/C.Foo2");
}
/// Glob-based searches (--glob) combined with --case-sensitive.
#[test]
fn test_case_sensitive_glob_searches() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(&["--glob", "--case-sensitive", "c.foo*"], "one/two/c.foo");
}
/// Glob-based searches (--glob) combined with an --extension filter.
#[test]
fn test_glob_searches_with_extension() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["--glob", "--extension", "foo2", "[a-z].*"],
        "one/two/C.Foo2",
    );
}
/// A later --regex flag overrides an earlier --glob.
#[test]
fn test_regex_overrides_glob() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(&["--glob", "--regex", "Foo2$"], "one/two/C.Foo2");
}
/// Full path search (--full-path): the regex is anchored against the whole
/// path, including the (escaped) system root prefix.
#[test]
fn test_full_path() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    let root = env.system_root();
    let prefix = escape(&root.to_string_lossy());
    env.assert_output(
        &[
            "--full-path",
            &format!("^{prefix}.*three.*foo$", prefix = prefix),
        ],
        "one/two/three/d.foo
one/two/three/directory_foo",
    );
}
/// Hidden files (--hidden): dotfiles are included in the results.
#[test]
fn test_hidden() {
    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
    env.assert_output(
        &["--hidden", "foo"],
        ".hidden.foo
a.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
    );
}
/// Hidden file attribute on Windows: a file flagged FILE_ATTRIBUTE_HIDDEN is
/// treated like a dotfile.
#[cfg(windows)]
#[test]
fn test_hidden_file_attribute() {
    use std::os::windows::fs::OpenOptionsExt;

    let env = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    // https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-setfileattributesa
    const FILE_ATTRIBUTE_HIDDEN: u32 = 2;

    fs::OpenOptions::new()
        .create(true)
        .write(true)
        .attributes(FILE_ATTRIBUTE_HIDDEN)
        .open(env.test_root().join("hidden-file.txt"))
        .unwrap();

    env.assert_output(&["--hidden", "hidden-file.txt"], "hidden-file.txt");
    env.assert_output(&["hidden-file.txt"], "");
}
/// Ignored files (--no-ignore)
#[test]
fn test_no_ignore() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// --no-ignore surfaces the fdignored/gitignored entries, but hidden files
// still require --hidden (checked by the second invocation).
te.assert_output(
&["--no-ignore", "foo"],
"a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
te.assert_output(
&["--hidden", "--no-ignore", "foo"],
".hidden.foo
a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// .gitignore and .fdignore
#[test]
fn test_gitignore_and_fdignore() {
let files = &[
"ignored-by-nothing",
"ignored-by-fdignore",
"ignored-by-gitignore",
"ignored-by-both",
];
let te = TestEnv::new(&[], files);
// Populate the two ignore files so each mechanism covers two of the entries.
fs::File::create(te.test_root().join(".fdignore"))
.unwrap()
.write_all(b"ignored-by-fdignore\nignored-by-both")
.unwrap();
fs::File::create(te.test_root().join(".gitignore"))
.unwrap()
.write_all(b"ignored-by-gitignore\nignored-by-both")
.unwrap();
// Default: both ignore files apply.
te.assert_output(&["ignored"], "ignored-by-nothing");
// --no-ignore-vcs disables only .gitignore; .fdignore still applies.
te.assert_output(
&["--no-ignore-vcs", "ignored"],
"ignored-by-nothing
ignored-by-gitignore",
);
// --no-ignore disables everything.
te.assert_output(
&["--no-ignore", "ignored"],
"ignored-by-nothing
ignored-by-fdignore
ignored-by-gitignore
ignored-by-both",
);
}
/// Precedence of .fdignore files
#[test]
fn test_custom_ignore_precedence() {
let dirs = &["inner"];
let files = &["inner/foo"];
let te = TestEnv::new(dirs, files);
// Ignore 'foo' via .gitignore
fs::File::create(te.test_root().join("inner/.gitignore"))
.unwrap()
.write_all(b"foo")
.unwrap();
// Whitelist 'foo' via .fdignore
fs::File::create(te.test_root().join(".fdignore"))
.unwrap()
.write_all(b"!foo")
.unwrap();
// In every mode the .fdignore whitelist wins over the .gitignore entry.
te.assert_output(&["foo"], "inner/foo");
te.assert_output(&["--no-ignore-vcs", "foo"], "inner/foo");
te.assert_output(&["--no-ignore", "foo"], "inner/foo");
}
/// VCS ignored files (--no-ignore-vcs)
#[test]
fn test_no_ignore_vcs() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// "gitignored.foo" appears, but "fdignored.foo" stays hidden:
// only VCS ignore files are disabled.
te.assert_output(
&["--no-ignore-vcs", "foo"],
"a.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// Custom ignore files (--ignore-file)
#[test]
fn test_custom_ignore_files() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Ignore 'C.Foo2' and everything in 'three'.
fs::File::create(te.test_root().join("custom.ignore"))
.unwrap()
.write_all(b"C.Foo2\nthree")
.unwrap();
// Both patterns from the custom file take effect on the results.
te.assert_output(
&["--ignore-file", "custom.ignore", "foo"],
"a.foo
one/b.foo
one/two/c.foo",
);
}
/// Ignored files with ripgrep aliases (-u / -uu)
#[test]
fn test_no_ignore_aliases() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// -u behaves like --no-ignore (ignore files disabled, hidden still skipped).
te.assert_output(
&["-u", "foo"],
"a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
// -uu additionally shows hidden files (like --no-ignore --hidden).
te.assert_output(
&["-uu", "foo"],
".hidden.foo
a.foo
fdignored.foo
gitignored.foo
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// Symlinks (--follow)
#[test]
fn test_follow() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// With --follow, matches are also reported under the "symlink" directory link.
te.assert_output(
&["--follow", "c.foo"],
"one/two/c.foo
one/two/C.Foo2
symlink/c.foo
symlink/C.Foo2",
);
}
// File system boundaries (--one-file-system)
// Limited to Unix because, to the best of my knowledge, there is no easy way to test a use case
// file systems mounted into the tree on Windows.
// Not limiting depth causes massive delay under Darwin, see BurntSushi/ripgrep#1429
#[test]
#[cfg(unix)]
fn test_file_system_boundaries() {
// Helper function to get the device ID for a given path
// Inspired by https://github.com/BurntSushi/ripgrep/blob/8892bf648cfec111e6e7ddd9f30e932b0371db68/ignore/src/walk.rs#L1693
fn device_num(path: impl AsRef<Path>) -> u64 {
use std::os::unix::fs::MetadataExt;
path.as_ref().metadata().map(|md| md.dev()).unwrap()
}
// Can't simulate file system boundaries
let te = TestEnv::new(&[], &[]);
let dev_null = Path::new("/dev/null");
// /dev/null should exist in all sane Unixes. Skip if it doesn't exist for some reason.
// Also skip should it be on the same device as the root partition for some reason.
if !dev_null.is_file() || device_num(dev_null) == device_num("/") {
return;
}
// Without --one-file-system, /dev/null is found when searching from /.
te.assert_output(
&["--full-path", "--max-depth", "2", "^/dev/null$", "/"],
"/dev/null",
);
// With --one-file-system, the walk stays on the root device and must not
// descend into /dev (a different device, as checked above).
te.assert_output(
&[
"--one-file-system",
"--full-path",
"--max-depth",
"2",
"^/dev/null$",
"/",
],
"",
);
}
/// Broken symlinks: reported as symlinks, never as files, with and without --follow.
#[test]
fn test_follow_broken_symlink() {
let mut te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.create_broken_symlink("broken_symlink")
.expect("Failed to create broken symlink.");
te.assert_output(
&["symlink"],
"broken_symlink
symlink",
);
te.assert_output(
&["--type", "symlink", "symlink"],
"broken_symlink
symlink",
);
// A symlink never counts as a regular file.
te.assert_output(&["--type", "file", "symlink"], "");
// With --follow, only the broken link remains a "symlink" (the valid one
// resolves to its target type).
te.assert_output(
&["--follow", "--type", "symlink", "symlink"],
"broken_symlink",
);
te.assert_output(&["--follow", "--type", "file", "symlink"], "");
}
/// Null separator (--print0)
#[test]
fn test_print0() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// The literal "NULL" markers in the expected output presumably stand for
// the NUL separator bytes after normalization by TestEnv — not visible here.
te.assert_output(
&["--print0", "foo"],
"a.fooNULL
one/b.fooNULL
one/two/C.Foo2NULL
one/two/c.fooNULL
one/two/three/d.fooNULL
one/two/three/directory_fooNULL",
);
}
/// Maximum depth (--max-depth)
#[test]
fn test_max_depth() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Depth 3: everything down to the contents of one/two (but not inside three).
te.assert_output(
&["--max-depth", "3"],
"a.foo
e1 e2
one
one/b.foo
one/two
one/two/c.foo
one/two/C.Foo2
one/two/three
symlink",
);
// Depth 2: down to the contents of "one".
te.assert_output(
&["--max-depth", "2"],
"a.foo
e1 e2
one
one/b.foo
one/two
symlink",
);
// Depth 1: only top-level entries.
te.assert_output(
&["--max-depth", "1"],
"a.foo
e1 e2
one
symlink",
);
}
/// Minimum depth (--min-depth)
#[test]
fn test_min_depth() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Depth >= 3: entries under one/two and deeper.
te.assert_output(
&["--min-depth", "3"],
"one/two/c.foo
one/two/C.Foo2
one/two/three
one/two/three/d.foo
one/two/three/directory_foo",
);
// Depth >= 4: only entries inside one/two/three.
te.assert_output(
&["--min-depth", "4"],
"one/two/three/d.foo
one/two/three/directory_foo",
);
}
/// Exact depth (--exact-depth)
#[test]
fn test_exact_depth() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Exactly depth 3: the contents of one/two, nothing shallower or deeper.
te.assert_output(
&["--exact-depth", "3"],
"one/two/c.foo
one/two/C.Foo2
one/two/three",
);
}
/// Pruning (--prune)
#[test]
fn test_prune() {
let dirs = &["foo/bar", "bar/foo", "baz"];
let files = &[
"foo/foo.file",
"foo/bar/foo.file",
"bar/foo.file",
"bar/foo/foo.file",
"baz/foo.file",
];
let te = TestEnv::new(dirs, files);
// Without --prune, matching directories are still descended into.
te.assert_output(
&["foo"],
"foo
foo/foo.file
foo/bar/foo.file
bar/foo.file
bar/foo
bar/foo/foo.file
baz/foo.file",
);
// With --prune, a matching directory is reported but not descended into,
// so nothing inside "foo" or "bar/foo" appears.
te.assert_output(
&["--prune", "foo"],
"foo
bar/foo
bar/foo.file
baz/foo.file",
);
}
/// Absolute paths (--absolute-path)
#[test]
fn test_absolute_path() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
// Without a pattern: every entry, prefixed with the absolute test root.
te.assert_output(
&["--absolute-path"],
&format!(
"{abs_path}/a.foo
{abs_path}/e1 e2
{abs_path}/one
{abs_path}/one/b.foo
{abs_path}/one/two
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo
{abs_path}/symlink",
abs_path = &abs_path
),
);
// With a pattern: only matches, still absolute.
te.assert_output(
&["--absolute-path", "foo"],
&format!(
"{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
abs_path = &abs_path
),
);
}
/// Show absolute paths if the path argument is absolute
#[test]
fn test_implicit_absolute_path() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
// Passing an absolute search root implies absolute output, without --absolute-path.
te.assert_output(
&["foo", &abs_path],
&format!(
"{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
abs_path = &abs_path
),
);
}
/// Absolute paths should be normalized
#[test]
fn test_normalized_absolute_path() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
// Searching ".." from inside "one" must yield clean absolute paths
// (no ".." components) rooted at the test directory.
te.assert_output_subdirectory(
"one",
&["--absolute-path", "foo", ".."],
&format!(
"{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
abs_path = &abs_path
),
);
}
/// File type filter (--type)
#[test]
fn test_type() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Regular files only.
te.assert_output(
&["--type", "f"],
"a.foo
e1 e2
one/b.foo
one/two/c.foo
one/two/C.Foo2
one/two/three/d.foo",
);
te.assert_output(&["--type", "f", "e1"], "e1 e2");
// Directories only.
te.assert_output(
&["--type", "d"],
"one
one/two
one/two/three
one/two/three/directory_foo",
);
// Multiple --type flags combine as a union (directories OR symlinks).
te.assert_output(
&["--type", "d", "--type", "l"],
"one
one/two
one/two/three
one/two/three/directory_foo
symlink",
);
te.assert_output(&["--type", "l"], "symlink");
}
/// Test `--type executable`
#[cfg(unix)]
#[test]
fn test_type_executable() {
use std::os::unix::fs::OpenOptionsExt;
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Create a file with the executable bits set (mode 0o777).
fs::OpenOptions::new()
.create(true)
.write(true)
.mode(0o777)
.open(te.test_root().join("executable-file.sh"))
.unwrap();
te.assert_output(&["--type", "executable"], "executable-file.sh");
// Combined with another type, the filters are a union.
te.assert_output(
&["--type", "executable", "--type", "directory"],
"executable-file.sh
one
one/two
one/two/three
one/two/three/directory_foo",
);
}
/// Test `--type empty`
#[test]
fn test_type_empty() {
let te = TestEnv::new(&["dir_empty", "dir_nonempty"], &[]);
create_file_with_size(te.test_root().join("0_bytes.foo"), 0);
create_file_with_size(te.test_root().join("5_bytes.foo"), 5);
create_file_with_size(te.test_root().join("dir_nonempty").join("2_bytes.foo"), 2);
// "empty" alone matches both empty files and empty directories.
te.assert_output(
&["--type", "empty"],
"0_bytes.foo
dir_empty",
);
// empty + (file OR directory) is the same set ...
te.assert_output(
&["--type", "empty", "--type", "file", "--type", "directory"],
"0_bytes.foo
dir_empty",
);
// ... while empty + a single type intersects with that type.
te.assert_output(&["--type", "empty", "--type", "file"], "0_bytes.foo");
te.assert_output(&["--type", "empty", "--type", "directory"], "dir_empty");
}
/// File extension (--extension)
#[test]
fn test_extension() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["--extension", "foo"],
"a.foo
one/b.foo
one/two/c.foo
one/two/three/d.foo",
);
// A leading dot in the extension argument is accepted and equivalent.
te.assert_output(
&["--extension", ".foo"],
"a.foo
one/b.foo
one/two/c.foo
one/two/three/d.foo",
);
// Multiple --extension flags combine as a union; matching is case-insensitive
// ("foo2" matches "C.Foo2").
te.assert_output(
&["--extension", ".foo", "--extension", "foo2"],
"a.foo
one/b.foo
one/two/c.foo
one/two/three/d.foo
one/two/C.Foo2",
);
te.assert_output(&["--extension", ".foo", "a"], "a.foo");
te.assert_output(&["--extension", "foo2"], "one/two/C.Foo2");
// Multi-part extensions like ".bar.baz" are supported.
let te2 = TestEnv::new(&[], &["spam.bar.baz", "egg.bar.baz", "yolk.bar.baz.sig"]);
te2.assert_output(
&["--extension", ".bar.baz"],
"spam.bar.baz
egg.bar.baz",
);
te2.assert_output(&["--extension", "sig"], "yolk.bar.baz.sig");
te2.assert_output(&["--extension", "bar.baz.sig"], "yolk.bar.baz.sig");
// Non-ASCII extensions (combining characters, symbols) work too.
let te3 = TestEnv::new(&[], &["latin1.e\u{301}xt", "smiley.☻"]);
te3.assert_output(&["--extension", "☻"], "smiley.☻");
te3.assert_output(&["--extension", ".e\u{301}xt"], "latin1.e\u{301}xt");
// A dotfile like ".hidden" has no extension; only "test.hidden" matches.
let te4 = TestEnv::new(&[], &[".hidden", "test.hidden"]);
te4.assert_output(&["--hidden", "--extension", ".hidden"], "test.hidden");
}
/// No file extension (test for the pattern provided in the --help text)
#[test]
fn test_no_extension() {
let te = TestEnv::new(
DEFAULT_DIRS,
&["a.foo", "aa", "one/b.foo", "one/bb", "one/two/three/d"],
);
// "^[^.]+$" matches names without a dot — i.e. files lacking an extension
// (directories match trivially).
te.assert_output(
&["^[^.]+$"],
"aa
one
one/bb
one/two
one/two/three
one/two/three/d
one/two/three/directory_foo
symlink",
);
te.assert_output(
&["^[^.]+$", "--type", "file"],
"aa
one/bb
one/two/three/d",
);
}
/// Symlink as search directory
#[test]
fn test_symlink_as_root() {
let mut te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.create_broken_symlink("broken_symlink")
.expect("Failed to create broken symlink.");
// From: http://pubs.opengroup.org/onlinepubs/9699919799/functions/getcwd.html
// The getcwd() function shall place an absolute pathname of the current working directory in
// the array pointed to by buf, and return buf. The pathname shall contain no components that
// are dot or dot-dot, or are symbolic links.
//
// Key points:
// 1. The path of the current working directory of a Unix process cannot contain symlinks.
// 2. The path of the current working directory of a Windows process can contain symlinks.
//
// More:
// 1. On Windows, symlinks are resolved after the ".." component.
// 2. On Unix, symlinks are resolved immediately as encountered.
let parent_parent = if cfg!(windows) { ".." } else { "../.." };
// Searching from inside "symlink" (which points at one/two), the path back
// to the test root therefore differs per platform, as explained above.
te.assert_output_subdirectory(
"symlink",
&["", parent_parent],
&format!(
"{dir}/a.foo
{dir}/broken_symlink
{dir}/e1 e2
{dir}/one
{dir}/one/b.foo
{dir}/one/two
{dir}/one/two/c.foo
{dir}/one/two/C.Foo2
{dir}/one/two/three
{dir}/one/two/three/d.foo
{dir}/one/two/three/directory_foo
{dir}/symlink",
dir = &parent_parent
),
);
}
/// Absolute-path output when the working directory is a symlink:
/// Unix resolves the link (one/two), Windows keeps the link name.
#[test]
fn test_symlink_and_absolute_path() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
let expected_path = if cfg!(windows) { "symlink" } else { "one/two" };
te.assert_output_subdirectory(
"symlink",
&["--absolute-path"],
&format!(
"{abs_path}/{expected_path}/c.foo
{abs_path}/{expected_path}/C.Foo2
{abs_path}/{expected_path}/three
{abs_path}/{expected_path}/three/d.foo
{abs_path}/{expected_path}/three/directory_foo",
abs_path = &abs_path,
expected_path = expected_path
),
);
}
/// Passing an absolute symlink path as the search root keeps the symlink
/// name in the reported paths (no resolution to its target).
#[test]
fn test_symlink_as_absolute_root() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["", &format!("{abs_path}/symlink", abs_path = abs_path)],
&format!(
"{abs_path}/symlink/c.foo
{abs_path}/symlink/C.Foo2
{abs_path}/symlink/three
{abs_path}/symlink/three/d.foo
{abs_path}/symlink/three/directory_foo",
abs_path = &abs_path
),
);
}
/// --full-path matching combined with --absolute-path from inside a symlinked
/// working directory; the matched prefix differs per platform (see
/// test_symlink_and_absolute_path).
#[test]
fn test_symlink_and_full_path() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
let root = te.system_root();
// Regex-escape the system root so it can be used as a literal anchor.
let prefix = escape(&root.to_string_lossy());
let expected_path = if cfg!(windows) { "symlink" } else { "one/two" };
te.assert_output_subdirectory(
"symlink",
&[
"--absolute-path",
"--full-path",
&format!("^{prefix}.*three", prefix = prefix),
],
&format!(
"{abs_path}/{expected_path}/three
{abs_path}/{expected_path}/three/d.foo
{abs_path}/{expected_path}/three/directory_foo",
abs_path = &abs_path,
expected_path = expected_path
),
);
}
/// --full-path matching against an absolute symlink search root: the pattern
/// can match the symlink component itself since the root is not resolved.
#[test]
fn test_symlink_and_full_path_abs_path() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
let root = te.system_root();
let prefix = escape(&root.to_string_lossy());
te.assert_output(
&[
"--full-path",
&format!("^{prefix}.*symlink.*three", prefix = prefix),
&format!("{abs_path}/symlink", abs_path = abs_path),
],
&format!(
"{abs_path}/symlink/three
{abs_path}/symlink/three/d.foo
{abs_path}/symlink/three/directory_foo",
abs_path = &abs_path
),
);
}
/// Exclude patterns (--exclude)
#[test]
fn test_excludes() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Exclusion is a glob on the path; "*.foo" drops all .foo files
// (but not "C.Foo2" — globs here are case-sensitive, ".Foo2" != ".foo").
te.assert_output(
&["--exclude", "*.foo"],
"one
one/two
one/two/C.Foo2
one/two/three
one/two/three/directory_foo
e1 e2
symlink",
);
// Multiple --exclude flags accumulate.
te.assert_output(
&["--exclude", "*.foo", "--exclude", "*.Foo2"],
"one
one/two
one/two/three
one/two/three/directory_foo
e1 e2
symlink",
);
te.assert_output(
&["--exclude", "*.foo", "--exclude", "*.Foo2", "foo"],
"one/two/three/directory_foo",
);
// Excluding a directory prunes everything beneath it.
te.assert_output(
&["--exclude", "one/two", "foo"],
"a.foo
one/b.foo",
);
// "**" patterns exclude at any depth below the given directory.
te.assert_output(
&["--exclude", "one/**/*.foo"],
"a.foo
e1 e2
one
one/two
one/two/C.Foo2
one/two/three
one/two/three/directory_foo
symlink",
);
}
/// Shell script execution (--exec)
#[test]
fn test_exec() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
// TODO Windows tests: D:file.txt \file.txt \\server\share\file.txt ...
if !cfg!(windows) {
// Without any placeholder, the full path is appended to the command.
te.assert_output(
&["--absolute-path", "foo", "--exec", "echo"],
&format!(
"{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/c.foo
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
abs_path = &abs_path
),
);
// {} expands to the full (relative) path.
te.assert_output(
&["foo", "--exec", "echo", "{}"],
"a.foo
one/b.foo
one/two/C.Foo2
one/two/c.foo
one/two/three/d.foo
one/two/three/directory_foo",
);
// {.} strips the extension.
te.assert_output(
&["foo", "--exec", "echo", "{.}"],
"a
one/b
one/two/C
one/two/c
one/two/three/d
one/two/three/directory_foo",
);
// {/} is the basename.
te.assert_output(
&["foo", "--exec", "echo", "{/}"],
"a.foo
b.foo
C.Foo2
c.foo
d.foo
directory_foo",
);
// {/.} is the basename without extension.
te.assert_output(
&["foo", "--exec", "echo", "{/.}"],
"a
b
C
c
d
directory_foo",
);
// {//} is the parent directory.
te.assert_output(
&["foo", "--exec", "echo", "{//}"],
".
one
one/two
one/two
one/two/three
one/two/three",
);
// A path containing a space ("e1 e2") is passed as a single argument.
te.assert_output(&["e1", "--exec", "printf", "%s.%s\n"], "e1 e2.");
}
}
/// Batch command execution (--exec-batch): all results are passed to a
/// single invocation, and the placeholder/argument rules are validated.
#[test]
fn test_exec_batch() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
let te = te.normalize_line(true);
// TODO Test for windows
if !cfg!(windows) {
// All paths end up on one line, separated by spaces.
te.assert_output(
&["--absolute-path", "foo", "--exec-batch", "echo"],
&format!(
"{abs_path}/a.foo {abs_path}/one/b.foo {abs_path}/one/two/C.Foo2 {abs_path}/one/two/c.foo {abs_path}/one/two/three/d.foo {abs_path}/one/two/three/directory_foo",
abs_path = &abs_path
),
);
te.assert_output(
&["foo", "--exec-batch", "echo", "{}"],
"a.foo one/b.foo one/two/C.Foo2 one/two/c.foo one/two/three/d.foo one/two/three/directory_foo",
);
te.assert_output(
&["foo", "--exec-batch", "echo", "{/}"],
"a.foo b.foo C.Foo2 c.foo d.foo directory_foo",
);
// No matches: the batch command is not run at all.
te.assert_output(
&["no_match", "--exec-batch", "echo", "Matched: ", "{/}"],
"",
);
// Error cases: placeholder and argument validation.
te.assert_failure_with_error(
&["foo", "--exec-batch", "echo", "{}", "{}"],
"[fd error]: Only one placeholder allowed for batch commands",
);
te.assert_failure_with_error(
&["foo", "--exec-batch", "echo", "{/}", ";", "-x", "echo"],
"error: The argument '--exec <cmd>' cannot be used with '--exec-batch <cmd>'",
);
te.assert_failure_with_error(
&["foo", "--exec-batch"],
"error: The argument '--exec-batch <cmd>' requires a value but none was supplied",
);
te.assert_failure_with_error(
&["foo", "--exec-batch", "echo {}"],
"[fd error]: First argument of exec-batch is expected to be a fixed executable",
);
}
}
/// Shell script execution (--exec) with a custom --path-separator
#[test]
fn test_exec_with_separator() {
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
// The custom separator '#' must also be applied to paths handed to --exec.
te.assert_output(
&[
"--path-separator=#",
"--absolute-path",
"foo",
"--exec",
"echo",
],
&format!(
"{abs_path}#a.foo
{abs_path}#one#b.foo
{abs_path}#one#two#C.Foo2
{abs_path}#one#two#c.foo
{abs_path}#one#two#three#d.foo
{abs_path}#one#two#three#directory_foo",
// The test root itself may contain native separators; rewrite them too.
abs_path = abs_path.replace(std::path::MAIN_SEPARATOR, "#"),
),
);
te.assert_output(
&["--path-separator=#", "foo", "--exec", "echo", "{}"],
"a.foo
one#b.foo
one#two#C.Foo2
one#two#c.foo
one#two#three#d.foo
one#two#three#directory_foo",
);
// {.}: extension stripped, separator applied.
te.assert_output(
&["--path-separator=#", "foo", "--exec", "echo", "{.}"],
"a
one#b
one#two#C
one#two#c
one#two#three#d
one#two#three#directory_foo",
);
// {/} and {/.}: basenames contain no separator, so output is unchanged.
te.assert_output(
&["--path-separator=#", "foo", "--exec", "echo", "{/}"],
"a.foo
b.foo
C.Foo2
c.foo
d.foo
directory_foo",
);
te.assert_output(
&["--path-separator=#", "foo", "--exec", "echo", "{/.}"],
"a
b
C
c
d
directory_foo",
);
// {//}: parent directories with the custom separator.
te.assert_output(
&["--path-separator=#", "foo", "--exec", "echo", "{//}"],
".
one
one#two
one#two
one#two#three
one#two#three",
);
te.assert_output(
&["--path-separator=#", "e1", "--exec", "printf", "%s.%s\n"],
"e1 e2.",
);
}
/// Literal search (--fixed-strings)
#[test]
fn test_fixed_strings() {
let dirs = &["test1", "test2"];
let files = &["test1/a.foo", "test1/a_foo", "test2/Download (1).tar.gz"];
let te = TestEnv::new(dirs, files);
// Regex search, dot is treated as "any character"
te.assert_output(
&["a.foo"],
"test1/a.foo
test1/a_foo",
);
// Literal search, dot is treated as character
te.assert_output(&["--fixed-strings", "a.foo"], "test1/a.foo");
// Regex search, parens are treated as group
te.assert_output(&["download (1)"], "");
// Literal search, parens are treated as characters
te.assert_output(
&["--fixed-strings", "download (1)"],
"test2/Download (1).tar.gz",
);
// Combine with --case-sensitive
// (lowercase "download" no longer matches "Download").
te.assert_output(&["--fixed-strings", "--case-sensitive", "download (1)"], "");
}
/// Filenames with invalid UTF-8 sequences
#[cfg(target_os = "linux")]
#[test]
fn test_invalid_utf8() {
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
let dirs = &["test1"];
let files = &[];
let te = TestEnv::new(dirs, files);
// Create a file whose name contains the invalid byte 0xFE.
fs::File::create(
te.test_root()
.join(OsStr::from_bytes(b"test1/test_\xFEinvalid.txt")),
)
.unwrap();
// The invalid byte is rendered as U+FFFD (�) in the output.
te.assert_output(&["", "test1/"], "test1/test_�invalid.txt");
te.assert_output(&["invalid", "test1/"], "test1/test_�invalid.txt");
// Should not be found under a different extension
te.assert_output(&["-e", "zip", "", "test1/"], "");
}
/// Filtering for file size (--size)
/// Prefix semantics: "+N" means >= N, "-N" means <= N, plain "N" means == N.
/// Unit suffixes: "B" bytes, "k" kilobytes (1000), "ki" kibibytes (1024).
#[test]
fn test_size() {
let te = TestEnv::new(&[], &[]);
create_file_with_size(te.test_root().join("0_bytes.foo"), 0);
create_file_with_size(te.test_root().join("11_bytes.foo"), 11);
create_file_with_size(te.test_root().join("30_bytes.foo"), 30);
create_file_with_size(te.test_root().join("3_kilobytes.foo"), 3 * 1000);
create_file_with_size(te.test_root().join("4_kibibytes.foo"), 4 * 1024);
// Zero and non-zero sized files.
te.assert_output(
&["", "--size", "+0B"],
"0_bytes.foo
11_bytes.foo
30_bytes.foo
3_kilobytes.foo
4_kibibytes.foo",
);
// Zero sized files.
te.assert_output(&["", "--size", "-0B"], "0_bytes.foo");
te.assert_output(&["", "--size", "0B"], "0_bytes.foo");
te.assert_output(&["", "--size=0B"], "0_bytes.foo");
te.assert_output(&["", "-S", "0B"], "0_bytes.foo");
// Files with 2 bytes or more.
te.assert_output(
&["", "--size", "+2B"],
"11_bytes.foo
30_bytes.foo
3_kilobytes.foo
4_kibibytes.foo",
);
// Files with 2 bytes or less.
te.assert_output(&["", "--size", "-2B"], "0_bytes.foo");
// Files with size between 1 byte and 11 bytes.
te.assert_output(&["", "--size", "+1B", "--size", "-11B"], "11_bytes.foo");
// Files with size equal 11 bytes.
te.assert_output(&["", "--size", "11B"], "11_bytes.foo");
// Files with size between 1 byte and 30 bytes.
te.assert_output(
&["", "--size", "+1B", "--size", "-30B"],
"11_bytes.foo
30_bytes.foo",
);
// Combine with a search pattern
te.assert_output(&["^11_", "--size", "+1B", "--size", "-30B"], "11_bytes.foo");
// Files with size between 12 and 30 bytes.
te.assert_output(&["", "--size", "+12B", "--size", "-30B"], "30_bytes.foo");
// Files with size between 31 and 100 bytes.
te.assert_output(&["", "--size", "+31B", "--size", "-100B"], "");
// Files with size between 3 kibibytes and 5 kibibytes.
te.assert_output(&["", "--size", "+3ki", "--size", "-5ki"], "4_kibibytes.foo");
// Files with size between 3 kilobytes and 5 kilobytes.
te.assert_output(
&["", "--size", "+3k", "--size", "-5k"],
"3_kilobytes.foo
4_kibibytes.foo",
);
// Files with size greater than 3 kilobytes and less than 3 kibibytes.
te.assert_output(&["", "--size", "+3k", "--size", "-3ki"], "3_kilobytes.foo");
// Files with size equal 4 kibibytes.
te.assert_output(&["", "--size", "+4ki", "--size", "-4ki"], "4_kibibytes.foo");
te.assert_output(&["", "--size", "4ki"], "4_kibibytes.foo");
}
#[cfg(test)]
fn create_file_with_modified<P: AsRef<Path>>(path: P, duration_in_secs: u64) {
let st = SystemTime::now() - Duration::from_secs(duration_in_secs);
let ft = filetime::FileTime::from_system_time(st);
fs::File::create(&path).expect("creation failed");
filetime::set_file_times(&path, ft, ft).expect("time modification failed");
}
/// Relative timestamp filters (--changed-within / --change-older-than).
#[test]
fn test_modified_relative() {
let te = TestEnv::new(&[], &[]);
// Fixture files named after their age relative to "now".
create_file_with_modified(te.test_root().join("foo_0_now"), 0);
create_file_with_modified(te.test_root().join("bar_1_min"), 60);
create_file_with_modified(te.test_root().join("foo_10_min"), 600);
create_file_with_modified(te.test_root().join("bar_1_h"), 60 * 60);
create_file_with_modified(te.test_root().join("foo_2_h"), 2 * 60 * 60);
create_file_with_modified(te.test_root().join("bar_1_day"), 24 * 60 * 60);
// Modified within the last 15 minutes.
te.assert_output(
&["", "--changed-within", "15min"],
"foo_0_now
bar_1_min
foo_10_min",
);
// Modified more than 15 minutes ago (complement of the above).
te.assert_output(
&["", "--change-older-than", "15min"],
"bar_1_h
foo_2_h
bar_1_day",
);
// Combined with a name pattern.
te.assert_output(
&["foo", "--changed-within", "12h"],
"foo_0_now
foo_10_min
foo_2_h",
);
}
/// Set both the access and the modification time of `path` to the given
/// RFC 3339 timestamp (e.g. "2018-03-15T12:00:00Z").
///
/// Panics if the timestamp cannot be parsed or the file times cannot be
/// changed — acceptable, since this is a test-only helper.
#[cfg(test)]
fn change_file_modified<P: AsRef<Path>>(path: P, iso_date: &str) {
    let st = humantime::parse_rfc3339(iso_date).expect("invalid date");
    let ft = filetime::FileTime::from_system_time(st);
    // Bug fix: the panic message previously read "failde".
    filetime::set_file_times(path, ft, ft).expect("time modification failed");
}
/// Absolute timestamp filters (--change-newer-than / --changed-before).
///
/// Note: the function was previously misspelled `test_modified_asolute`.
/// Test functions are only discovered via the `#[test]` attribute, so the
/// rename has no callers to break.
#[test]
fn test_modified_absolute() {
    let te = TestEnv::new(&[], &["15mar2018", "30dec2017"]);
    change_file_modified(te.test_root().join("15mar2018"), "2018-03-15T12:00:00Z");
    change_file_modified(te.test_root().join("30dec2017"), "2017-12-30T23:59:00Z");
    // Only the 2018 file is newer than New Year 2018 ...
    te.assert_output(
        &["", "--change-newer-than", "2018-01-01 00:00:00"],
        "15mar2018",
    );
    // ... and only the 2017 file is older.
    te.assert_output(
        &["", "--changed-before", "2018-01-01 00:00:00"],
        "30dec2017",
    );
}
/// Custom path separator in the printed output (--path-separator).
#[test]
fn test_custom_path_separator() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
te.assert_output(
&["foo", "one", "--path-separator", "="],
"one=b.foo
one=two=c.foo
one=two=C.Foo2
one=two=three=d.foo
one=two=three=directory_foo",
);
}
/// Changing the working directory for the search (--base-directory).
#[test]
fn test_base_directory() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Results are reported relative to the base directory.
te.assert_output(
&["--base-directory", "one"],
"b.foo
two
two/c.foo
two/C.Foo2
two/three
two/three/d.foo
two/three/directory_foo",
);
te.assert_output(
&["--base-directory", "one/two", "foo"],
"c.foo
C.Foo2
three/d.foo
three/directory_foo",
);
// Explicit root path
te.assert_output(
&["--base-directory", "one", "foo", "two"],
"two/c.foo
two/C.Foo2
two/three/d.foo
two/three/directory_foo",
);
// Ignore base directory when absolute path is used
let (te, abs_path) = get_test_env_with_abs_path(DEFAULT_DIRS, DEFAULT_FILES);
let abs_base_dir = &format!("{abs_path}/one/two", abs_path = &abs_path);
te.assert_output(
&["--base-directory", abs_base_dir, "foo", &abs_path],
&format!(
"{abs_path}/a.foo
{abs_path}/one/b.foo
{abs_path}/one/two/c.foo
{abs_path}/one/two/C.Foo2
{abs_path}/one/two/three/d.foo
{abs_path}/one/two/three/directory_foo",
abs_path = &abs_path
),
);
}
/// Limiting the number of printed results (--max-results / -1).
#[test]
fn test_max_results() {
    let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);

    // A limit of zero means "unlimited": both matches appear.
    te.assert_output(
        &["--max-results=0", "c.foo"],
        "one/two/C.Foo2
one/two/c.foo",
    );

    // A limit of two happens to cover every match, so nothing is cut off.
    te.assert_output(
        &["--max-results=2", "c.foo"],
        "one/two/C.Foo2
one/two/c.foo",
    );

    // With a limit of one, either match may be the one that is printed;
    // accept both possibilities.
    let check_single_result = |option| {
        let output = te.assert_success_and_get_output(".", &[option, "c.foo"]);
        let printed = String::from_utf8_lossy(&output.stdout)
            .trim()
            .replace(&std::path::MAIN_SEPARATOR.to_string(), "/");
        let acceptable = ["one/two/C.Foo2", "one/two/c.foo"];
        assert!(acceptable.contains(&printed.as_str()));
    };
    check_single_result("--max-results=1");
    check_single_result("-1");
}
/// Filenames with non-utf8 paths are passed to the executed program unchanged
///
/// Note:
/// - the test is disabled on Darwin/OSX, since it coerces file names to UTF-8,
/// even when the requested file name is not valid UTF-8.
/// - the test is currently disabled on Windows because I'm not sure how to create
/// invalid UTF-8 files on Windows
#[cfg(all(unix, not(target_os = "macos")))]
#[test]
fn test_exec_invalid_utf8() {
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
let dirs = &["test1"];
let files = &[];
let te = TestEnv::new(dirs, files);
// File name containing the invalid byte 0xFE.
fs::File::create(
te.test_root()
.join(OsStr::from_bytes(b"test1/test_\xFEinvalid.txt")),
)
.unwrap();
// Every placeholder form must preserve the raw 0xFE byte end to end.
te.assert_output_raw(
&["", "test1/", "--exec", "echo", "{}"],
b"test1/test_\xFEinvalid.txt\n",
);
te.assert_output_raw(
&["", "test1/", "--exec", "echo", "{/}"],
b"test_\xFEinvalid.txt\n",
);
te.assert_output_raw(&["", "test1/", "--exec", "echo", "{//}"], b"test1\n");
te.assert_output_raw(
&["", "test1/", "--exec", "echo", "{.}"],
b"test1/test_\xFEinvalid\n",
);
te.assert_output_raw(
&["", "test1/", "--exec", "echo", "{/.}"],
b"test_\xFEinvalid\n",
);
}
/// Smoke test for --list-details (output format depends on the local `ls`,
/// so only successful execution is asserted, not the output itself).
#[test]
fn test_list_details() {
let te = TestEnv::new(DEFAULT_DIRS, DEFAULT_FILES);
// Make sure we can execute 'fd --list-details' without any errors.
te.assert_success_and_get_output(".", &["--list-details"]);
}
/// Make sure that fd fails if numeric arguments can not be parsed
#[test]
fn test_number_parsing_errors() {
    let te = TestEnv::new(&[], &[]);
    // Each argument list below supplies a value that is not a valid number
    // for its option (or, for --threads, an invalid zero), so every
    // invocation must exit with a failure.
    let invalid_invocations: &[&[&str]] = &[
        &["--threads=a"],
        &["-j", ""],
        &["--threads=0"],
        &["--min-depth=a"],
        &["--max-depth=a"],
        &["--maxdepth=a"],
        &["--exact-depth=a"],
        &["--max-buffer-time=a"],
        &["--max-results=a"],
    ];
    for &args in invalid_invocations {
        te.assert_failure(args);
    }
}
/// Print error if search pattern starts with a dot and --hidden is not set
/// (Unix only, hidden files on Windows work differently)
#[test]
#[cfg(unix)]
fn test_error_if_hidden_not_set_and_pattern_starts_with_dot() {
let te = TestEnv::new(&[], &[".gitignore", ".whatever", "non-hidden"]);
// Dot-anchored patterns without --hidden are rejected outright ...
te.assert_failure(&["^\\.gitignore"]);
te.assert_failure(&["--glob", ".gitignore"]);
// ... but succeed once --hidden is given.
te.assert_output(&["--hidden", "^\\.gitignore"], ".gitignore");
te.assert_output(&["--hidden", "--glob", ".gitignore"], ".gitignore");
// A dot in the middle of the pattern is fine; it just finds nothing here.
te.assert_output(&[".gitignore"], "");
}
|
use std::slice::raw::buf_as_slice;
use std::str::raw::c_str_to_static_slice;
use libc::{c_char, c_uint, c_void, ptrdiff_t, size_t};
use libc;
use {encode, Encode};
// NOTE(review): this module is written in pre-1.0 Rust (`int`/`uint`, `fail!`,
// `with_c_str`, `c_str_to_static_slice`); it will not compile on a modern
// toolchain and is documented here as-is.
/// Opaque type standing in for an Objective-C object instance.
/// A variant-less enum cannot be instantiated; it is only used behind pointers.
pub enum Object { }
/// An Objective-C method selector, wrapping an opaque runtime pointer.
pub struct Sel {
ptr: *const c_void,
}
/// An Objective-C instance-variable descriptor (opaque runtime pointer).
pub struct Ivar {
ptr: *const c_void,
}
/// An Objective-C class, represented as a pointer into the runtime.
pub struct Class {
ptr: *mut Object,
}
/// The type of an Objective-C method implementation function (variadic IMP).
pub type Imp = extern fn(*mut Object, Sel, ...) -> *mut Object;
// Raw FFI declarations for the Objective-C runtime, linked through the
// Foundation framework. Signatures mirror <objc/runtime.h> / <objc/message.h>.
#[link(name = "Foundation", kind = "framework")]
extern {
// Selector registration and introspection.
pub fn sel_registerName(name: *const c_char) -> Sel;
pub fn sel_getName(sel: Sel) -> *const c_char;
// Class lookup and introspection.
pub fn objc_getClass(name: *const c_char) -> Class;
pub fn class_getName(cls: Class) -> *const c_char;
pub fn class_getInstanceSize(cls: Class) -> size_t;
pub fn class_getInstanceVariable(cls: Class, name: *const c_char) -> Ivar;
pub fn class_copyIvarList(cls: Class, outCount: *mut c_uint) -> *mut Ivar;
// Class construction (add methods/ivars, then register the pair).
pub fn class_addMethod(cls: Class, name: Sel, imp: Imp, types: *const c_char) -> bool;
pub fn class_addIvar(cls: Class, name: *const c_char, size: size_t, alignment: u8, types: *const c_char) -> bool;
pub fn objc_allocateClassPair(superclass: Class, name: *const c_char, extraBytes: size_t) -> Class;
pub fn objc_disposeClassPair(cls: Class);
pub fn objc_registerClassPair(cls: Class);
// Instance-variable access on objects.
pub fn object_setInstanceVariable(obj: *mut Object, name: *const c_char, value: *mut c_void) -> Ivar;
pub fn object_getInstanceVariable(obj: *mut Object, name: *const c_char, outValue: *mut *mut c_void) -> Ivar;
pub fn object_setIvar(obj: *mut Object, ivar: Ivar, value: *mut Object);
pub fn object_getIvar(obj: *mut Object, ivar: Ivar) -> *mut Object;
pub fn object_getClass(obj: *mut Object) -> Class;
// Ivar descriptor introspection.
pub fn ivar_getName(ivar: Ivar) -> *const c_char;
pub fn ivar_getOffset(ivar: Ivar) -> ptrdiff_t;
pub fn ivar_getTypeEncoding(ivar: Ivar) -> *const c_char;
// Message dispatch entry point.
pub fn objc_msgSend(obj: *mut Object, op: Sel, ...) -> *mut Object;
}
impl Sel {
/// Registers (or looks up) the selector with the given name in the runtime.
pub fn register(name: &str) -> Sel {
name.with_c_str(|name| unsafe {
sel_registerName(name)
})
}
/// Returns the name of this selector.
///
/// NOTE(review): `c_str_to_static_slice` hands back a 'static borrow of a
/// C string owned by the runtime; this presumably relies on selector names
/// living for the whole program — confirm against the runtime contract.
pub fn name(&self) -> &str {
unsafe {
let name = sel_getName(*self);
c_str_to_static_slice(name)
}
}
}
// Selectors compare by identity of the underlying runtime pointer
// (the runtime interns selector names, so pointer equality suffices).
impl PartialEq for Sel {
fn eq(&self, other: &Sel) -> bool {
self.ptr == other.ptr
}
}
impl Eq for Sel { }
// Cloning just copies the opaque pointer.
impl Clone for Sel {
fn clone(&self) -> Sel { *self }
}
impl Ivar {
/// Returns the name of this instance variable.
pub fn name(&self) -> &str {
unsafe {
let name = ivar_getName(*self);
c_str_to_static_slice(name)
}
}
/// Returns the byte offset of this instance variable within an instance.
pub fn offset(&self) -> int {
let offset = unsafe {
ivar_getOffset(*self)
};
offset as int
}
/// Returns the Objective-C type encoding string of this instance variable.
pub fn type_encoding(&self) -> &str {
unsafe {
let encoding = ivar_getTypeEncoding(*self);
c_str_to_static_slice(encoding)
}
}
}
// Cloning an ivar descriptor just copies the opaque runtime pointer.
impl Clone for Ivar {
fn clone(&self) -> Ivar { *self }
}
impl Object {
/// Returns the class of this object instance.
pub fn class(&self) -> Class {
unsafe {
object_getClass(self as *const Object as *mut Object)
}
}
/// Returns a reference to the instance variable `name`, reinterpreted as `T`.
///
/// The type-encoding assertion checks that `T`'s encoding matches the
/// declared ivar; the value is then read via raw pointer arithmetic from
/// the ivar's byte offset. Fails (panics) if the ivar does not exist.
pub unsafe fn get_ivar<T: Encode>(&self, name: &str) -> &T {
let cls = self.class();
let ptr = match cls.instance_variable(name) {
Some(ivar) => {
assert!(ivar.type_encoding() == encode::<T>());
let offset = ivar.offset();
let self_ptr = self as *const Object;
(self_ptr as *const u8).offset(offset) as *const T
}
None => fail!("Ivar {} not found on class {}", name, cls.name()),
};
&*ptr
}
/// Mutable variant of `get_ivar`; same encoding check and offset math.
pub unsafe fn get_mut_ivar<T: Encode>(&mut self, name: &str) -> &mut T {
let cls = self.class();
let ptr = match cls.instance_variable(name) {
Some(ivar) => {
assert!(ivar.type_encoding() == encode::<T>());
let offset = ivar.offset();
let self_ptr = self as *mut Object;
(self_ptr as *mut u8).offset(offset) as *mut T
}
None => fail!("Ivar {} not found on class {}", name, cls.name()),
};
&mut *ptr
}
/// Overwrites the instance variable `name` with `value`.
/// NOTE(review): the previous value is overwritten without being dropped
/// or released — confirm whether that is intended for ref-counted ivars.
pub unsafe fn set_ivar<T: Encode>(&mut self, name: &str, value: T) {
*self.get_mut_ivar::<T>(name) = value;
}
}
impl Class {
    // Looks a class up by name; None if the runtime does not know it.
    pub fn get(name: &str) -> Option<Class> {
        let cls = name.with_c_str(|name| unsafe {
            objc_getClass(name)
        });
        if cls.ptr.is_null() {
            None
        } else {
            Some(cls)
        }
    }
    // Class name as registered with the runtime.
    pub fn name(&self) -> &str {
        unsafe {
            let name = class_getName(*self);
            c_str_to_static_slice(name)
        }
    }
    // Size in bytes of an instance of this class.
    pub fn instance_size(&self) -> uint {
        unsafe {
            class_getInstanceSize(*self) as uint
        }
    }
    // Finds the named ivar (including inherited ones); None if absent.
    pub fn instance_variable(&self, name: &str) -> Option<Ivar> {
        let ivar = name.with_c_str(|name| unsafe {
            class_getInstanceVariable(*self, name)
        });
        if ivar.ptr.is_null() {
            None
        } else {
            Some(ivar)
        }
    }
    // Copies the runtime-allocated ivar list into an owned Vec, then frees
    // the runtime's buffer (class_copyIvarList transfers ownership).
    pub fn instance_variables(&self) -> Vec<Ivar> {
        unsafe {
            let mut count: c_uint = 0;
            let ivars = class_copyIvarList(*self, &mut count) as *const Ivar;
            let vec = buf_as_slice(ivars, count as uint, |ivars| {
                ivars.to_vec()
            });
            libc::free(ivars as *mut c_void);
            vec
        }
    }
}
impl Clone for Class {
    fn clone(&self) -> Class { *self }
}
// Marker trait for types that can receive Objective-C messages.
pub trait Message { }
impl Message for Object { }
// Converts a receiver into the raw object pointer used by objc_msgSend.
pub trait ToMessage {
    fn as_ptr(&self) -> *mut Object;
    // Messaging nil is legal in ObjC; callers can check first.
    fn is_nil(&self) -> bool {
        self.as_ptr().is_null()
    }
}
impl<T: Message> ToMessage for *mut T {
    fn as_ptr(&self) -> *mut Object {
        *self as *mut Object
    }
}
impl<'a, T: Message> ToMessage for &'a T {
    fn as_ptr(&self) -> *mut Object {
        // Cast away constness; the runtime API takes *mut.
        (*self as *const T as *mut T).as_ptr()
    }
}
impl ToMessage for Class {
    fn as_ptr(&self) -> *mut Object {
        self.ptr
    }
}
Added Method struct.
use std::slice::raw::buf_as_slice;
use std::str::raw::c_str_to_static_slice;
use libc::{c_char, c_uint, c_void, ptrdiff_t, size_t};
use libc;
use {encode, Encode};
// Opaque Objective-C object; only ever handled through pointers.
pub enum Object { }
// A selector (method name) registered with the runtime.
pub struct Sel {
    ptr: *const c_void,
}
// An instance-variable descriptor.
pub struct Ivar {
    ptr: *const c_void,
}
// A method descriptor (name, implementation, type encoding).
pub struct Method {
    ptr: *mut c_void,
}
// A class handle; represented as an object pointer by the runtime.
pub struct Class {
    ptr: *mut Object,
}
// A method implementation: variadic C function taking receiver + selector.
pub type Imp = extern fn(*mut Object, Sel, ...) -> *mut Object;
// Raw FFI bindings to the Objective-C runtime (libobjc, via Foundation).
#[link(name = "Foundation", kind = "framework")]
extern {
    // Selector registration / lookup.
    pub fn sel_registerName(name: *const c_char) -> Sel;
    pub fn sel_getName(sel: Sel) -> *const c_char;
    // Class lookup and introspection.
    pub fn objc_getClass(name: *const c_char) -> Class;
    pub fn class_getName(cls: Class) -> *const c_char;
    pub fn class_getInstanceSize(cls: Class) -> size_t;
    pub fn class_getInstanceVariable(cls: Class, name: *const c_char) -> Ivar;
    // Caller owns the returned buffer and must free() it.
    pub fn class_copyIvarList(cls: Class, outCount: *mut c_uint) -> *mut Ivar;
    // Dynamic class construction.
    pub fn class_addMethod(cls: Class, name: Sel, imp: Imp, types: *const c_char) -> bool;
    pub fn class_addIvar(cls: Class, name: *const c_char, size: size_t, alignment: u8, types: *const c_char) -> bool;
    pub fn objc_allocateClassPair(superclass: Class, name: *const c_char, extraBytes: size_t) -> Class;
    pub fn objc_disposeClassPair(cls: Class);
    pub fn objc_registerClassPair(cls: Class);
    // Instance-variable access on objects.
    pub fn object_setInstanceVariable(obj: *mut Object, name: *const c_char, value: *mut c_void) -> Ivar;
    pub fn object_getInstanceVariable(obj: *mut Object, name: *const c_char, outValue: *mut *mut c_void) -> Ivar;
    pub fn object_setIvar(obj: *mut Object, ivar: Ivar, value: *mut Object);
    pub fn object_getIvar(obj: *mut Object, ivar: Ivar) -> *mut Object;
    pub fn object_getClass(obj: *mut Object) -> Class;
    // Ivar descriptor introspection.
    pub fn ivar_getName(ivar: Ivar) -> *const c_char;
    pub fn ivar_getOffset(ivar: Ivar) -> ptrdiff_t;
    pub fn ivar_getTypeEncoding(ivar: Ivar) -> *const c_char;
    // Core message dispatch.
    pub fn objc_msgSend(obj: *mut Object, op: Sel, ...) -> *mut Object;
    // Method descriptor introspection / swizzling.
    pub fn method_getName(method: Method) -> Sel;
    pub fn method_getImplementation(method: Method) -> Imp;
    pub fn method_getTypeEncoding(method: Method) -> *const c_char;
    pub fn method_getNumberOfArguments(method: Method) -> c_uint;
    pub fn method_setImplementation(method: Method, imp: Imp) -> Imp;
}
// Selector helpers: registration and name lookup via the ObjC runtime.
impl Sel {
    // Interns `name` with the runtime and returns the canonical selector.
    pub fn register(name: &str) -> Sel {
        name.with_c_str(|name| unsafe {
            sel_registerName(name)
        })
    }
    // Returns the selector's name; the runtime interns it for process
    // lifetime, hence the static slice.
    pub fn name(&self) -> &str {
        unsafe {
            let name = sel_getName(*self);
            c_str_to_static_slice(name)
        }
    }
}
// Selectors are interned, so pointer equality is name equality.
impl PartialEq for Sel {
    fn eq(&self, other: &Sel) -> bool {
        self.ptr == other.ptr
    }
}
impl Eq for Sel { }
// Plain pointer wrapper; clone is a bitwise copy.
impl Clone for Sel {
    fn clone(&self) -> Sel { *self }
}
// Instance-variable introspection wrappers.
impl Ivar {
    // Name of the ivar; runtime-owned string.
    pub fn name(&self) -> &str {
        unsafe {
            let name = ivar_getName(*self);
            c_str_to_static_slice(name)
        }
    }
    // Byte offset of this ivar from the start of its object.
    pub fn offset(&self) -> int {
        let offset = unsafe {
            ivar_getOffset(*self)
        };
        offset as int
    }
    // Objective-C type-encoding string for the ivar.
    pub fn type_encoding(&self) -> &str {
        unsafe {
            let encoding = ivar_getTypeEncoding(*self);
            c_str_to_static_slice(encoding)
        }
    }
}
impl Clone for Ivar {
    fn clone(&self) -> Ivar { *self }
}
// Method descriptor wrappers: introspection and IMP swizzling.
impl Method {
    // The selector this method responds to.
    pub fn name(&self) -> Sel {
        unsafe {
            method_getName(*self)
        }
    }
    // Full type-encoding string (return + receiver + selector + args).
    pub fn type_encoding(&self) -> &str {
        unsafe {
            let encoding = method_getTypeEncoding(*self);
            c_str_to_static_slice(encoding)
        }
    }
    // Argument count as reported by the runtime (includes the implicit
    // self and _cmd arguments, per the runtime's convention).
    pub fn arguments(&self) -> uint {
        unsafe {
            method_getNumberOfArguments(*self) as uint
        }
    }
    // Current implementation function pointer.
    pub fn implementation(&self) -> Imp {
        unsafe {
            method_getImplementation(*self)
        }
    }
    // Replaces the implementation, returning the previous one. Unsafe:
    // `imp` must match this method's type encoding.
    pub unsafe fn set_implementation(&mut self, imp: Imp) -> Imp {
        method_setImplementation(*self, imp)
    }
}
impl Object {
    // The object's dynamic class, queried from the runtime.
    pub fn class(&self) -> Class {
        unsafe {
            object_getClass(self as *const Object as *mut Object)
        }
    }
    // Borrows the named ivar as a &T. Unsafe: the caller must ensure no
    // conflicting access; the T/ivar type match is asserted via encoding.
    // fail!s if the class has no such ivar.
    pub unsafe fn get_ivar<T: Encode>(&self, name: &str) -> &T {
        let cls = self.class();
        let ptr = match cls.instance_variable(name) {
            Some(ivar) => {
                assert!(ivar.type_encoding() == encode::<T>());
                let offset = ivar.offset();
                let self_ptr = self as *const Object;
                // Ivar storage is at a byte offset inside the object.
                (self_ptr as *const u8).offset(offset) as *const T
            }
            None => fail!("Ivar {} not found on class {}", name, cls.name()),
        };
        &*ptr
    }
    // Mutable variant of get_ivar; same contract and failure mode.
    pub unsafe fn get_mut_ivar<T: Encode>(&mut self, name: &str) -> &mut T {
        let cls = self.class();
        let ptr = match cls.instance_variable(name) {
            Some(ivar) => {
                assert!(ivar.type_encoding() == encode::<T>());
                let offset = ivar.offset();
                let self_ptr = self as *mut Object;
                (self_ptr as *mut u8).offset(offset) as *mut T
            }
            None => fail!("Ivar {} not found on class {}", name, cls.name()),
        };
        &mut *ptr
    }
    // Overwrites the named ivar with `value`.
    pub unsafe fn set_ivar<T: Encode>(&mut self, name: &str, value: T) {
        *self.get_mut_ivar::<T>(name) = value;
    }
}
impl Class {
    // Looks a class up by name; None if the runtime does not know it.
    pub fn get(name: &str) -> Option<Class> {
        let cls = name.with_c_str(|name| unsafe {
            objc_getClass(name)
        });
        if cls.ptr.is_null() {
            None
        } else {
            Some(cls)
        }
    }
    // Class name as registered with the runtime.
    pub fn name(&self) -> &str {
        unsafe {
            let name = class_getName(*self);
            c_str_to_static_slice(name)
        }
    }
    // Size in bytes of an instance of this class.
    pub fn instance_size(&self) -> uint {
        unsafe {
            class_getInstanceSize(*self) as uint
        }
    }
    // Finds the named ivar (including inherited ones); None if absent.
    pub fn instance_variable(&self, name: &str) -> Option<Ivar> {
        let ivar = name.with_c_str(|name| unsafe {
            class_getInstanceVariable(*self, name)
        });
        if ivar.ptr.is_null() {
            None
        } else {
            Some(ivar)
        }
    }
    // Copies the runtime-allocated ivar list into an owned Vec, then frees
    // the runtime's buffer (class_copyIvarList transfers ownership).
    pub fn instance_variables(&self) -> Vec<Ivar> {
        unsafe {
            let mut count: c_uint = 0;
            let ivars = class_copyIvarList(*self, &mut count) as *const Ivar;
            let vec = buf_as_slice(ivars, count as uint, |ivars| {
                ivars.to_vec()
            });
            libc::free(ivars as *mut c_void);
            vec
        }
    }
}
impl Clone for Class {
    fn clone(&self) -> Class { *self }
}
// Marker trait for types that can receive Objective-C messages.
pub trait Message { }
impl Message for Object { }
// Converts a receiver into the raw object pointer used by objc_msgSend.
pub trait ToMessage {
    fn as_ptr(&self) -> *mut Object;
    // Messaging nil is legal in ObjC; callers can check first.
    fn is_nil(&self) -> bool {
        self.as_ptr().is_null()
    }
}
impl<T: Message> ToMessage for *mut T {
    fn as_ptr(&self) -> *mut Object {
        *self as *mut Object
    }
}
impl<'a, T: Message> ToMessage for &'a T {
    fn as_ptr(&self) -> *mut Object {
        // Cast away constness; the runtime API takes *mut.
        (*self as *const T as *mut T).as_ptr()
    }
}
impl ToMessage for Class {
    fn as_ptr(&self) -> *mut Object {
        self.ptr
    }
}
|
use std::io;
use std::borrow;
use std::uint;
use std::result;
use std::str;
use std::vec;
use std::num::{One, Zero, ToStrRadix};
use std::hashmap::HashMap;
use std::managed;
use bounded_iterator::BoundedIterator;
use extra::bigint::BigInt;
use extra::complex::Cmplx;
use datum::*;
use primitive::*;
use numeric::*;
use stack::*;
use parser::Parser;
// Interpreter-specific payload stored inside an LDatum (via LExt).
enum RuntimeData {
    // Placeholder value (used e.g. for letrec pre-bindings).
    RUndef,
    // Built-in primitive function.
    RPrim(PFunc),
    // User procedure: argument names, optional rest-argument name,
    // body expressions, and the captured lexical environment.
    RProc(~[@str],
        Option<@str>,
        ~[@LDatum<RuntimeData>],
        @mut Stack<HashMap<@str, @LDatum<RuntimeData>>>),
}
// Manual Clone: managed (@) pointers and the captured env are shared;
// only the owned arg-name vector is copied element-wise.
impl Clone for RuntimeData {
    fn clone(&self) -> RuntimeData {
        match self {
            &RUndef => RUndef,
            &RPrim(f) => RPrim(f),
            &RProc(ref args, ref vargs, ref body, ref env) => {
                // Copy each @str arg-name handle into a fresh owned vector.
                let cloneargs = do args.map |&arg| {
                    arg
                };
                RProc(cloneargs, *vargs, body.clone(), *env)
            },
        }
    }
}
fn eq(lhs: &RuntimeData, rhs: &RuntimeData) -> bool {
match (lhs, rhs) {
(&RPrim(l), &RPrim(r)) => l == r,
(&RProc(_,_,_,_), &RProc(_,_,_,_)) => lhs == rhs,
_ => false,
}
}
// Eq delegates both operators to the free `eq` helper so the two stay
// consistent by construction.
impl Eq for RuntimeData {
    fn eq(&self, other: &RuntimeData) -> bool {
        eq(self, other)
    }
    fn ne(&self, other: &RuntimeData) -> bool {
        !eq(self, other)
    }
}
// Human-readable rendering of a RuntimeData value; procedures print as
// their address since they have no meaningful textual form.
fn data_to_str(data: &RuntimeData) -> ~str {
    match *data {
        RUndef => ~"<undefined>",
        RPrim(f) => fmt!("<primitive:%s>", f.to_str()),
        RProc(_, _, _, _) => fmt!("<procedure 0x%08x>", borrow::to_uint(data)),
    }
}
// ToStr just forwards to the free data_to_str helper.
impl ToStr for RuntimeData {
    fn to_str(&self) -> ~str {
        data_to_str(self)
    }
}
// Datum type specialized with the interpreter's runtime payload.
type RDatum = LDatum<RuntimeData>;
// Two-way conversion between Rust values and Scheme datums, used by the
// typed primitive-call helpers (call_tc1/call_tc2/call_err2).
pub trait DatumConv {
    // Applies `op` to the converted value, or None if `datum` is not of
    // this type (CPS style, since old Rust lacks associated returns).
    fn from_datum<R>(@RDatum, &fn(&Self) -> R) -> Option<R>;
    // Wraps the value back into a datum.
    fn to_datum(&self) -> @RDatum;
    // Type name used in error messages.
    fn typename() -> ~str;
}
// Identity conversion: any datum "is" a datum.
impl DatumConv for @RDatum {
    fn from_datum<R>(datum: @RDatum, op: &fn(&@RDatum) -> R) -> Option<R> {
        Some(op(&datum))
    }
    fn to_datum(&self) -> @RDatum {
        *self
    }
    fn typename() -> ~str {
        ~"datum"
    }
}
// Extension payloads (procedures, primitives) round-trip through LExt.
impl DatumConv for RuntimeData {
    fn from_datum<R>(datum: @RDatum, op: &fn(&RuntimeData) -> R) -> Option<R> {
        match datum {
            @LExt(ref r) => Some(op(r)),
            _ => None,
        }
    }
    fn to_datum(&self) -> @RDatum {
        @LExt(self.clone())
    }
    fn typename() -> ~str {
        ~"procedure"
    }
}
// Numbers round-trip through LNum.
impl DatumConv for LNumeric {
    fn from_datum<R>(datum: @RDatum, op: &fn(&LNumeric) -> R) -> Option<R> {
        match datum {
            @LNum(ref n) => Some(op(n)),
            _ => None,
        }
    }
    fn to_datum(&self) -> @RDatum {
        @LNum(self.clone())
    }
    fn typename() -> ~str {
        ~"number"
    }
}
// Exact integers: an exact complex with zero imaginary part and an
// integral real part converts to/from BigInt.
impl DatumConv for BigInt {
    fn from_datum<R>(datum: @RDatum, op: &fn(&BigInt) -> R) -> Option<R> {
        match datum {
            @LNum(ref n) => match *n {
                NExact( Cmplx{ re: ref re, im: ref im } ) =>
                    // NOTE(review): this tests numerator() == 1 and hands
                    // op the denominator() — the conventional integer test
                    // would be the reverse. Verify against the rational
                    // library's numerator/denominator naming.
                    if im.is_zero() && *re.numerator() == One::one() {
                        Some(op(re.denominator()))
                    } else {
                        None
                    },
                NInexact(_) => None,
            },
            _ => None,
        }
    }
    fn to_datum(&self) -> @RDatum {
        @LNum(from_bigint(self.clone()))
    }
    fn typename() -> ~str {
        ~"integer"
    }
}
// Newtype marking "a proper Scheme list" for the DatumConv machinery.
struct GetList {
    list: ~[@RDatum]
}
impl DatumConv for GetList {
    #[inline]
    fn from_datum<R>(datum: @RDatum, op: &fn(&GetList) -> R) -> Option<R> {
        // to_list returns None for improper (dotted) lists.
        match datum.to_list() {
            Some(l) => Some(op(&GetList{ list: l })),
            _ => None,
        }
    }
    #[inline]
    fn to_datum(&self) -> @RDatum {
        LDatum::from_list(self.list)
    }
    fn typename() -> ~str {
        ~"list"
    }
}
// Interpreter state: standard ports, the lexical environment stack, the
// global binding table (values or macros), and quasiquote nesting depth.
struct Runtime {
    stdin: @Reader,
    stdout: @Writer,
    stderr: @Writer,
    // Stack of local frames; innermost frame on top.
    env: @mut Stack<HashMap<@str, @RDatum>>,
    // Globals: Left = value binding, Right = special-form syntax.
    global: HashMap<@str, Either<@RDatum, PrimSyntax>>,
    // Current quasiquote nesting level.
    qq_lvl: uint,
}
// All the ways evaluation can fail.
#[deriving(Eq)]
enum RuntimeError {
    UnboundVariable(@str),
    // Tried to use a macro name as a value.
    RefMacro(@str),
    NotCallable,
    NotList,
    // (min, optional max, actual) argument-count mismatch.
    ArgNumError(uint, Option<uint>, uint),
    TypeError,
    DivideByZeroError,
    NilEval,
    BadSyntax(PrimSyntax, ~str),
    // (line, column, message) from the reader.
    ParseError(uint, uint, ~str),
    RangeError,
}
// ToStr forwards to the free err_to_str helper.
impl ToStr for RuntimeError {
    fn to_str(&self) -> ~str {
        err_to_str(self)
    }
}
// Renders a RuntimeError as a user-facing message.
priv fn err_to_str(&err: &RuntimeError) -> ~str {
    match err {
        UnboundVariable(name) => ~"unbound variable: " + name,
        RefMacro(name) => ~"cannot reference macro name: " + name,
        NotCallable => ~"not callable",
        NotList => ~"not list",
        // Bounded arity: "n" or "min-max" depending on the range.
        ArgNumError(min, Some(max), argnum) => {
            if min == max {
                fmt!("expected %u arguments, but found %u arguments", min, argnum)
            } else {
                fmt!("expected %u-%u arguments, but found %u arguments", min, max, argnum)
            }
        },
        // Unbounded arity (variadic minimum).
        ArgNumError(expected, None, argnum) => {
            fmt!("expected %u or more arguments, but found %u arguments", expected, argnum)
        },
        TypeError => ~"type error",
        DivideByZeroError => ~"divide by zero",
        NilEval => ~"() cannot be evaluated",
        BadSyntax(syn, reason) => ~"bad syntax for " + syn.to_str() + ": " + reason,
        ParseError(line, col, reason) => fmt!("failed to parse: %u:%u: %s", line, col, reason),
        RangeError => ~"index out of range",
    }
}
// Builds the initial global table: every primitive function and special
// form under its printed name, plus the constant `pi`.
fn load_prelude() -> HashMap<@str, Either<@RDatum, PrimSyntax>> {
    let mut map = HashMap::new();
    // BoundedIterator enumerates every variant of the enum.
    let mut prim_iter = BoundedIterator::new::<PFunc>();
    for prim_iter.advance |prim:PFunc| {
        let key = prim.to_str();
        map.insert(key.to_managed(), Left(@LExt(RPrim(prim))));
    }
    let mut syntax_iter = BoundedIterator::new::<PrimSyntax>();
    for syntax_iter.advance |syntax:PrimSyntax| {
        let key = syntax.to_str();
        map.insert(key.to_managed(), Right(syntax));
    }
    map.insert("pi".to_managed(), Left(@LNum(inexact(Real::pi(), 0f64))));
    map
}
// Arity-1 primitive wrapper: checks the argument count, then applies op.
priv fn call_prim1(args: &[@RDatum],
                op: &fn(@RDatum) -> Result<@RDatum, RuntimeError>)
    -> Result<@RDatum, RuntimeError>
{
    if args.len() == 1 {
        op(args[0])
    } else {
        Err(ArgNumError(1, Some(1), args.len()))
    }
}
// Arity-2 primitive wrapper: checks the argument count, then applies op.
priv fn call_prim2(args: &[@RDatum],
                op: &fn(@RDatum, @RDatum) -> Result<@RDatum, RuntimeError>)
    -> Result<@RDatum, RuntimeError>
{
    if args.len() == 2 {
        op(args[0], args[1])
    } else {
        Err(ArgNumError(2, Some(2), args.len()))
    }
}
// Typed arity-1 wrapper: converts the argument via DatumConv, applies op,
// and converts the result back. TypeError if the conversion fails.
priv fn call_tc1<A: DatumConv, R: DatumConv> (
    args: &[@RDatum], op: &fn(&A) -> R
) -> Result<@RDatum, RuntimeError>
{
    match args {
        [arg] => {
            let res = DatumConv::from_datum::<A, R>(arg, op);
            match res {
                Some(ref x) => Ok(x.to_datum()),
                None => Err(TypeError),
            }
        },
        _ => Err(ArgNumError(1, Some(1), args.len())),
    }
}
// Typed arity-2 wrapper: both arguments are converted via DatumConv; the
// nested Options collapse to TypeError if either conversion fails.
priv fn call_tc2<A: DatumConv, B:DatumConv, R: DatumConv> (
    args: &[@RDatum], op: &fn(&A, &B) -> R
) -> Result<@RDatum, RuntimeError>
{
    match args {
        [arg0, arg1] => {
            let res = do DatumConv::from_datum::<A, Option<R>>(arg0) |a| {
                do DatumConv::from_datum::<B, R>(arg1) |b| {
                    op(a, b)
                }
            };
            match res {
                Some(Some(x)) => Ok(x.to_datum()),
                _ => Err(TypeError),
            }
        },
        _ => Err(ArgNumError(2, Some(2), args.len())),
    }
}
// Like call_tc2, but op itself is fallible; its error is propagated while
// conversion failures become TypeError.
priv fn call_err2<A: DatumConv, B: DatumConv, R: DatumConv> (
    args: &[@RDatum], op: &fn(&A, &B) -> Result<R, RuntimeError>
) -> Result<@RDatum, RuntimeError>
{
    match args {
        [arg0, arg1] => {
            let r = do DatumConv::from_datum::<A, Result<R, RuntimeError>>(arg0) |a| {
                let res = do DatumConv::from_datum::<B, Result<R, RuntimeError>>(arg1) |b| {
                    op(a, b)
                };
                // Inner conversion failed -> TypeError.
                match res {
                    Some(x) => x,
                    None => Err(TypeError),
                }
            };
            match r {
                Some(Ok(x)) => Ok(x.to_datum()),
                Some(Err(e)) => Err(e),
                // Outer conversion failed -> TypeError.
                None => Err(TypeError),
            }
        },
        _ => Err(ArgNumError(2, Some(2), args.len())),
    }
}
// Folds `op` over the numeric arguments starting from `a0`, stopping at
// the first failure.
//
// Bugs fixed: (1) the error returned by `op` (e.g. DivideByZeroError from
// PDiv's closure) was discarded and reported as TypeError; the actual
// error is now carried through. (2) removed the dead `err = false` reset —
// iteration already stops at the first error, so the flag could never be
// observed true-then-false.
priv fn call_num_foldl(args: &[@RDatum],
                    a0: &LNumeric,
                    op: &fn(&LNumeric, &LNumeric) -> Result<LNumeric, RuntimeError>)
    -> Result<@RDatum, RuntimeError>
{
    let mut res:LNumeric = a0.clone();
    let mut err:Option<RuntimeError> = None;
    do args.each |&arg| {
        match arg {
            @LNum(ref a) => {
                match op(&res, a) {
                    Ok(n) => {
                        res = n;
                    },
                    Err(e) => {
                        // Keep the operator's own error (e.g. divide-by-zero).
                        err = Some(e);
                    }
                }
            },
            _ => {
                // Non-numeric argument in a numeric fold.
                err = Some(TypeError);
            }
        }
        // each stops as soon as the closure returns false (first error).
        err.is_none()
    };
    match err {
        Some(e) => Err(e),
        None => Ok(@LNum(res)),
    }
}
// Arity-1 wrapper over real numbers: TypeError if the argument is not a
// number or has a non-zero imaginary part.
priv fn call_real_prim1(args: &[@RDatum], op: &fn(&LReal) -> LReal)
    -> Result<@RDatum, RuntimeError>
{
    match args {
        [@LNum(ref n)] => match get_real(n) {
            None => Err(TypeError),
            Some(r) => Ok(@LNum(from_real(&op(&r)))),
        },
        [_] => Err(TypeError),
        _ => Err(ArgNumError(1, Some(1), args.len())),
    }
}
// Arity-2 wrapper over real numbers; both arguments must be real.
priv fn call_real_prim2(args: &[@RDatum], op: &fn(&LReal, &LReal) -> LNumeric)
    -> Result<@RDatum, RuntimeError>
{
    match args {
        [@LNum(ref x), @LNum(ref y)] => match (get_real(x), get_real(y)) {
            (Some(ref rx), Some(ref ry)) => Ok(@LNum(op(rx, ry))),
            _ => Err(TypeError),
        },
        [_, _] => Err(TypeError),
        _ => Err(ArgNumError(2, Some(2), args.len())),
    }
}
// Arity-1 wrapper that coerces the argument to an inexact complex before
// applying op; result is always inexact.
priv fn call_inexact(args: &[@RDatum], op: &fn(&Cmplx<f64>) -> Cmplx<f64>)
    -> Result<@RDatum, RuntimeError>
{
    match args {
        [@LNum(ref n)] => Ok(@LNum(NInexact(op(&n.to_inexact())))),
        [_] => Err(TypeError),
        _ => Err(ArgNumError(1, Some(1), args.len())),
    }
}
// Chained binary comparison over >=2 real arguments (as in Scheme's
// (< a b c ...)): returns #f on the first failing adjacent pair,
// #t if every pair satisfies op.
priv fn call_real_bfoldl(args: &[@RDatum], op: &fn(&LReal, &LReal) -> bool)
    -> Result<@RDatum, RuntimeError>
{
    let n = args.len();
    if n < 2 {
        return Err(ArgNumError(2, None, n));
    }
    // First operand; must be a real number.
    let mut a = match args[0] {
        @LNum(ref n) => match get_real(n) {
            None => return Err(TypeError),
            Some(r) => r,
        },
        _ => return Err(TypeError),
    };
    let mut idx = 1;
    while idx < n {
        let b = match args[idx] {
            @LNum(ref n) => match get_real(n) {
                None => return Err(TypeError),
                Some(r) => r,
            },
            _ => return Err(TypeError),
        };
        if !op(&a, &b) {
            return Ok(@LBool(false));
        }
        // Slide the comparison window.
        a = b;
        idx += 1;
    }
    return Ok(@LBool(true));
}
// Parses a let-style bindings form: a list of (name expr) pairs.
// The error string is wrapped into BadSyntax by the caller.
priv fn get_bindings(arg: &RDatum) -> Result<~[(@str, @RDatum)], ~str> {
    match arg.to_list() {
        None => Err(~"non-list bindings"),
        Some(bindings) => do result::map_vec(bindings) |datum| {
            match datum.to_list() {
                Some([@LIdent(name), expr]) => Ok((name, expr)),
                Some(_) | None => Err(~"invalid binding")
            }
        }
    }
}
// Parses a lambda/define formals list, which may be proper ((a b)),
// dotted ((a . rest)), or a bare symbol (rest). Returns the fixed
// argument names and the optional rest-argument name.
priv fn get_syms(&arg: &@RDatum) -> Result<(~[@str], Option<@str>), ~str> {
    let mut iter = arg;
    let mut args : ~[@str] = ~[];
    let mut varargs : Option<@str> = None;
    loop {
        match *iter {
            LCons(h, t) => match *h {
                LIdent(name) => {
                    args.push(name);
                    iter = t;
                },
                _ => {
                    return Err(~"non-symbol argument");
                }
            },
            // Dotted tail: the rest-argument name.
            LIdent(name) => {
                varargs = Some(name);
                break;
            },
            // Proper-list terminator.
            LNil => {
                break;
            },
            _ => {
                return Err(~"non-list argument");
            },
        }
    }
    Ok((args, varargs))
}
impl Runtime {
    // Returns the special form bound to `val` if it is an identifier
    // naming one in the global table; None otherwise.
    fn get_syntax(&self, val: &RDatum) -> Option<PrimSyntax> {
        match *val {
            LIdent(name) => match self.global.find(&name) {
                Some(&Right(syn)) => Some(syn),
                _ => None,
            },
            _ => None,
        }
    }
    // Resolves a variable: innermost local frame first, then globals.
    // Referencing a macro name as a value is an error.
    fn find_var(&self, name: &@str) -> Result<@RDatum, RuntimeError> {
        let mut val: Option<@RDatum> = None;
        // each walks the frames; returning false stops at the first hit.
        do self.env.each |frame| {
            match frame.find(name) {
                None => true,
                Some(v) => {
                    val = Some(*v);
                    false
                }
            }
        };
        match val {
            None => match self.global.find(name) {
                Some(&Left(v)) => Ok(v),
                Some(&Right(_)) => Err(RefMacro(*name)),
                None => Err(UnboundVariable(*name)),
            },
            Some(v) => Ok(v),
        }
    }
    // (let ((name expr) ...) body...): evaluates every binding in the
    // current environment, then runs the body in one new frame holding
    // them all (bindings cannot see each other, unlike let*).
    fn syn_let(&mut self, bindings: &RDatum, body: &[@RDatum]) -> Result<@RDatum, RuntimeError> {
        match get_bindings(bindings) {
            Err(e) => Err(BadSyntax(SynLet, e)),
            Ok(b) => {
                let mut arg_frame = HashMap::new();
                let mut err:Option<RuntimeError> = None;
                // Evaluate bindings; stop at the first failure.
                do b.each |&(name, expr)| {
                    match self.eval(expr) {
                        Ok(val) => {
                            arg_frame.insert(name, val);
                            true
                        }
                        Err(e) => {
                            err = Some(e);
                            false
                        }
                    }
                };
                match err {
                    Some(e) => Err(e),
                    None => self.local_eval(arg_frame, self.env, body)
                }
            }
        }
    }
fn syn_letstar(&mut self, bindings: &RDatum, body: &[@RDatum])
-> Result<@RDatum, RuntimeError>
{
match get_bindings(bindings) {
Err(e) => Err(BadSyntax(SynLet, e)),
Ok(b) => {
let old_frame = self.env;
let mut err:Option<RuntimeError> = None;
do b.each |&(name, expr)| {
match self.eval(expr) {
Ok(val) => {
let mut arg_frame = HashMap::new();
arg_frame.insert(name, val);
self.env = @mut push(self.env, arg_frame);
true
},
Err(e) => {
err = Some(e);
false
},
}
};
let mut res:Result<@RDatum, RuntimeError> = Err(NilEval);
match err {
Some(e) => {
res = Err(e);
},
None => {
do body.each |&val| {
res = self.eval(val);
res.is_ok()
};
}
};
self.env = old_frame;
return res
}
}
}
fn syn_letrec(&mut self, bindings: &RDatum, body: &[@RDatum]) -> Result<@RDatum, RuntimeError>
{
match get_bindings(bindings) {
Err(e) => Err(BadSyntax(SynLet, e)),
Ok(b) => {
let old_frame = self.env;
let mut arg_frame = HashMap::new();
let (names, exprs) = vec::unzip(b);
for names.each |&name| {
arg_frame.insert(name, @LExt(RUndef));
}
self.env = @mut push(old_frame, arg_frame);
let mut res:Result<@RDatum, RuntimeError> = Err(NilEval);
match result::map_vec(exprs, |&expr| { self.eval(expr) }) {
Ok(vals) => {
do self.env.mut_top |frame| {
for uint::range(0, names.len()) |i| {
frame.insert(names[i], vals[i]);
}
};
do body.each |&val| {
res = self.eval(val);
res.is_ok()
};
},
Err(e) => {
res = Err(e);
},
}
self.env = old_frame;
res
}
}
}
    // (cond (pred expr) ... (else expr)?): validates every clause first,
    // then evaluates predicates in order until one is non-#f. The else
    // clause, if present, must be last and fires only when every predicate
    // was #f.
    fn cond(&mut self, conds: &[@RDatum]) -> Result<@RDatum, RuntimeError>
    {
        let mut i = 0u;
        let mut exprs = vec::with_capacity(conds.len());
        let mut else_opt = None;
        // Validation pass: collect (pred, expr) pairs and the else arm.
        while i < conds.len() {
            match conds[i].to_list() {
                Some([@LIdent(els), expr]) if els.as_slice() == "else" =>
                    if i == conds.len()-1 {
                        else_opt = Some(expr);
                    } else {
                        return Err(BadSyntax(SynCond, ~"trailing conditions after else"));
                    },
                Some([pred, expr]) => exprs.push((pred, expr)),
                _ => return Err(BadSyntax(SynCond, ~"invalid conditional expression")),
            }
            i += 1;
        }
        // Default result when no clause fires and there is no else.
        let mut res = Ok(@LExt(RUndef));
        // expr_end is true iff every predicate evaluated to #f.
        let expr_end = do exprs.each |&(pred, expr)| {
            match self.eval(pred) {
                Err(e) => {
                    res = Err(e);
                    false
                },
                // #f: keep scanning. Anything else is truthy.
                Ok(@LBool(false)) => true,
                _ => {
                    res = self.eval(expr);
                    false
                },
            }
        };
        match else_opt {
            Some(else_expr) if expr_end => self.eval(else_expr),
            _ => res
        }
    }
    // Parses a define form into (name, value). Two shapes:
    //   (define name expr)            -> evaluate expr
    //   (define (name args...) body)  -> build an RProc closure
    fn define(&mut self, args: ~[@RDatum]) -> Result<(@str, @RDatum), RuntimeError> {
        match get_syms(&args[0]) {
            Err(e) => Err(BadSyntax(SynDefine, e)),
            Ok((anames, varargs)) =>
                if anames.is_empty() {
                    // Bare-symbol form: get_syms reports the name as varargs.
                    match varargs {
                        None => Err(BadSyntax(SynDefine, ~"name not given")),
                        Some(name) => if args.len() != 2 {
                            Err(BadSyntax(SynDefine, ~"multiple expressions"))
                        } else {
                            do self.eval(args[1]).map |&val| {
                                (name, val)
                            }
                        }
                    }
                } else {
                    // Function form: first symbol is the name, the rest are
                    // formals; the remaining forms are the body.
                    let name = anames[0];
                    let anames = anames.slice(1, anames.len()).to_owned();
                    let seq = args.slice(1, args.len()).to_owned();
                    let proc = @LExt(RProc(anames, varargs, seq, self.env));
                    Ok((name, proc))
                }
        }
    }
fn run_syntax(&mut self,
syn: PrimSyntax,
args: ~[@RDatum]) -> Result<@RDatum, RuntimeError>
{
match syn {
SynIf => if args.len() == 3 {
do self.eval(args[0]).chain |cond| {
match *cond {
LBool(false) => self.eval(args[2]),
_ => self.eval(args[1]),
}
}
} else {
Err(BadSyntax(SynIf, ~"bad number of arguments"))
},
SynCond => self.cond(args),
SynLambda => if args.len() < 2 {
Err(BadSyntax(SynLambda, ~"no body given"))
} else {
match get_syms(&args[0]) {
Err(e) => Err(BadSyntax(SynLambda, e)),
Ok((anames, varargs)) => {
let seq = args.slice(1, args.len()).to_owned();
Ok(@LExt(RProc(anames, varargs, seq, self.env)))
},
}
},
SynLet => if args.len() < 2 {
Err(BadSyntax(SynLet, ~"no body given"))
} else {
self.syn_let(args[0], args.slice(1, args.len()))
},
SynLetRec => if args.len() < 2 {
Err(BadSyntax(SynLetRec, ~"no body given"))
} else {
self.syn_letrec(args[0], args.slice(1, args.len()))
},
SynLetStar => if args.len() < 2 {
Err(BadSyntax(SynLetRec, ~"no body given"))
} else {
self.syn_letstar(args[0], args.slice(1, args.len()))
},
SynDefine => if args.len() < 2 {
Err(BadSyntax(SynDefine, ~"no body given"))
} else {
let definition = self.define(args);
match definition {
Err(e) => Err(e),
Ok((name, val)) => {
if self.env.size_hint() == Some(0) {
// this is the top-level context
// just bind the definition in global
self.global.insert(name, Left(val));
} else {
// this is not the top-level context
// create a new frame
let mut frame = HashMap::new();
frame.insert(name, val);
self.env = @mut push(self.env, frame);
};
Ok(@LNil)
},
}
},
SynSet => if args.len() != 2 {
Err(BadSyntax(SynSet, ~"bad number of arguments"))
} else {
match *args[0] {
LIdent(name) => do self.eval(args[1]).chain |val| {
if set_var(self.env, &name, val) {
Ok(@LNil)
} else {
Err(BadSyntax(SynSet, ~"unbound variable"))
}
},
_ => Err(BadSyntax(SynSet, ~"cannot set non-variable"))
}
},
SynQuote => if args.len() == 1 {
Ok(args[0])
} else {
Err(BadSyntax(SynQuote, ~"bad number of arguments"))
},
SynQQuote => if args.len() == 1 {
self.quasiquote(&args[0])
} else {
Err(BadSyntax(SynQQuote, ~"bad number of arguments"))
},
SynUnquote => if args.len() == 1 {
self.unquote(&args[0])
} else {
Err(BadSyntax(SynUnquote, ~"bad number of arguments"))
},
SynAnd => self.syn_and(args),
SynOr => self.syn_or(args),
}
}
    // (and e...): evaluates left to right; #f short-circuits, otherwise
    // the last value is returned. (and) with no operands is #t.
    priv fn syn_and(&mut self, args: &[@RDatum]) -> Result<@RDatum, RuntimeError>
    {
        let mut res = @LBool(true);
        let mut i = 0u;
        while i < args.len() {
            match self.eval(args[i]) {
                Ok(@LBool(false)) => return Ok(@LBool(false)),
                Ok(x) => { res = x },
                Err(e) => return Err(e),
            };
            i += 1;
        }
        return Ok(res)
    }
    // (or e...): evaluates left to right; the first non-#f value
    // short-circuits. (or) with no operands is #f.
    priv fn syn_or(&mut self, args: &[@RDatum]) -> Result<@RDatum, RuntimeError>
    {
        let mut i = 0u;
        while i < args.len() {
            match self.eval(args[i]) {
                Ok(@LBool(false)) => (),
                Ok(x) => return Ok(x),
                Err(e) => return Err(e),
            };
            i += 1;
        }
        return Ok(@LBool(false))
    }
    // Applies a user procedure: checks arity, binds fixed args by name,
    // collects any surplus into a list for the rest-argument, and runs the
    // body in a frame pushed on the procedure's captured environment.
    fn call_proc(&mut self,
                anames: &[@str],
                vargs: Option<@str>,
                code: &[@RDatum],
                frame: @mut Stack<HashMap<@str, @RDatum>>,
                args: &[@RDatum]) -> Result<@RDatum, RuntimeError>
    {
        // create new frame to store args
        let mut arg_frame = HashMap::new();
        match vargs {
            None => if args.len() != anames.len() {
                return Err(ArgNumError(anames.len(), Some(anames.len()), args.len()));
            },
            Some(vname) => if args.len() < anames.len() {
                return Err(ArgNumError(anames.len(), None, args.len()));
            } else {
                // Fold the surplus args right-to-left into a proper list.
                let vslice = args.slice(anames.len(), args.len());
                let va = do vslice.rev_iter().fold(@LNil) |a, &l| {
                    @LCons(l, a)
                };
                arg_frame.insert(vname, va);
            },
        }
        for uint::range(0, anames.len()) |i| {
            arg_frame.insert(anames[i], args[i]);
        }
        self.local_eval(arg_frame, frame, code)
    }
    // Evaluates `code` in sequence with `arg_frame` pushed on top of
    // `frame`; the previous environment is restored afterwards and the
    // last expression's result (or first error) is returned.
    fn local_eval(&mut self,
                arg_frame: HashMap<@str, @RDatum>,
                frame: @mut Stack<HashMap<@str, @RDatum>>,
                code: &[@RDatum])
        -> Result<@RDatum, RuntimeError>
    {
        // store current env
        let old_env = self.env;
        // create new local env
        self.env = @mut push(frame, arg_frame);
        // NilEval stands in if `code` is empty.
        let mut res:Result<@RDatum, RuntimeError> = Err(NilEval);
        do code.each() |&val| {
            res = self.eval(val);
            res.is_ok()
        };
        // restore env
        self.env = old_env;
        res
    }
fn call_prim(&mut self,
f: PFunc,
args: &[@RDatum]) -> Result<@RDatum, RuntimeError>
{
match f {
PEval => match args {
[arg] => self.eval(arg),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PApply => do call_err2::<RuntimeData, GetList, @RDatum>(args) |f, l| {
self.apply(f, l.list)
},
PBegin => if args.len() == 0 {
Ok(@LExt(RUndef))
} else {
Ok(*args.last())
},
PAdd => do call_num_foldl(args, &Zero::zero()) |&lhs, &rhs| { Ok(lhs + rhs) },
PSub => match args {
[] => Err(ArgNumError(1, None, 0)),
[@LNum(ref x)] => Ok(@LNum(-*x)),
[@LNum(ref x), ..tail] =>
do call_num_foldl(tail, x) |&lhs, &rhs| { Ok(lhs - rhs) },
_ => Err(TypeError),
},
PMul => do call_num_foldl(args, &One::one()) |&lhs, &rhs| { Ok(lhs * rhs) },
PDiv => match args {
[] => Err(ArgNumError(1, None, 0)),
[@LNum(ref x)] => if x.is_zero() {
Err(DivideByZeroError)
} else {
Ok(@LNum(x.recip()))
},
[@LNum(ref x), ..tail] =>
do call_num_foldl(tail, x) |&lhs, &rhs| {
if rhs.is_zero() {
Err(DivideByZeroError)
} else {
Ok(lhs / rhs)
}
},
_ => Err(TypeError),
},
PQuotient => do call_err2::<BigInt, BigInt, BigInt>(args) |&lhs, &rhs| {
if rhs.is_zero() {
Err(DivideByZeroError)
} else {
Ok(lhs / rhs)
}
},
PRemainder => do call_err2::<BigInt, BigInt, BigInt>(args) |&lhs, &rhs| {
if rhs.is_zero() {
Err(DivideByZeroError)
} else {
Ok(lhs % rhs)
}
},
PModulo => do call_err2::<BigInt, BigInt, BigInt>(args) |&lhs, &rhs| {
if rhs.is_zero() {
Err(DivideByZeroError)
} else {
Ok(modulo(lhs, rhs))
}
},
PFloor => do call_real_prim1(args) |&f| { f.floor() },
PCeiling => do call_real_prim1(args) |&f| { f.ceil() },
PRound => do call_real_prim1(args) |&f| { f.round() },
PTruncate => do call_real_prim1(args) |&f| { f.trunc() },
PExp => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.exp() },
PLog => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.ln() },
PSin => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.sin() },
PCos => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.cos() },
PTan => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.tan() },
PAsin => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.asin() },
PAcos => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.acos() },
PAtan => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.atan() },
PSqrt => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.sqrt() },
PExpt => do call_tc2::<LNumeric, LNumeric, LNumeric>(args) |x, r| { x.pow(r) },
PMakeRectangular => do call_real_prim2(args) |rx, ry| {
coerce(rx, ry, |&a, &b| { exact(a, b) }, |a, b| { inexact(a, b) })
},
PMakePolar => do call_real_prim2(args) |rx, ry| {
polar(rx.to_inexact(), ry.to_inexact())
},
PRealPart => do call_tc1::<LNumeric, LNumeric>(args) |&x| {
match x {
NExact( Cmplx { re: ref re, im: _ } ) => from_rational(re),
NInexact( Cmplx { re: re, im: _ } ) => from_f64(re),
}
},
PImagPart => do call_tc1::<LNumeric, LNumeric>(args) |&x| {
match x {
NExact( Cmplx { re: _, im: ref im } ) => from_rational(im),
NInexact( Cmplx { re: _, im: im } ) => from_f64(im),
}
},
PMagnitude => do call_tc1::<LNumeric, LNumeric>(args) |x| {
let (norm, _) = x.to_inexact().to_polar();
from_f64(norm)
},
PAngle => do call_tc1::<LNumeric, LNumeric>(args) |x| {
let (_, arg) = x.to_inexact().to_polar();
from_f64(arg)
},
PNumerator => match args {
[@LNum(NExact( Cmplx { re: ref re, im: ref im } ))] if im.is_zero() =>
Ok(@LNum( from_bigint(re.numerator().clone()) )),
[_] =>
Err(TypeError),
_ =>
Err(ArgNumError(1, Some(1), args.len())),
},
PDenominator => match args {
[@LNum(NExact( Cmplx { re: ref re, im: ref im } ))] if im.is_zero() =>
Ok(@LNum( from_bigint(re.denominator().clone()) )),
[_] =>
Err(TypeError),
_ =>
Err(ArgNumError(1, Some(1), args.len())),
},
PCar => do call_prim1(args) |arg| {
match *arg {
LCons(h, _) => Ok(h),
_ => Err(TypeError),
}
},
PCdr => do call_prim1(args) |arg| {
match *arg {
LCons(_, t) => Ok(t),
_ => Err(TypeError),
}
},
PCons => do call_prim2(args) |arg1, arg2| { Ok(@LCons(arg1, arg2)) },
PEqv => do call_prim2(args) |arg1, arg2| {
let b =
match (arg1, arg2) {
(@LCons(_, _), @LCons(_, _)) => managed::ptr_eq(arg1, arg2),
(@LString(_), @LString(_)) => managed::ptr_eq(arg1, arg2),
(@LExt(_), @LExt(_)) => managed::ptr_eq(arg1, arg2),
_ => arg1 == arg2,
};
Ok(@LBool(b))
},
PEqual => do call_prim2(args) |arg1, arg2| {
Ok(@LBool(arg1 == arg2))
},
PNumber => do call_prim1(args) |arg| {
match *arg {
LNum(_) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PReal => match args {
[@LNum(ref c)] => Ok(@LBool(c.is_real())),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PInteger => do call_prim1(args) |arg| {
match *arg {
LNum(NExact(Cmplx { re: ref re, im: ref im })) =>
Ok(@LBool(*re.numerator() == One::one() && *im.numerator() == One::one())),
LNum(NInexact(Cmplx { re: re, im: im })) =>
Ok(@LBool(re.round() == re && im.round() == im)),
_ => Ok(@LBool(false)),
}
},
PExact => do call_prim1(args) |arg| {
match *arg {
LNum(NExact(_)) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PInexact => do call_prim1(args) |arg| {
match *arg {
LNum(NInexact(_)) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PExactInexact => do call_tc1::<LNumeric, LNumeric>(args) |arg| {
NInexact(arg.to_inexact())
},
PNumberString => match args {
[@LNum(ref x)] => Ok(@LString(x.to_str())),
[@LNum(ref x), @LNum(ref r)] => match get_uint(r) {
None => Err(TypeError),
Some(radix) => {
match x {
&NExact(ref n) => Ok(@LString(n.to_str_radix(radix))),
_ =>
if radix == 10 {
Ok(@LString(x.to_str()))
} else {
Err(TypeError)
},
}
},
},
[_] | [_, _] => Err(TypeError),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PEQ => do call_real_bfoldl(args) |&lhs, &rhs| { lhs == rhs },
PGT => do call_real_bfoldl(args) |&lhs, &rhs| { lhs > rhs },
PLT => do call_real_bfoldl(args) |&lhs, &rhs| { lhs < rhs },
PGE => do call_real_bfoldl(args) |&lhs, &rhs| { lhs >= rhs },
PLE => do call_real_bfoldl(args) |&lhs, &rhs| { lhs <= rhs },
PNot => match args {
[@LBool(false)] => Ok(@LBool(true)),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PBoolean => match args {
[@LBool(_)] => Ok(@LBool(true)),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PChar => do call_prim1(args) |arg| {
match arg {
@LChar(_) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PProcedure => match args {
[@LExt(RUndef)] => Ok(@LBool(false)),
[@LExt(_)] => Ok(@LBool(true)),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PIsVector => match args {
[@LVector(_)] => Ok(@LBool(true)),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PMakeVector => match args {
[@LNum(ref x)] => match get_uint(x) {
Some(k) => {
let mut v = ~[];
v.grow(k, &@LExt(RUndef));
Ok(@LVector(v))
},
None => Err(TypeError),
},
[@LNum(ref x), ref y] => match get_uint(x) {
Some(k) => {
let mut v = ~[];
v.grow(k, y);
Ok(@LVector(v))
},
None => Err(TypeError),
},
[_] | [_, _] => Err(TypeError),
_ => Err(ArgNumError(1, Some(2), args.len())),
},
PVector => Ok(@LVector(args.to_owned())),
PVectorLength => match args {
[@LVector(ref v)] => Ok(@LNum(from_uint(v.len()))),
[_] => Err(TypeError),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PVectorRef => match args {
[@LVector(ref v), @LNum(ref k)] => match get_uint(k) {
Some(i) =>
if i < v.len() {
Ok(v[i])
} else {
Err(RangeError)
},
None => Err(TypeError),
},
[_, _] => Err(TypeError),
_ => Err(ArgNumError(2, Some(2), args.len())),
},
PVectorList => match args {
[@LVector(ref v)] => Ok(LDatum::from_list(*v)),
[_] => Err(TypeError),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PListVector => match args {
[arg] => match arg.to_list() {
Some(v) => Ok(@LVector(v)),
None => Err(TypeError),
},
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PNull => do call_prim1(args) |arg| {
match arg {
@LNil => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PPair => do call_prim1(args) |arg| {
match arg {
@LCons(_, _) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PIsString => do call_prim1(args) |arg| {
match arg {
@LString(_) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PString => {
let char_list = do result::map_vec(args) |arg| {
match *arg {
@LChar(c) => Ok(c),
_ => Err(TypeError),
}
};
match char_list {
Ok(chars) => Ok(@LString(str::from_chars(chars))),
Err(e) => Err(e)
}
},
PStringLength => do call_prim1(args) |arg| {
match arg {
@LString(ref s) => Ok(@LNum(from_int(s.len() as int))),
_ => Err(TypeError),
}
},
PStringRef => do call_prim2(args) |arg, idx| {
match (arg, idx) {
(@LString(ref s), @LNum(ref n)) => match get_uint(n) {
Some(i) => if i < s.len() {
Ok(@LChar(s.char_at(i)))
} else {
Err(RangeError)
},
None => Err(TypeError),
},
_ => Err(TypeError),
}
},
PSubstring => match args {
[@LString(ref s), @LNum(ref n)] =>
match get_uint(n) {
Some(i) => if i <= s.len() {
Ok(@LString(s.slice(i, s.len()).to_owned()))
} else {
Err(RangeError)
},
_ => Err(TypeError),
},
[@LString(ref s), @LNum(ref from), @LNum(ref to)] =>
match (get_uint(from), get_uint(to)) {
(Some(start), Some(end)) =>
if start <= end && end <= s.len() {
Ok(@LString(s.slice(start, end).to_owned()))
} else {
Err(RangeError)
},
_ => Err(TypeError),
},
[_, _] | [_, _, _] => Err(TypeError),
_ => Err(ArgNumError(2, Some(3), args.len())),
},
PSymbol => do call_prim1(args) |arg| {
match arg {
@LIdent(_) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PSymbolString => do call_prim1(args) |arg| {
match arg {
@LIdent(ref s) => Ok(@LString(s.to_owned())),
_ => Err(TypeError),
}
},
PStringSymbol => do call_prim1(args) |arg| {
match arg {
@LString(ref s) => Ok(@LIdent(s.to_managed())),
_ => Err(TypeError),
}
},
}
}
// Walks `val` one quasiquote level deep: nested `quasiquote` forms are
// re-wrapped (descending via quasiquote(), which bumps qq_lvl),
// `unquote` forms are handed to unquote() (which may evaluate), and
// pairs/vectors are rebuilt element-wise. Atoms pass through as-is.
fn recursive_qq(&mut self, val: &@RDatum) -> Result<@RDatum, RuntimeError> {
match *val {
@LCons(ref h, ref t) =>
match is_quote(h,t) {
Some((QuasiQuote, ref v)) =>
// nested quasiquote: process payload one level deeper, then
// reconstruct the (quasiquote …) form literally
do self.quasiquote(v).map |&qv| {
@LCons(@LIdent(@"quasiquote"), @LCons(qv, @LNil))
},
Some((Unquote, ref v)) =>
self.unquote(v),
_ =>
// ordinary pair: rebuild both car and cdr recursively
do self.recursive_qq(h).chain |qh| {
do self.recursive_qq(t).map |&qt| {
@LCons(qh, qt)
}
},
},
@LVector(ref v) => {
// vectors are mapped element-wise, short-circuiting on error
match result::map_vec(*v, |x| { self.recursive_qq(x) }) {
Ok(qmap) => Ok(@LVector(qmap)),
Err(e) => Err(e),
}
},
_ =>
// non-container data is returned unchanged
Ok(*val),
}
}
// Enters one quasiquote nesting level around recursive_qq, restoring
// the depth counter afterwards.
fn quasiquote(&mut self, val: &@RDatum) -> Result<@RDatum, RuntimeError> {
self.qq_lvl += 1;
let res = self.recursive_qq(val);
self.qq_lvl -= 1;
res
}
// Handles `unquote`: outside any quasiquote it is a syntax error; at
// depth 1 the payload is actually evaluated; at greater depth it is
// rebuilt literally so outer quasiquote levels still see the form.
fn unquote(&mut self, val: &@RDatum) -> Result<@RDatum, RuntimeError> {
if self.qq_lvl == 0 {
Err(BadSyntax(SynUnquote, ~"unquote not nested in quasiquote"))
} else {
// temporarily step out one level while processing the payload
self.qq_lvl -= 1;
let res =
if self.qq_lvl == 0 {
self.eval(*val)
} else {
do self.recursive_qq(val).map |&qval| {
@LCons(@LIdent(@"unquote"), @LCons(qval, @LNil))
}
};
self.qq_lvl += 1;
res
}
}
// Applies an already-evaluated procedure value to already-evaluated
// arguments; the undefined placeholder is not callable.
fn apply(&mut self, proc: &RuntimeData, args: &[@RDatum]) -> Result<@RDatum, RuntimeError> {
match proc {
&RUndef =>
Err(NotCallable),
&RPrim(f) =>
self.call_prim(f, args),
&RProc(ref anames, ref vargs, ref code, ref env) =>
self.call_proc(*anames, *vargs, *code, *env, args),
}
}
// Evaluates each argument expression left-to-right (stopping on the
// first error), then applies the procedure.
fn call(&mut self, proc: &RuntimeData, aexprs: ~[@RDatum]) -> Result<@RDatum, RuntimeError> {
match result::map_vec(aexprs, |&expr| self.eval(expr))
{
Ok(args) => self.apply(proc, args),
Err(e) => Err(e),
}
}
// Builds a runtime wired to the process stdio, with an empty local
// environment and the prelude (primitives + syntax) loaded as globals.
pub fn new_std() -> Runtime {
Runtime {
stdin: io::stdin(),
stdout: io::stdout(),
stderr: io::stderr(),
env: @mut Stack::new(),
global: load_prelude(),
qq_lvl: 0,
}
}
// Core evaluator: symbols resolve through the environment; pairs are
// special forms (checked before evaluating the head) or procedure
// calls; () is an error; every other datum is self-evaluating.
pub fn eval(&mut self, val: @RDatum) -> Result<@RDatum, RuntimeError>
{
match *val {
LIdent(name) => self.find_var(&name),
LCons(fexpr, aexpr) =>
// the operand part must be a proper list
match aexpr.to_list() {
None => Err(NotList),
Some(aexprs) => {
match self.get_syntax(fexpr) {
Some(syntax) =>
// special form: operands stay unevaluated
self.run_syntax(syntax, aexprs),
None =>
match self.eval(fexpr) {
Ok(@LExt(ref proc)) => self.call(proc, aexprs),
Ok(_) => Err(NotCallable),
Err(e) => Err(e),
},
}
},
},
LNil => Err(NilEval),
_ => Ok(val),
}
}
// Parses a single datum from `rdr` and evaluates it; parse failures
// become ParseError carrying the parser's line/column position.
pub fn load(&mut self, rdr: @io::Reader) -> Result<@RDatum, RuntimeError>
{
let mut parser = Parser(rdr);
match parser.parse() {
Ok(datum) => self.eval(@datum),
Err(e) => {
let (line, col) = parser.pos();
Err(ParseError(line, col, e))
},
}
}
}
// Walks environment frames innermost-first and overwrites the first
// binding of `name`; returns true iff a binding was found and set.
priv fn set_var(env: @mut Stack<HashMap<@str, @RDatum>>,
name: &@str,
val: @RDatum) -> bool {
let mut success = false;
do env.each_mut |frame| {
match frame.find_mut(name) {
None => (),
Some(v) => {
success = true;
*v = val;
}
}
// keep iterating only while the variable has not been set yet
!success
};
success
}
// remove boilerplate call_real_prim*
use std::io;
use std::borrow;
use std::uint;
use std::result;
use std::str;
use std::vec;
use std::num::{One, Zero, ToStrRadix};
use std::hashmap::HashMap;
use std::managed;
use bounded_iterator::BoundedIterator;
use extra::bigint::BigInt;
use extra::complex::Cmplx;
use datum::*;
use primitive::*;
use numeric::*;
use stack::*;
use parser::Parser;
// Interpreter-specific payload stored in LDatum's LExt variant: an
// undefined placeholder, a builtin primitive, or a user closure
// (argument names, optional rest-arg name, body, captured environment).
enum RuntimeData {
RUndef,
RPrim(PFunc),
RProc(~[@str],
Option<@str>,
~[@LDatum<RuntimeData>],
@mut Stack<HashMap<@str, @LDatum<RuntimeData>>>),
}
impl Clone for RuntimeData {
// Shallow clone: the closure's environment pointer is shared, not
// copied, so cloned procedures still see the same bindings.
fn clone(&self) -> RuntimeData {
match self {
&RUndef => RUndef,
&RPrim(f) => RPrim(f),
&RProc(ref args, ref vargs, ref body, ref env) => {
// copy the owned argument-name vector element by element
let cloneargs = do args.map |&arg| {
arg
};
RProc(cloneargs, *vargs, body.clone(), *env)
},
}
}
}
fn eq(lhs: &RuntimeData, rhs: &RuntimeData) -> bool {
match (lhs, rhs) {
(&RPrim(l), &RPrim(r)) => l == r,
(&RProc(_,_,_,_), &RProc(_,_,_,_)) => lhs == rhs,
_ => false,
}
}
impl Eq for RuntimeData {
// Both operators delegate to the free function `eq` above.
fn eq(&self, other: &RuntimeData) -> bool {
eq(self, other)
}
fn ne(&self, other: &RuntimeData) -> bool {
!eq(self, other)
}
}
// Human-readable rendering used by ToStr; closures are shown by the
// address of the borrowed value (identity, not contents).
fn data_to_str(data: &RuntimeData) -> ~str {
match *data {
RUndef => ~"<undefined>",
RPrim(f) => fmt!("<primitive:%s>", f.to_str()),
RProc(_, _, _, _) => fmt!("<procedure 0x%08x>", borrow::to_uint(data)),
}
}
// ToStr delegates to data_to_str above.
impl ToStr for RuntimeData {
fn to_str(&self) -> ~str {
data_to_str(self)
}
}
// Shorthand: the generic datum type specialized with this runtime's payload.
type RDatum = LDatum<RuntimeData>;
// Two-way conversion between Scheme data and typed Rust values, used
// by the call_tc*/call_err* helpers to type-check primitive arguments.
pub trait DatumConv {
// Runs `op` on the extracted value, or None if the datum does not
// have the right shape for Self.
fn from_datum<R>(@RDatum, &fn(&Self) -> R) -> Option<R>;
fn to_datum(&self) -> @RDatum;
// Human-readable name of the expected type.
fn typename() -> ~str;
}
// Identity conversion: any datum converts to and from itself.
impl DatumConv for @RDatum {
fn from_datum<R>(datum: @RDatum, op: &fn(&@RDatum) -> R) -> Option<R> {
Some(op(&datum))
}
fn to_datum(&self) -> @RDatum {
*self
}
fn typename() -> ~str {
~"datum"
}
}
// Extracts the runtime payload (procedures etc.) from LExt data.
impl DatumConv for RuntimeData {
fn from_datum<R>(datum: @RDatum, op: &fn(&RuntimeData) -> R) -> Option<R> {
match datum {
@LExt(ref r) => Some(op(r)),
_ => None,
}
}
fn to_datum(&self) -> @RDatum {
@LExt(self.clone())
}
fn typename() -> ~str {
~"procedure"
}
}
// Accepts any numeric datum (exact or inexact, real or complex).
impl DatumConv for LNumeric {
fn from_datum<R>(datum: @RDatum, op: &fn(&LNumeric) -> R) -> Option<R> {
match datum {
@LNum(ref n) => Some(op(n)),
_ => None,
}
}
fn to_datum(&self) -> @RDatum {
@LNum(self.clone())
}
fn typename() -> ~str {
~"number"
}
}
// Accepts only numbers with no imaginary component (via get_real).
impl DatumConv for LReal {
fn from_datum<R>(datum: @RDatum, op: &fn(&LReal) -> R) -> Option<R> {
match datum {
@LNum(ref n) => match get_real(n) {
Some(ref r) => Some(op(r)),
None => None,
},
_ => None,
}
}
fn to_datum(&self) -> @RDatum {
@LNum(from_real(self))
}
fn typename() -> ~str {
~"real number"
}
}
// Accepts exact rationals that represent integers (zero imaginary
// part) and yields the underlying BigInt.
// NOTE(review): the integer test reads `numerator() == 1` and then
// yields `denominator()` as the integer value — this only makes sense
// if this project's rational accessors are swapped relative to the
// usual naming; confirm against the numeric module.
impl DatumConv for BigInt {
fn from_datum<R>(datum: @RDatum, op: &fn(&BigInt) -> R) -> Option<R> {
match datum {
@LNum(ref n) => match *n {
NExact( Cmplx{ re: ref re, im: ref im } ) =>
if im.is_zero() && *re.numerator() == One::one() {
Some(op(re.denominator()))
} else {
None
},
NInexact(_) => None,
},
_ => None,
}
}
fn to_datum(&self) -> @RDatum {
@LNum(from_bigint(self.clone()))
}
fn typename() -> ~str {
~"integer"
}
}
// Wrapper letting a proper Scheme list participate in DatumConv.
struct GetList {
list: ~[@RDatum]
}
// Converts between proper lists and GetList (used by e.g. PApply).
impl DatumConv for GetList {
#[inline]
fn from_datum<R>(datum: @RDatum, op: &fn(&GetList) -> R) -> Option<R> {
match datum.to_list() {
Some(l) => Some(op(&GetList{ list: l })),
_ => None,
}
}
#[inline]
fn to_datum(&self) -> @RDatum {
LDatum::from_list(self.list)
}
fn typename() -> ~str {
~"list"
}
}
// Interpreter state: stdio handles, the local frame stack, the global
// table (values or special-form tags), and quasiquote nesting depth.
struct Runtime {
stdin: @Reader,
stdout: @Writer,
stderr: @Writer,
env: @mut Stack<HashMap<@str, @RDatum>>,
global: HashMap<@str, Either<@RDatum, PrimSyntax>>,
qq_lvl: uint,
}
// Every failure mode of evaluation.
#[deriving(Eq)]
enum RuntimeError {
UnboundVariable(@str),
RefMacro(@str),
NotCallable,
NotList,
// (min, max, actual) argument counts; max None means "or more"
ArgNumError(uint, Option<uint>, uint),
TypeError,
DivideByZeroError,
NilEval,
BadSyntax(PrimSyntax, ~str),
// (line, column, reason) from the reader
ParseError(uint, uint, ~str),
RangeError,
}
// ToStr delegates to err_to_str below.
impl ToStr for RuntimeError {
fn to_str(&self) -> ~str {
err_to_str(self)
}
}
// Renders a RuntimeError for display; ArgNumError distinguishes
// exact, ranged, and open-ended arities.
priv fn err_to_str(&err: &RuntimeError) -> ~str {
match err {
UnboundVariable(name) => ~"unbound variable: " + name,
RefMacro(name) => ~"cannot reference macro name: " + name,
NotCallable => ~"not callable",
NotList => ~"not list",
ArgNumError(min, Some(max), argnum) => {
if min == max {
fmt!("expected %u arguments, but found %u arguments", min, argnum)
} else {
fmt!("expected %u-%u arguments, but found %u arguments", min, max, argnum)
}
},
ArgNumError(expected, None, argnum) => {
fmt!("expected %u or more arguments, but found %u arguments", expected, argnum)
},
TypeError => ~"type error",
DivideByZeroError => ~"divide by zero",
NilEval => ~"() cannot be evaluated",
BadSyntax(syn, reason) => ~"bad syntax for " + syn.to_str() + ": " + reason,
ParseError(line, col, reason) => fmt!("failed to parse: %u:%u: %s", line, col, reason),
RangeError => ~"index out of range",
}
}
// Builds the global table: every primitive function and primitive
// syntax (enumerated via BoundedIterator), keyed by its printed name,
// plus the constant `pi`.
fn load_prelude() -> HashMap<@str, Either<@RDatum, PrimSyntax>> {
let mut map = HashMap::new();
let mut prim_iter = BoundedIterator::new::<PFunc>();
for prim_iter.advance |prim:PFunc| {
let key = prim.to_str();
map.insert(key.to_managed(), Left(@LExt(RPrim(prim))));
}
let mut syntax_iter = BoundedIterator::new::<PrimSyntax>();
for syntax_iter.advance |syntax:PrimSyntax| {
let key = syntax.to_str();
map.insert(key.to_managed(), Right(syntax));
}
map.insert("pi".to_managed(), Left(@LNum(inexact(Real::pi(), 0f64))));
map
}
// Arity-1 helper: unwraps the single argument or reports ArgNumError.
priv fn call_prim1(args: &[@RDatum],
op: &fn(@RDatum) -> Result<@RDatum, RuntimeError>)
-> Result<@RDatum, RuntimeError>
{
if args.len() == 1 {
op(args[0])
} else {
Err(ArgNumError(1, Some(1), args.len()))
}
}
// Arity-2 helper: unwraps both arguments or reports ArgNumError.
priv fn call_prim2(args: &[@RDatum],
op: &fn(@RDatum, @RDatum) -> Result<@RDatum, RuntimeError>)
-> Result<@RDatum, RuntimeError>
{
if args.len() == 2 {
op(args[0], args[1])
} else {
Err(ArgNumError(2, Some(2), args.len()))
}
}
// Typed arity-1 helper: converts the argument to A via DatumConv,
// applies `op`, and converts the R result back into a datum; a failed
// conversion is a TypeError.
priv fn call_tc1<A: DatumConv, R: DatumConv> (
args: &[@RDatum], op: &fn(&A) -> R
) -> Result<@RDatum, RuntimeError>
{
match args {
[arg] => {
let res = DatumConv::from_datum::<A, R>(arg, op);
match res {
Some(ref x) => Ok(x.to_datum()),
None => Err(TypeError),
}
},
_ => Err(ArgNumError(1, Some(1), args.len())),
}
}
// Typed arity-2 helper: nested conversions produce Option<Option<R>>;
// a None at either level means one argument had the wrong type.
priv fn call_tc2<A: DatumConv, B:DatumConv, R: DatumConv> (
args: &[@RDatum], op: &fn(&A, &B) -> R
) -> Result<@RDatum, RuntimeError>
{
match args {
[arg0, arg1] => {
let res = do DatumConv::from_datum::<A, Option<R>>(arg0) |a| {
do DatumConv::from_datum::<B, R>(arg1) |b| {
op(a, b)
}
};
match res {
Some(Some(x)) => Ok(x.to_datum()),
_ => Err(TypeError),
}
},
_ => Err(ArgNumError(2, Some(2), args.len())),
}
}
// Typed arity-2 helper whose operation may itself fail (e.g. divide
// by zero). Three-way outcome: op result, op error, or TypeError when
// either conversion failed.
priv fn call_err2<A: DatumConv, B: DatumConv, R: DatumConv> (
args: &[@RDatum], op: &fn(&A, &B) -> Result<R, RuntimeError>
) -> Result<@RDatum, RuntimeError>
{
match args {
[arg0, arg1] => {
let r = do DatumConv::from_datum::<A, Result<R, RuntimeError>>(arg0) |a| {
let res = do DatumConv::from_datum::<B, Result<R, RuntimeError>>(arg1) |b| {
op(a, b)
};
// inner None: arg1 conversion failed
match res {
Some(x) => x,
None => Err(TypeError),
}
};
// outer None: arg0 conversion failed
match r {
Some(Ok(x)) => Ok(x.to_datum()),
Some(Err(e)) => Err(e),
None => Err(TypeError),
}
},
_ => Err(ArgNumError(2, Some(2), args.len())),
}
}
// Left-folds `op` over the numeric arguments starting from `a0`,
// stopping at the first non-numeric argument or op failure.
//
// Fixed in review: the original tracked failure with a bare bool and
// always reported Err(TypeError), so an op error such as the
// DivideByZeroError produced by PDiv's closure was misreported as a
// type error. The actual RuntimeError is now remembered and returned.
priv fn call_num_foldl(args: &[@RDatum],
                       a0: &LNumeric,
                       op: &fn(&LNumeric, &LNumeric) -> Result<LNumeric, RuntimeError>)
    -> Result<@RDatum, RuntimeError>
{
    let mut res:LNumeric = a0.clone();
    // first failure encountered, if any; iteration stops once set
    let mut err:Option<RuntimeError> = None;
    do args.each |&arg| {
        match arg {
            @LNum(ref a) => {
                match op(&res, a) {
                    Ok(n) => {
                        res = n;
                    },
                    Err(e) => {
                        err = Some(e);
                    }
                }
            },
            _ => {
                // non-numeric argument in a numeric fold
                err = Some(TypeError);
            }
        }
        // keep folding only while no error has occurred
        err.is_none()
    };
    match err {
        Some(e) => Err(e),
        None => Ok(@LNum(res))
    }
}
// Arity-1 helper coercing the numeric argument to an inexact complex
// (f64 components) before applying `op` and re-wrapping the result.
priv fn call_inexact(args: &[@RDatum], op: &fn(&Cmplx<f64>) -> Cmplx<f64>)
-> Result<@RDatum, RuntimeError>
{
match args {
[@LNum(ref n)] => Ok(@LNum(NInexact(op(&n.to_inexact())))),
[_] => Err(TypeError),
_ => Err(ArgNumError(1, Some(1), args.len())),
}
}
// Variadic chained comparison over real arguments, like Scheme
// (< a b c ...): applies `op` to each adjacent pair, returning #f as
// soon as one comparison fails. Requires at least two arguments;
// non-real arguments are a TypeError.
priv fn call_real_bfoldl(args: &[@RDatum], op: &fn(&LReal, &LReal) -> bool)
-> Result<@RDatum, RuntimeError>
{
let n = args.len();
if n < 2 {
return Err(ArgNumError(2, None, n));
}
let mut a = match args[0] {
@LNum(ref n) => match get_real(n) {
None => return Err(TypeError),
Some(r) => r,
},
_ => return Err(TypeError),
};
let mut idx = 1;
while idx < n {
let b = match args[idx] {
@LNum(ref n) => match get_real(n) {
None => return Err(TypeError),
Some(r) => r,
},
_ => return Err(TypeError),
};
if !op(&a, &b) {
return Ok(@LBool(false));
}
// slide the comparison window forward
a = b;
idx += 1;
}
return Ok(@LBool(true));
}
// Parses a let-style binding list: a proper list of two-element
// (name expr) lists; yields (name, unevaluated expr) pairs.
priv fn get_bindings(arg: &RDatum) -> Result<~[(@str, @RDatum)], ~str> {
match arg.to_list() {
None => Err(~"non-list bindings"),
Some(bindings) => do result::map_vec(bindings) |datum| {
match datum.to_list() {
Some([@LIdent(name), expr]) => Ok((name, expr)),
Some(_) | None => Err(~"invalid binding")
}
}
}
}
// Parses a lambda/define parameter list, which may be a proper list
// of symbols, an improper list ending in a rest-arg symbol, or a bare
// symbol (all arguments collected into one list). Returns the fixed
// parameter names plus the optional rest-arg name.
priv fn get_syms(&arg: &@RDatum) -> Result<(~[@str], Option<@str>), ~str> {
let mut iter = arg;
let mut args : ~[@str] = ~[];
let mut varargs : Option<@str> = None;
loop {
match *iter {
LCons(h, t) => match *h {
LIdent(name) => {
args.push(name);
iter = t;
},
_ => {
return Err(~"non-symbol argument");
}
},
LIdent(name) => {
// improper tail: the rest-argument name
varargs = Some(name);
break;
},
LNil => {
break;
},
_ => {
return Err(~"non-list argument");
},
}
}
Ok((args, varargs))
}
impl Runtime {
// If `val` names a primitive special form in the global table,
// return its tag; value bindings and unbound names yield None.
fn get_syntax(&self, val: &RDatum) -> Option<PrimSyntax> {
match *val {
LIdent(name) => match self.global.find(&name) {
Some(&Right(syn)) => Some(syn),
_ => None,
},
_ => None,
}
}
// Resolves a variable: innermost local frame first, then the global
// table, where referencing a syntax binding as a value is an error.
fn find_var(&self, name: &@str) -> Result<@RDatum, RuntimeError> {
let mut val: Option<@RDatum> = None;
do self.env.each |frame| {
match frame.find(name) {
None => true,
Some(v) => {
// found: record it and stop walking frames
val = Some(*v);
false
}
}
};
match val {
None => match self.global.find(name) {
Some(&Left(v)) => Ok(v),
Some(&Right(_)) => Err(RefMacro(*name)),
None => Err(UnboundVariable(*name)),
},
Some(v) => Ok(v),
}
}
// (let ((name expr) ...) body...): evaluates every initializer in the
// current environment, collects them into one new frame, then runs
// the body in that frame via local_eval.
fn syn_let(&mut self, bindings: &RDatum, body: &[@RDatum]) -> Result<@RDatum, RuntimeError> {
match get_bindings(bindings) {
Err(e) => Err(BadSyntax(SynLet, e)),
Ok(b) => {
let mut arg_frame = HashMap::new();
let mut err:Option<RuntimeError> = None;
do b.each |&(name, expr)| {
match self.eval(expr) {
Ok(val) => {
arg_frame.insert(name, val);
true
}
Err(e) => {
// stop evaluating initializers on the first error
err = Some(e);
false
}
}
};
match err {
Some(e) => Err(e),
None => self.local_eval(arg_frame, self.env, body)
}
}
}
}
// (let* ...): each initializer sees the bindings made before it, so
// one frame is pushed per binding; the whole frame stack is restored
// to `old_frame` when done, regardless of errors.
fn syn_letstar(&mut self, bindings: &RDatum, body: &[@RDatum])
-> Result<@RDatum, RuntimeError>
{
match get_bindings(bindings) {
Err(e) => Err(BadSyntax(SynLet, e)),
Ok(b) => {
let old_frame = self.env;
let mut err:Option<RuntimeError> = None;
do b.each |&(name, expr)| {
match self.eval(expr) {
Ok(val) => {
// push a fresh one-entry frame per binding
let mut arg_frame = HashMap::new();
arg_frame.insert(name, val);
self.env = @mut push(self.env, arg_frame);
true
},
Err(e) => {
err = Some(e);
false
},
}
};
let mut res:Result<@RDatum, RuntimeError> = Err(NilEval);
match err {
Some(e) => {
res = Err(e);
},
None => {
// evaluate body forms in order; result is the last value
do body.each |&val| {
res = self.eval(val);
res.is_ok()
};
}
};
self.env = old_frame;
return res
}
}
}
// (letrec ...): all names are pre-bound to the undefined placeholder
// in a new frame, the initializers are evaluated inside that frame
// (so they can reference each other), then the frame is patched with
// the real values before running the body. The previous environment
// is restored on exit.
fn syn_letrec(&mut self, bindings: &RDatum, body: &[@RDatum]) -> Result<@RDatum, RuntimeError>
{
match get_bindings(bindings) {
Err(e) => Err(BadSyntax(SynLet, e)),
Ok(b) => {
let old_frame = self.env;
let mut arg_frame = HashMap::new();
let (names, exprs) = vec::unzip(b);
for names.each |&name| {
arg_frame.insert(name, @LExt(RUndef));
}
self.env = @mut push(old_frame, arg_frame);
let mut res:Result<@RDatum, RuntimeError> = Err(NilEval);
match result::map_vec(exprs, |&expr| { self.eval(expr) }) {
Ok(vals) => {
// replace the placeholders with the computed values
do self.env.mut_top |frame| {
for uint::range(0, names.len()) |i| {
frame.insert(names[i], vals[i]);
}
};
do body.each |&val| {
res = self.eval(val);
res.is_ok()
};
},
Err(e) => {
res = Err(e);
},
}
self.env = old_frame;
res
}
}
}
// (cond (pred expr) ... (else expr)?): first validates every clause's
// shape (else only allowed as the last clause), then evaluates the
// predicates in order and runs the expression of the first non-#f
// predicate, falling back to the else expression when all were #f.
fn cond(&mut self, conds: &[@RDatum]) -> Result<@RDatum, RuntimeError>
{
let mut i = 0u;
let mut exprs = vec::with_capacity(conds.len());
let mut else_opt = None;
while i < conds.len() {
match conds[i].to_list() {
Some([@LIdent(els), expr]) if els.as_slice() == "else" =>
if i == conds.len()-1 {
else_opt = Some(expr);
} else {
return Err(BadSyntax(SynCond, ~"trailing conditions after else"));
},
Some([pred, expr]) => exprs.push((pred, expr)),
_ => return Err(BadSyntax(SynCond, ~"invalid conditional expression")),
}
i += 1;
}
// default result when no clause fires and there is no else
let mut res = Ok(@LExt(RUndef));
let expr_end = do exprs.each |&(pred, expr)| {
match self.eval(pred) {
Err(e) => {
res = Err(e);
false
},
// only #f fails a predicate; anything else is truthy
Ok(@LBool(false)) => true,
_ => {
res = self.eval(expr);
false
},
}
};
// expr_end is true iff every predicate evaluated to #f
match else_opt {
Some(else_expr) if expr_end => self.eval(else_expr),
_ => res
}
}
// Parses both define forms: (define name expr) — recognized when the
// parameter parse yields only a rest-arg name — and
// (define (name args...) body...), which builds an RProc closure over
// the current environment. Returns the (name, value) pair for the
// caller to bind.
fn define(&mut self, args: ~[@RDatum]) -> Result<(@str, @RDatum), RuntimeError> {
match get_syms(&args[0]) {
Err(e) => Err(BadSyntax(SynDefine, e)),
Ok((anames, varargs)) =>
if anames.is_empty() {
match varargs {
None => Err(BadSyntax(SynDefine, ~"name not given")),
Some(name) => if args.len() != 2 {
Err(BadSyntax(SynDefine, ~"multiple expressions"))
} else {
do self.eval(args[1]).map |&val| {
(name, val)
}
}
}
} else {
// procedure form: first symbol is the name, the rest are
// the formal parameters
let name = anames[0];
let anames = anames.slice(1, anames.len()).to_owned();
let seq = args.slice(1, args.len()).to_owned();
let proc = @LExt(RProc(anames, varargs, seq, self.env));
Ok((name, proc))
}
}
}
// Executes a primitive special form; `args` are the unevaluated
// operand expressions.
//
// Fixed in review: the SynLetStar arity error was tagged
// BadSyntax(SynLetRec, ...), misattributing a malformed (let* ...)
// to letrec; it now reports SynLetStar.
fn run_syntax(&mut self,
syn: PrimSyntax,
args: ~[@RDatum]) -> Result<@RDatum, RuntimeError>
{
match syn {
SynIf => if args.len() == 3 {
do self.eval(args[0]).chain |cond| {
// only #f selects the alternative; everything else is truthy
match *cond {
LBool(false) => self.eval(args[2]),
_ => self.eval(args[1]),
}
}
} else {
Err(BadSyntax(SynIf, ~"bad number of arguments"))
},
SynCond => self.cond(args),
SynLambda => if args.len() < 2 {
Err(BadSyntax(SynLambda, ~"no body given"))
} else {
match get_syms(&args[0]) {
Err(e) => Err(BadSyntax(SynLambda, e)),
Ok((anames, varargs)) => {
// the closure captures the current environment
let seq = args.slice(1, args.len()).to_owned();
Ok(@LExt(RProc(anames, varargs, seq, self.env)))
},
}
},
SynLet => if args.len() < 2 {
Err(BadSyntax(SynLet, ~"no body given"))
} else {
self.syn_let(args[0], args.slice(1, args.len()))
},
SynLetRec => if args.len() < 2 {
Err(BadSyntax(SynLetRec, ~"no body given"))
} else {
self.syn_letrec(args[0], args.slice(1, args.len()))
},
SynLetStar => if args.len() < 2 {
// was BadSyntax(SynLetRec, ...): wrong syntax tag
Err(BadSyntax(SynLetStar, ~"no body given"))
} else {
self.syn_letstar(args[0], args.slice(1, args.len()))
},
SynDefine => if args.len() < 2 {
Err(BadSyntax(SynDefine, ~"no body given"))
} else {
let definition = self.define(args);
match definition {
Err(e) => Err(e),
Ok((name, val)) => {
if self.env.size_hint() == Some(0) {
// this is the top-level context
// just bind the definition in global
self.global.insert(name, Left(val));
} else {
// this is not the top-level context
// create a new frame
let mut frame = HashMap::new();
frame.insert(name, val);
self.env = @mut push(self.env, frame);
};
Ok(@LNil)
},
}
},
SynSet => if args.len() != 2 {
Err(BadSyntax(SynSet, ~"bad number of arguments"))
} else {
match *args[0] {
LIdent(name) => do self.eval(args[1]).chain |val| {
if set_var(self.env, &name, val) {
Ok(@LNil)
} else {
Err(BadSyntax(SynSet, ~"unbound variable"))
}
},
_ => Err(BadSyntax(SynSet, ~"cannot set non-variable"))
}
},
SynQuote => if args.len() == 1 {
Ok(args[0])
} else {
Err(BadSyntax(SynQuote, ~"bad number of arguments"))
},
SynQQuote => if args.len() == 1 {
self.quasiquote(&args[0])
} else {
Err(BadSyntax(SynQQuote, ~"bad number of arguments"))
},
SynUnquote => if args.len() == 1 {
self.unquote(&args[0])
} else {
Err(BadSyntax(SynUnquote, ~"bad number of arguments"))
},
SynAnd => self.syn_and(args),
SynOr => self.syn_or(args),
}
}
// (and ...): evaluates left to right, short-circuiting on #f;
// returns the last value, or #t when there are no arguments.
priv fn syn_and(&mut self, args: &[@RDatum]) -> Result<@RDatum, RuntimeError>
{
let mut res = @LBool(true);
let mut i = 0u;
while i < args.len() {
match self.eval(args[i]) {
Ok(@LBool(false)) => return Ok(@LBool(false)),
Ok(x) => { res = x },
Err(e) => return Err(e),
};
i += 1;
}
return Ok(res)
}
// (or ...): evaluates left to right, returning the first non-#f
// value; yields #f when every argument is #f or there are none.
priv fn syn_or(&mut self, args: &[@RDatum]) -> Result<@RDatum, RuntimeError>
{
let mut i = 0u;
while i < args.len() {
match self.eval(args[i]) {
Ok(@LBool(false)) => (),
Ok(x) => return Ok(x),
Err(e) => return Err(e),
};
i += 1;
}
return Ok(@LBool(false))
}
// Binds formal parameters into a fresh frame — packing any extra
// arguments into a list bound to the rest-arg name when present —
// then evaluates the body in the closure's captured environment.
fn call_proc(&mut self,
anames: &[@str],
vargs: Option<@str>,
code: &[@RDatum],
frame: @mut Stack<HashMap<@str, @RDatum>>,
args: &[@RDatum]) -> Result<@RDatum, RuntimeError>
{
// create new frame to store args
let mut arg_frame = HashMap::new();
match vargs {
None => if args.len() != anames.len() {
return Err(ArgNumError(anames.len(), Some(anames.len()), args.len()));
},
Some(vname) => if args.len() < anames.len() {
return Err(ArgNumError(anames.len(), None, args.len()));
} else {
// build the rest-arg list back-to-front so order is preserved
let vslice = args.slice(anames.len(), args.len());
let va = do vslice.rev_iter().fold(@LNil) |a, &l| {
@LCons(l, a)
};
arg_frame.insert(vname, va);
},
}
for uint::range(0, anames.len()) |i| {
arg_frame.insert(anames[i], args[i]);
}
self.local_eval(arg_frame, frame, code)
}
// Evaluates `code` sequentially in `frame` extended with `arg_frame`,
// restoring the previous environment afterwards. The result is the
// last expression's value; evaluation stops at the first error.
fn local_eval(&mut self,
arg_frame: HashMap<@str, @RDatum>,
frame: @mut Stack<HashMap<@str, @RDatum>>,
code: &[@RDatum])
-> Result<@RDatum, RuntimeError>
{
// store current env
let old_env = self.env;
// create new local env
self.env = @mut push(frame, arg_frame);
// NilEval here means an empty body, which should not occur for
// well-formed callers
let mut res:Result<@RDatum, RuntimeError> = Err(NilEval);
do code.each() |&val| {
res = self.eval(val);
res.is_ok()
};
// restore env
self.env = old_env;
res
}
// Dispatch table for every builtin procedure. Arity/type checking is
// delegated to the call_* helpers where possible; the remaining arms
// pattern-match `args` directly.
//
// Fixed in review:
//  * PNumberString accepts 1 or 2 arguments, so its fall-through
//    arity error now reports max 2 (was ArgNumError(1, Some(1), ..)).
//  * PInteger only required the imaginary part to be *integral*, so
//    e.g. (integer? 2.0+3.0i) returned #t. Per R5RS, integer? is
//    false for any number with a non-zero imaginary part; both the
//    exact and inexact arms now require im == 0.
fn call_prim(&mut self,
f: PFunc,
args: &[@RDatum]) -> Result<@RDatum, RuntimeError>
{
match f {
PEval => match args {
[arg] => self.eval(arg),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PApply => do call_err2::<RuntimeData, GetList, @RDatum>(args) |f, l| {
self.apply(f, l.list)
},
PBegin => if args.len() == 0 {
Ok(@LExt(RUndef))
} else {
Ok(*args.last())
},
// variadic arithmetic folds
PAdd => do call_num_foldl(args, &Zero::zero()) |&lhs, &rhs| { Ok(lhs + rhs) },
PSub => match args {
[] => Err(ArgNumError(1, None, 0)),
// unary minus is negation
[@LNum(ref x)] => Ok(@LNum(-*x)),
[@LNum(ref x), ..tail] =>
do call_num_foldl(tail, x) |&lhs, &rhs| { Ok(lhs - rhs) },
_ => Err(TypeError),
},
PMul => do call_num_foldl(args, &One::one()) |&lhs, &rhs| { Ok(lhs * rhs) },
PDiv => match args {
[] => Err(ArgNumError(1, None, 0)),
// unary division is reciprocal
[@LNum(ref x)] => if x.is_zero() {
Err(DivideByZeroError)
} else {
Ok(@LNum(x.recip()))
},
[@LNum(ref x), ..tail] =>
do call_num_foldl(tail, x) |&lhs, &rhs| {
if rhs.is_zero() {
Err(DivideByZeroError)
} else {
Ok(lhs / rhs)
}
},
_ => Err(TypeError),
},
// integer-only operations, type-checked through the BigInt conv
PQuotient => do call_err2::<BigInt, BigInt, BigInt>(args) |&lhs, &rhs| {
if rhs.is_zero() {
Err(DivideByZeroError)
} else {
Ok(lhs / rhs)
}
},
PRemainder => do call_err2::<BigInt, BigInt, BigInt>(args) |&lhs, &rhs| {
if rhs.is_zero() {
Err(DivideByZeroError)
} else {
Ok(lhs % rhs)
}
},
PModulo => do call_err2::<BigInt, BigInt, BigInt>(args) |&lhs, &rhs| {
if rhs.is_zero() {
Err(DivideByZeroError)
} else {
Ok(modulo(lhs, rhs))
}
},
// rounding operates on reals only
PFloor => do call_tc1::<LReal, LReal>(args) |&x| { x.floor() },
PCeiling => do call_tc1::<LReal, LReal>(args) |&x| { x.ceil() },
PRound => do call_tc1::<LReal, LReal>(args) |&x| { x.round() },
PTruncate => do call_tc1::<LReal, LReal>(args) |&x| { x.trunc() },
// transcendental functions accept any numeric
PExp => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.exp() },
PLog => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.ln() },
PSin => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.sin() },
PCos => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.cos() },
PTan => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.tan() },
PAsin => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.asin() },
PAcos => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.acos() },
PAtan => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.atan() },
PSqrt => do call_tc1::<LNumeric, LNumeric>(args) |&x| { x.sqrt() },
PExpt => do call_tc2::<LNumeric, LNumeric, LNumeric>(args) |x, r| { x.pow(r) },
PMakeRectangular => do call_tc2::<LReal, LReal, LNumeric>(args) |rx, ry| {
coerce(rx, ry, |&a, &b| { exact(a, b) }, |a, b| { inexact(a, b) })
},
PMakePolar => do call_tc2::<LReal, LReal, LNumeric>(args) |rx, ry| {
polar(rx.to_inexact(), ry.to_inexact())
},
PRealPart => do call_tc1::<LNumeric, LNumeric>(args) |&x| {
match x {
NExact( Cmplx { re: ref re, im: _ } ) => from_rational(re),
NInexact( Cmplx { re: re, im: _ } ) => from_f64(re),
}
},
PImagPart => do call_tc1::<LNumeric, LNumeric>(args) |&x| {
match x {
NExact( Cmplx { re: _, im: ref im } ) => from_rational(im),
NInexact( Cmplx { re: _, im: im } ) => from_f64(im),
}
},
PMagnitude => do call_tc1::<LNumeric, LNumeric>(args) |x| {
let (norm, _) = x.to_inexact().to_polar();
from_f64(norm)
},
PAngle => do call_tc1::<LNumeric, LNumeric>(args) |x| {
let (_, arg) = x.to_inexact().to_polar();
from_f64(arg)
},
PNumerator => match args {
[@LNum(NExact( Cmplx { re: ref re, im: ref im } ))] if im.is_zero() =>
Ok(@LNum( from_bigint(re.numerator().clone()) )),
[_] =>
Err(TypeError),
_ =>
Err(ArgNumError(1, Some(1), args.len())),
},
PDenominator => match args {
[@LNum(NExact( Cmplx { re: ref re, im: ref im } ))] if im.is_zero() =>
Ok(@LNum( from_bigint(re.denominator().clone()) )),
[_] =>
Err(TypeError),
_ =>
Err(ArgNumError(1, Some(1), args.len())),
},
PCar => do call_prim1(args) |arg| {
match *arg {
LCons(h, _) => Ok(h),
_ => Err(TypeError),
}
},
PCdr => do call_prim1(args) |arg| {
match *arg {
LCons(_, t) => Ok(t),
_ => Err(TypeError),
}
},
PCons => do call_prim2(args) |arg1, arg2| { Ok(@LCons(arg1, arg2)) },
PEqv => do call_prim2(args) |arg1, arg2| {
// eqv?: identity for aggregates, structural for atoms
let b =
match (arg1, arg2) {
(@LCons(_, _), @LCons(_, _)) => managed::ptr_eq(arg1, arg2),
(@LString(_), @LString(_)) => managed::ptr_eq(arg1, arg2),
(@LExt(_), @LExt(_)) => managed::ptr_eq(arg1, arg2),
_ => arg1 == arg2,
};
Ok(@LBool(b))
},
PEqual => do call_prim2(args) |arg1, arg2| {
// equal?: deep structural comparison
Ok(@LBool(arg1 == arg2))
},
PNumber => do call_prim1(args) |arg| {
match *arg {
LNum(_) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PReal => match args {
[@LNum(ref c)] => Ok(@LBool(c.is_real())),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PInteger => do call_prim1(args) |arg| {
match *arg {
// exact: no imaginary part and an integral real part (unit
// "numerator()", matching the BigInt DatumConv above);
// previously also accepted non-zero integral imaginary parts
LNum(NExact(Cmplx { re: ref re, im: ref im })) =>
Ok(@LBool(im.is_zero() && *re.numerator() == One::one())),
// inexact: imaginary part exactly zero, real part integral
LNum(NInexact(Cmplx { re: re, im: im })) =>
Ok(@LBool(im == 0f64 && re.round() == re)),
_ => Ok(@LBool(false)),
}
},
PExact => do call_prim1(args) |arg| {
match *arg {
LNum(NExact(_)) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PInexact => do call_prim1(args) |arg| {
match *arg {
LNum(NInexact(_)) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PExactInexact => do call_tc1::<LNumeric, LNumeric>(args) |arg| {
NInexact(arg.to_inexact())
},
PNumberString => match args {
[@LNum(ref x)] => Ok(@LString(x.to_str())),
[@LNum(ref x), @LNum(ref r)] => match get_uint(r) {
None => Err(TypeError),
Some(radix) => {
match x {
&NExact(ref n) => Ok(@LString(n.to_str_radix(radix))),
_ =>
// inexact numbers only support base 10
if radix == 10 {
Ok(@LString(x.to_str()))
} else {
Err(TypeError)
},
}
},
},
[_] | [_, _] => Err(TypeError),
// was Some(1): this primitive takes one or two arguments
_ => Err(ArgNumError(1, Some(2), args.len())),
},
// chained real comparisons, e.g. (< a b c)
PEQ => do call_real_bfoldl(args) |&lhs, &rhs| { lhs == rhs },
PGT => do call_real_bfoldl(args) |&lhs, &rhs| { lhs > rhs },
PLT => do call_real_bfoldl(args) |&lhs, &rhs| { lhs < rhs },
PGE => do call_real_bfoldl(args) |&lhs, &rhs| { lhs >= rhs },
PLE => do call_real_bfoldl(args) |&lhs, &rhs| { lhs <= rhs },
PNot => match args {
[@LBool(false)] => Ok(@LBool(true)),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PBoolean => match args {
[@LBool(_)] => Ok(@LBool(true)),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PChar => do call_prim1(args) |arg| {
match arg {
@LChar(_) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PProcedure => match args {
// the undefined placeholder is not a procedure
[@LExt(RUndef)] => Ok(@LBool(false)),
[@LExt(_)] => Ok(@LBool(true)),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PIsVector => match args {
[@LVector(_)] => Ok(@LBool(true)),
[_] => Ok(@LBool(false)),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PMakeVector => match args {
// one-arg form fills with the undefined placeholder
[@LNum(ref x)] => match get_uint(x) {
Some(k) => {
let mut v = ~[];
v.grow(k, &@LExt(RUndef));
Ok(@LVector(v))
},
None => Err(TypeError),
},
[@LNum(ref x), ref y] => match get_uint(x) {
Some(k) => {
let mut v = ~[];
v.grow(k, y);
Ok(@LVector(v))
},
None => Err(TypeError),
},
[_] | [_, _] => Err(TypeError),
_ => Err(ArgNumError(1, Some(2), args.len())),
},
PVector => Ok(@LVector(args.to_owned())),
PVectorLength => match args {
[@LVector(ref v)] => Ok(@LNum(from_uint(v.len()))),
[_] => Err(TypeError),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PVectorRef => match args {
[@LVector(ref v), @LNum(ref k)] => match get_uint(k) {
Some(i) =>
if i < v.len() {
Ok(v[i])
} else {
Err(RangeError)
},
None => Err(TypeError),
},
[_, _] => Err(TypeError),
_ => Err(ArgNumError(2, Some(2), args.len())),
},
PVectorList => match args {
[@LVector(ref v)] => Ok(LDatum::from_list(*v)),
[_] => Err(TypeError),
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PListVector => match args {
[arg] => match arg.to_list() {
Some(v) => Ok(@LVector(v)),
None => Err(TypeError),
},
_ => Err(ArgNumError(1, Some(1), args.len())),
},
PNull => do call_prim1(args) |arg| {
match arg {
@LNil => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PPair => do call_prim1(args) |arg| {
match arg {
@LCons(_, _) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PIsString => do call_prim1(args) |arg| {
match arg {
@LString(_) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PString => {
// (string c1 c2 ...): every argument must be a character
let char_list = do result::map_vec(args) |arg| {
match *arg {
@LChar(c) => Ok(c),
_ => Err(TypeError),
}
};
match char_list {
Ok(chars) => Ok(@LString(str::from_chars(chars))),
Err(e) => Err(e)
}
},
PStringLength => do call_prim1(args) |arg| {
match arg {
@LString(ref s) => Ok(@LNum(from_int(s.len() as int))),
_ => Err(TypeError),
}
},
PStringRef => do call_prim2(args) |arg, idx| {
match (arg, idx) {
(@LString(ref s), @LNum(ref n)) => match get_uint(n) {
Some(i) => if i < s.len() {
Ok(@LChar(s.char_at(i)))
} else {
Err(RangeError)
},
None => Err(TypeError),
},
_ => Err(TypeError),
}
},
PSubstring => match args {
// two-arg form slices from `n` to the end
[@LString(ref s), @LNum(ref n)] =>
match get_uint(n) {
Some(i) => if i <= s.len() {
Ok(@LString(s.slice(i, s.len()).to_owned()))
} else {
Err(RangeError)
},
_ => Err(TypeError),
},
[@LString(ref s), @LNum(ref from), @LNum(ref to)] =>
match (get_uint(from), get_uint(to)) {
(Some(start), Some(end)) =>
if start <= end && end <= s.len() {
Ok(@LString(s.slice(start, end).to_owned()))
} else {
Err(RangeError)
},
_ => Err(TypeError),
},
[_, _] | [_, _, _] => Err(TypeError),
_ => Err(ArgNumError(2, Some(3), args.len())),
},
PSymbol => do call_prim1(args) |arg| {
match arg {
@LIdent(_) => Ok(@LBool(true)),
_ => Ok(@LBool(false)),
}
},
PSymbolString => do call_prim1(args) |arg| {
match arg {
@LIdent(ref s) => Ok(@LString(s.to_owned())),
_ => Err(TypeError),
}
},
PStringSymbol => do call_prim1(args) |arg| {
match arg {
@LString(ref s) => Ok(@LIdent(s.to_managed())),
_ => Err(TypeError),
}
},
}
}
// Walks `val` rebuilding it for quasiquote expansion: nested
// `quasiquote` forms raise the nesting level, `unquote` forms are
// delegated to `unquote`, and everything else is copied structurally.
fn recursive_qq(&mut self, val: &@RDatum) -> Result<@RDatum, RuntimeError> {
    match *val {
        @LCons(ref h, ref t) =>
            // `is_quote` (defined elsewhere) presumably recognises
            // (quasiquote x) / (unquote x) shapes from the head/tail pair.
            match is_quote(h,t) {
                Some((QuasiQuote, ref v)) =>
                    // One level deeper: expand the payload and rebuild a
                    // literal (quasiquote <expanded>) list.
                    do self.quasiquote(v).map |&qv| {
                        @LCons(@LIdent(@"quasiquote"), @LCons(qv, @LNil))
                    },
                Some((Unquote, ref v)) =>
                    self.unquote(v),
                _ =>
                    // Ordinary pair: recurse into both halves and re-cons.
                    do self.recursive_qq(h).chain |qh| {
                        do self.recursive_qq(t).map |&qt| {
                            @LCons(qh, qt)
                        }
                    },
            },
        @LVector(ref v) => {
            // Vectors are expanded element-wise; the first error aborts.
            match result::map_vec(*v, |x| { self.recursive_qq(x) }) {
                Ok(qmap) => Ok(@LVector(qmap)),
                Err(e) => Err(e),
            }
        },
        _ =>
            // Atoms pass through unchanged.
            Ok(*val),
    }
}
// Expands a quasiquoted datum, bracketing the walk with an increment
// of `self.qq_lvl` so nested unquotes can tell how deep they are.
fn quasiquote(&mut self, val: &@RDatum) -> Result<@RDatum, RuntimeError> {
    self.qq_lvl += 1;
    let res = self.recursive_qq(val);
    self.qq_lvl -= 1;
    res
}
// Handles (unquote x). Outside any quasiquote it is a syntax error.
// At depth 1 the payload is actually evaluated; at greater depth it is
// rebuilt as a literal (unquote ...) form with one nesting level
// temporarily removed while the payload is re-expanded.
fn unquote(&mut self, val: &@RDatum) -> Result<@RDatum, RuntimeError> {
    if self.qq_lvl == 0 {
        Err(BadSyntax(SynUnquote, ~"unquote not nested in quasiquote"))
    } else {
        self.qq_lvl -= 1;
        let res =
            if self.qq_lvl == 0 {
                self.eval(*val)
            } else {
                do self.recursive_qq(val).map |&qval| {
                    @LCons(@LIdent(@"unquote"), @LCons(qval, @LNil))
                }
            };
        // Restore the depth for the caller.
        self.qq_lvl += 1;
        res
    }
}
// Applies `proc` to already-evaluated `args`: primitives dispatch to
// `call_prim`, user procedures to `call_proc`; an undefined runtime
// value is not callable.
fn apply(&mut self, proc: &RuntimeData, args: &[@RDatum]) -> Result<@RDatum, RuntimeError> {
    match proc {
        &RUndef =>
            Err(NotCallable),
        &RPrim(f) =>
            self.call_prim(f, args),
        &RProc(ref anames, ref vargs, ref code, ref env) =>
            self.call_proc(*anames, *vargs, *code, *env, args),
    }
}
// Evaluates every argument expression, then applies `proc` to the
// results; the first evaluation error aborts the whole call.
fn call(&mut self, proc: &RuntimeData, aexprs: ~[@RDatum]) -> Result<@RDatum, RuntimeError> {
    match result::map_vec(aexprs, |&expr| self.eval(expr))
    {
        Ok(args) => self.apply(proc, args),
        Err(e) => Err(e),
    }
}
// Creates a runtime wired to the process's standard streams, with an
// empty local environment stack, the prelude loaded as the global
// frame, and quasiquote nesting level zero.
pub fn new_std() -> Runtime {
    Runtime {
        stdin: io::stdin(),
        stdout: io::stdout(),
        stderr: io::stderr(),
        env: @mut Stack::new(),
        global: load_prelude(),
        qq_lvl: 0,
    }
}
// Core evaluator: identifiers are looked up, pairs are either special
// syntax or a procedure call, nil is an evaluation error, and every
// other datum is self-evaluating.
pub fn eval(&mut self, val: @RDatum) -> Result<@RDatum, RuntimeError>
{
    match *val {
        LIdent(name) => self.find_var(&name),
        LCons(fexpr, aexpr) =>
            // The operand position must form a proper list.
            match aexpr.to_list() {
                None => Err(NotList),
                Some(aexprs) => {
                    // Syntax keywords are checked before evaluating the head,
                    // so special forms see their operands unevaluated.
                    match self.get_syntax(fexpr) {
                        Some(syntax) =>
                            self.run_syntax(syntax, aexprs),
                        None =>
                            match self.eval(fexpr) {
                                // Only external runtime data (procedures) may
                                // appear in operator position.
                                Ok(@LExt(ref proc)) => self.call(proc, aexprs),
                                Ok(_) => Err(NotCallable),
                                Err(e) => Err(e),
                            },
                    }
                },
            },
        LNil => Err(NilEval),
        _ => Ok(val),
    }
}
// Parses a single datum from `rdr` and evaluates it; a parse failure
// is reported together with the parser's line/column position.
pub fn load(&mut self, rdr: @io::Reader) -> Result<@RDatum, RuntimeError>
{
    let mut parser = Parser(rdr);
    match parser.parse() {
        Ok(datum) => self.eval(@datum),
        Err(e) => {
            let (line, col) = parser.pos();
            Err(ParseError(line, col, e))
        },
    }
}
}
// Assigns `val` to an existing binding of `name` somewhere in the
// environment frame stack. Returns true iff a binding was found and
// updated; no new binding is created on failure.
priv fn set_var(env: @mut Stack<HashMap<@str, @RDatum>>,
                name: &@str,
                val: @RDatum) -> bool {
    let mut success = false;
    // `each_mut` keeps iterating while the closure returns true, so
    // returning `!success` stops at the first frame containing `name`.
    do env.each_mut |frame| {
        match frame.find_mut(name) {
            None => (),
            Some(v) => {
                success = true;
                *v = val;
            }
        }
        !success
    };
    success
}
|
//! Types and functions used for Rust code generation.
use crate::doc_formatter;
use crate::rust_info::RustDatabase;
use crate::rust_info::RustDatabaseItem;
use crate::rust_info::RustEnumValue;
use crate::rust_info::RustFFIFunction;
use crate::rust_info::RustFfiWrapperData;
use crate::rust_info::RustFunction;
use crate::rust_info::RustFunctionArgument;
use crate::rust_info::RustFunctionKind;
use crate::rust_info::RustItemKind;
use crate::rust_info::RustModule;
use crate::rust_info::RustModuleKind;
use crate::rust_info::RustStruct;
use crate::rust_info::RustStructKind;
use crate::rust_info::RustTraitImpl;
use crate::rust_info::RustWrapperTypeKind;
use crate::rust_type::RustCommonType;
use crate::rust_type::RustFinalType;
use crate::rust_type::RustPath;
use crate::rust_type::RustPointerLikeTypeKind;
use crate::rust_type::RustToFfiTypeConversion;
use crate::rust_type::RustType;
use itertools::Itertools;
use ritual_common::errors::{bail, err_msg, unexpected, Result};
use ritual_common::file_utils::create_dir_all;
use ritual_common::file_utils::create_file;
use ritual_common::file_utils::File;
use ritual_common::string_utils::trim_slice;
use ritual_common::utils::MapIfOk;
use std::fs;
use std::io;
use std::io::BufWriter;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
/// Generates Rust code representing type `rust_type` inside crate `crate_name`.
/// Same as `RustCodeGenerator::rust_type_to_code`, but accessible by other modules.
pub fn rust_type_to_code(rust_type: &RustType, current_crate: &str) -> String {
    match rust_type {
        RustType::Unit => "()".to_string(),
        RustType::PointerLike {
            kind,
            target,
            is_const,
        } => {
            // Render the pointee first, then wrap it in pointer/reference syntax.
            let target_code = rust_type_to_code(target, current_crate);
            match kind {
                RustPointerLikeTypeKind::Pointer => {
                    let prefix = if *is_const { "*const" } else { "*mut" };
                    format!("{} {}", prefix, target_code)
                }
                RustPointerLikeTypeKind::Reference { lifetime } => {
                    // An explicit lifetime is printed with a trailing space ("'a ").
                    let lifetime_text = lifetime
                        .as_ref()
                        .map_or_else(String::new, |lt| format!("'{} ", lt));
                    let mutability = if *is_const { "" } else { "mut " };
                    format!("&{}{}{}", lifetime_text, mutability, target_code)
                }
            }
        }
        RustType::Common(RustCommonType {
            path,
            generic_arguments,
        }) => {
            // Fully qualified path, optionally followed by `<...>` arguments.
            let base = path.full_name(Some(current_crate));
            match generic_arguments {
                Some(args) => {
                    let args_code = args
                        .iter()
                        .map(|arg| rust_type_to_code(arg, current_crate))
                        .join(", ");
                    format!("{}<{}>", base, args_code)
                }
                None => base,
            }
        }
        RustType::FunctionPointer {
            return_type,
            arguments,
        } => {
            let args_code = arguments
                .iter()
                .map(|arg| rust_type_to_code(arg, current_crate))
                .join(", ");
            // A unit return type is omitted entirely (no `-> ()`).
            let return_code = match return_type.as_ref() {
                RustType::Unit => String::new(),
                other => format!(" -> {}", rust_type_to_code(other, current_crate)),
            };
            format!("extern \"C\" fn({}){}", args_code, return_code)
        }
    }
}
/// Streams generated Rust source into a stack of open output files.
struct Generator {
    // Name of the crate being generated; used when rendering paths.
    crate_name: String,
    // Root of the generated crate's `src` directory.
    output_src_path: PathBuf,
    // Stack of open output files; all writes go to the last entry.
    destination: Vec<File<BufWriter<fs::File>>>,
}
/// `write!`/`writeln!` on the generator forward to the most recently
/// opened output file; writing with no open file is a bug and panics.
impl Write for Generator {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let current = self
            .destination
            .last_mut()
            .expect("generator: no open files");
        io::Write::write(current, buf)
    }
    fn flush(&mut self) -> io::Result<()> {
        let current = self
            .destination
            .last_mut()
            .expect("generator: no open files");
        current.flush()
    }
}
/// Generates `///` item documentation comments containing
/// markdown code `doc`; thin wrapper over `format_doc_extended`.
fn format_doc(doc: &str) -> String {
    format_doc_extended(doc, false)
}
/// Generates documentation comments containing markdown code `doc`.
/// With `is_outer` the lines are emitted as `//!` module docs followed
/// by an extra blank line; otherwise as `///` item docs.
fn format_doc_extended(doc: &str, is_outer: bool) -> String {
    if doc.is_empty() {
        return String::new();
    }
    let prefix = if is_outer { "//! " } else { "/// " };
    let lines = doc.split('\n').collect_vec();
    // Drop leading/trailing blank lines so the comment block is tight.
    let lines = trim_slice(&lines, |x| x.is_empty());
    if lines.is_empty() {
        return String::new();
    }
    let extra_line_breaks = if is_outer { "\n\n" } else { "\n" };
    lines
        .iter()
        .map(|x| {
            if x.starts_with(" ") {
                // NOTE(review): indented lines get each space doubled —
                // presumably to preserve code-block indentation through
                // rustdoc/markdown processing; confirm this is intentional.
                format!("{}{}", prefix, x.replace(" ", "  "))
            } else {
                format!("{}{}", prefix, x)
            }
        })
        .join("\n")
        + extra_line_breaks
}
impl Generator {
// Computes the on-disk `.rs` file for the module at `rust_path`.
// The crate root maps to `lib.rs`; nested modules map to `a/b/c.rs`,
// creating intermediate directories as needed. Panics if the path
// does not start with this generator's crate name.
fn module_path(&self, rust_path: &RustPath) -> Result<PathBuf> {
    let parts = &rust_path.parts;
    assert_eq!(
        &parts[0], &self.crate_name,
        "Generator::push_file expects path from this crate"
    );
    let path = if parts.len() == 1 {
        self.output_src_path.join("lib.rs")
    } else {
        let mut path = self.output_src_path.clone();
        // All middle components become directories.
        for middle_part in &parts[1..parts.len() - 1] {
            path.push(middle_part);
        }
        create_dir_all(&path)?;
        path.push(format!("{}.rs", parts.last().expect("path is empty")));
        path
    };
    Ok(path)
}
// Opens `path` for writing and makes it the current output target.
fn push_file(&mut self, path: &Path) -> Result<()> {
    let file = create_file(path)?;
    self.destination.push(file);
    Ok(())
}
// Closes the current output target, restoring the previous one.
// Panics if there is no open file (unbalanced push/pop is a bug).
fn pop_file(&mut self) {
    self.destination
        .pop()
        .expect("generator: too much pop_file");
}
// Dispatches one database item to the generator routine for its kind.
fn generate_item(&mut self, item: &RustDatabaseItem, database: &RustDatabase) -> Result<()> {
    match &item.kind {
        RustItemKind::Module(module) => self.generate_module(module, database),
        RustItemKind::Struct(data) => self.generate_struct(data, database),
        RustItemKind::EnumValue(value) => self.generate_enum_value(value),
        RustItemKind::TraitImpl(value) => self.generate_trait_impl(value),
        RustItemKind::Function(value) => self.generate_rust_final_function(value, false),
        RustItemKind::FfiFunction(value) => self.generate_ffi_function(value),
    }
}
// Convenience wrapper over the free function, bound to this crate.
fn rust_type_to_code(&self, rust_type: &RustType) -> String {
    rust_type_to_code(rust_type, &self.crate_name)
}
// Emits one module: either into its own file (`lib.rs` / `foo.rs`)
// or as an inline `pub mod foo { ... }` block, followed by its docs
// and contents. The FFI module additionally mirrors its children into
// `ffi.in.rs` — presumably consumed by a build script, since the
// module body itself is an `include!` of `$OUT_DIR/ffi.rs`.
fn generate_module(&mut self, module: &RustModule, database: &RustDatabase) -> Result<()> {
    if module.kind.is_in_separate_file() {
        if module.kind != RustModuleKind::CrateRoot {
            // Declare the submodule in the current (parent) file first.
            writeln!(self, "pub mod {};", module.path.last())?;
        }
        let path = self.module_path(&module.path)?;
        self.push_file(&path)?;
    } else {
        assert_ne!(module.kind, RustModuleKind::CrateRoot);
        writeln!(self, "pub mod {} {{", module.path.last())?;
    }
    if let RustModuleKind::Ffi = module.kind {
        // TODO: shouldn't need this
        writeln!(self, "#![allow(dead_code)]")?;
    }
    write!(
        self,
        "{}",
        format_doc_extended(&doc_formatter::module_doc(&module.doc), true)
    )?;
    match module.kind {
        RustModuleKind::Ffi => {
            writeln!(self, "include!(concat!(env!(\"OUT_DIR\"), \"/ffi.rs\"));")?;
        }
        RustModuleKind::SizedTypes => {
            writeln!(
                self,
                "include!(concat!(env!(\"OUT_DIR\"), \"/sized_types.rs\"));"
            )?;
        }
        RustModuleKind::CrateRoot | RustModuleKind::Normal => {
            self.generate_children(&module.path, database)?;
        }
    }
    if module.kind.is_in_separate_file() {
        self.pop_file();
    } else {
        // close `mod {}`
        writeln!(self, "}}")?;
    }
    if module.kind == RustModuleKind::Ffi {
        // Raw FFI declarations are written a second time, to `ffi.in.rs`.
        let path = self.output_src_path.join("ffi.in.rs");
        self.destination.push(create_file(&path)?);
        self.generate_children(&module.path, database)?;
        self.pop_file();
    }
    Ok(())
}
// Emits one struct: wrapper types (enum wrapper, movable/immovable
// class wrappers) via templates or inline definitions, Qt slot
// wrappers via the closure-slot template, then an inherent `impl`
// block holding any child items. Sized types are emitted elsewhere
// and are an error here.
fn generate_struct(&mut self, rust_struct: &RustStruct, database: &RustDatabase) -> Result<()> {
    write!(
        self,
        "{}",
        format_doc(&doc_formatter::struct_doc(rust_struct))
    )?;
    let visibility = if rust_struct.is_public { "pub " } else { "" };
    match rust_struct.kind {
        RustStructKind::WrapperType(ref wrapper) => match wrapper.kind {
            RustWrapperTypeKind::EnumWrapper => {
                writeln!(
                    self,
                    include_str!("../templates/crate/enum_wrapper.rs.in"),
                    vis = visibility,
                    name = rust_struct.path.last()
                )?;
            }
            RustWrapperTypeKind::ImmovableClassWrapper => {
                // Opaque type: only ever used behind a pointer.
                writeln!(self, "#[repr(C)]")?;
                writeln!(
                    self,
                    "{}struct {} {{ _unused: u8, }}",
                    visibility,
                    rust_struct.path.last()
                )?;
            }
            RustWrapperTypeKind::MovableClassWrapper {
                ref sized_type_path,
            } => {
                // Newtype over the sized-types mirror of this class.
                writeln!(
                    self,
                    "{}struct {}({});",
                    visibility,
                    rust_struct.path.last(),
                    self.rust_path_to_string(sized_type_path),
                )?;
                writeln!(self)?;
            }
        },
        RustStructKind::QtSlotWrapper(ref slot_wrapper) => {
            let arg_texts: Vec<_> = slot_wrapper
                .arguments
                .iter()
                .map(|t| self.rust_type_to_code(&t.api_type))
                .collect();
            let args = arg_texts.join(", ");
            // A one-element tuple needs a trailing comma to stay a tuple.
            let args_tuple = format!("{}{}", args, if arg_texts.len() == 1 { "," } else { "" });
            let connections_mod =
                RustPath::from_parts(vec!["qt_core".to_string(), "connection".to_string()])
                    .full_name(Some(&self.crate_name));
            let object_type_name = RustPath::from_parts(vec![
                "qt_core".to_string(),
                "object".to_string(),
                "Object".to_string(),
            ])
            .full_name(Some(&self.crate_name));
            // FFI-side parameter list of the extern callback.
            let callback_args = slot_wrapper
                .arguments
                .iter()
                .enumerate()
                .map(|(num, t)| format!("arg{}: {}", num, self.rust_type_to_code(&t.ffi_type)))
                .join(", ");
            // Expressions converting each FFI argument back to its API type.
            let func_args = slot_wrapper
                .arguments
                .iter()
                .enumerate()
                .map_if_ok(|(num, t)| {
                    self.convert_type_from_ffi(t, format!("arg{}", num), false, false)
                })?
                .join(", ");
            writeln!(
                self,
                include_str!("../templates/crate/closure_slot_wrapper.rs.in"),
                type_name = rust_struct.path.full_name(Some(&self.crate_name)),
                pub_type_name = rust_struct.path.last(),
                callback_name = self.rust_path_to_string(&slot_wrapper.callback_path),
                args = args,
                args_tuple = args_tuple,
                connections_mod = connections_mod,
                object_type_name = object_type_name,
                func_args = func_args,
                callback_args = callback_args,
            )?;
        }
        RustStructKind::SizedType(_) => {
            bail!("sized struct can't be generated with rust code generator")
        }
    }
    // Child items (methods, constants, ...) go into an inherent impl.
    if database.children(&rust_struct.path).next().is_some() {
        writeln!(self, "impl {} {{", rust_struct.path.last())?;
        self.generate_children(&rust_struct.path, database)?;
        writeln!(self, "}}")?;
        writeln!(self)?;
    }
    Ok(())
}
// Emits one enum value as a `pub const` of newtype form
// `NAME: Struct = Struct(value);` — C++ enums are modelled as newtype
// structs, so values become constants rather than enum variants.
fn generate_enum_value(&mut self, value: &RustEnumValue) -> Result<()> {
    write!(
        self,
        "{}",
        format_doc(&doc_formatter::enum_value_doc(value))
    )?;
    let struct_path =
        self.rust_path_to_string(&value.path.parent().expect("enum value must have parent"));
    // Original C++ casing is kept, so silence the naming lint.
    writeln!(self, "#[allow(non_upper_case_globals)]")?;
    writeln!(
        self,
        "pub const {value_name}: {struct_path} = {struct_path}({value});",
        value_name = value.path.last(),
        struct_path = struct_path,
        value = value.value
    )?;
    Ok(())
}
// TODO: generate relative paths for better readability
// Renders `path` as a full name in the context of the current crate
// (exact semantics of `full_name` are defined in `rust_type`).
fn rust_path_to_string(&self, path: &RustPath) -> String {
    path.full_name(Some(&self.crate_name))
}
/// Generates Rust code containing declaration of a FFI function `func`.
/// Produces a single ` pub fn ...;` line intended to sit inside an
/// `extern "C"` block; a unit return type is omitted entirely.
fn rust_ffi_function_to_code(&self, func: &RustFFIFunction) -> String {
    // Each argument renders as `name: Type`.
    let mut args = func.arguments.iter().map(|arg| {
        format!(
            "{}: {}",
            arg.name,
            self.rust_type_to_code(&arg.argument_type)
        )
    });
    format!(
        " pub fn {}({}){};\n",
        func.path.last(),
        args.join(", "),
        match func.return_type {
            RustType::Unit => String::new(),
            _ => format!(" -> {}", self.rust_type_to_code(&func.return_type)),
        }
    )
}
/// Wraps `expression` of type `type1.rust_ffi_type` to convert
/// it to type `type1.rust_api_type`.
/// If `in_unsafe_context` is `true`, the output code will be placed inside
/// an `unsafe` block.
/// If `use_ffi_result_var` is `true`, the output code will assign
/// the value to a temporary variable `ffi_result` and return it.
fn convert_type_from_ffi(
    &self,
    type1: &RustFinalType,
    expression: String,
    in_unsafe_context: bool,
    use_ffi_result_var: bool,
) -> Result<String> {
    // Emit our own `unsafe { ... }` only when not already inside one.
    let (unsafe_start, unsafe_end) = if in_unsafe_context {
        ("", "")
    } else {
        ("unsafe { ", " }")
    };
    if type1.api_to_ffi_conversion == RustToFfiTypeConversion::None {
        return Ok(expression);
    }
    // Optionally capture the FFI call result in a named temporary.
    let (code1, source_expr) = if use_ffi_result_var {
        (
            format!("let ffi_result = {};\n", expression),
            "ffi_result".to_string(),
        )
    } else {
        (String::new(), expression)
    };
    let code2 = match type1.api_to_ffi_conversion {
        // Handled by the early return above.
        RustToFfiTypeConversion::None => unreachable!(),
        RustToFfiTypeConversion::RefToPtr => {
            // Pointer -> reference via as_ref/as_mut; null becomes a panic.
            let api_is_const = type1.api_type.is_const_pointer_like()?;
            let unwrap_code = ".expect(\"Attempted to convert null pointer to reference\")";
            format!(
                "{unsafe_start}{}.{}(){unsafe_end}{}",
                source_expr,
                if api_is_const { "as_ref" } else { "as_mut" },
                unwrap_code,
                unsafe_start = unsafe_start,
                unsafe_end = unsafe_end
            )
        }
        // Pointer -> owned value by dereference.
        RustToFfiTypeConversion::ValueToPtr => format!(
            "{unsafe_start}*{}{unsafe_end}",
            source_expr,
            unsafe_start = unsafe_start,
            unsafe_end = unsafe_end
        ),
        // Pointer -> owning CppBox.
        RustToFfiTypeConversion::CppBoxToPtr => format!(
            "{unsafe_start}::cpp_utils::CppBox::new({}){unsafe_end}",
            source_expr,
            unsafe_start = unsafe_start,
            unsafe_end = unsafe_end
        ),
        RustToFfiTypeConversion::PtrWrapperToPtr
        | RustToFfiTypeConversion::OptionPtrWrapperToPtr => {
            let is_option =
                type1.api_to_ffi_conversion == RustToFfiTypeConversion::OptionPtrWrapperToPtr;
            // For `Option<Wrapper<T>>` the wrapper type is the first
            // generic argument of `Option`; otherwise it's the API type.
            let ptr_wrapper_type = if is_option {
                type1
                    .api_type
                    .as_common()?
                    .generic_arguments
                    .as_ref()
                    .ok_or_else(|| err_msg("expected generic argument for Option"))?
                    .get(0)
                    .ok_or_else(|| err_msg("expected generic argument for Option"))?
            } else {
                &type1.api_type
            };
            let ptr_wrapper_path = &ptr_wrapper_type.as_common()?.path;
            format!(
                "{unsafe_start}{}::{}({}){unsafe_end}",
                self.rust_path_to_string(ptr_wrapper_path),
                if is_option { "new_option" } else { "new" },
                source_expr,
                unsafe_start = unsafe_start,
                unsafe_end = unsafe_end
            )
        }
        RustToFfiTypeConversion::QFlagsToUInt => {
            // Strip generic arguments so `QFlags::<T>::from_int` is not needed.
            let mut qflags_type = type1.api_type.clone();
            if let RustType::Common(RustCommonType {
                ref mut generic_arguments,
                ..
            }) = qflags_type
            {
                *generic_arguments = None;
            } else {
                unreachable!();
            }
            format!(
                "{}::from_int({} as i32)",
                self.rust_type_to_code(&qflags_type),
                source_expr
            )
        }
    };
    Ok(code1 + &code2)
}
/// Generates Rust code for calling an FFI function from a wrapper function.
/// If `in_unsafe_context` is `true`, the output code will be placed inside
/// an `unsafe` block.
fn generate_ffi_call(
    &self,
    arguments: &[RustFunctionArgument],
    return_type: &RustFinalType,
    wrapper_data: &RustFfiWrapperData,
    in_unsafe_context: bool,
) -> Result<String> {
    let (unsafe_start, unsafe_end) = if in_unsafe_context {
        ("", "")
    } else {
        ("unsafe { ", " }")
    };
    // One slot per FFI argument, filled by position below; any slot
    // still `None` at the end means a missing argument.
    let mut final_args = Vec::new();
    final_args.resize(wrapper_data.cpp_ffi_function.arguments.len(), None);
    let all_args: Vec<RustFunctionArgument> = Vec::from(arguments);
    for arg in &all_args {
        assert!(arg.ffi_index < final_args.len());
        // Convert the API-typed argument expression to its FFI form.
        let mut code = arg.name.clone();
        match arg.argument_type.api_to_ffi_conversion {
            RustToFfiTypeConversion::None => {}
            RustToFfiTypeConversion::OptionPtrWrapperToPtr => {
                bail!("OptionRefToPtr is not supported here yet");
            }
            RustToFfiTypeConversion::RefToPtr => {
                if arg.argument_type.api_type.is_const()?
                    && !arg.argument_type.ffi_type.is_const()?
                {
                    // Going const ref -> mut pointer requires casting
                    // through the const pointer type first.
                    let mut intermediate_type = arg.argument_type.ffi_type.clone();
                    intermediate_type.set_const(true)?;
                    code = format!(
                        "{} as {} as {}",
                        code,
                        self.rust_type_to_code(&intermediate_type),
                        self.rust_type_to_code(&arg.argument_type.ffi_type)
                    );
                } else {
                    code = format!(
                        "{} as {}",
                        code,
                        self.rust_type_to_code(&arg.argument_type.ffi_type)
                    );
                }
            }
            RustToFfiTypeConversion::ValueToPtr => {
                // Take a (mutable) borrow and cast it to the raw pointer type.
                let is_const = arg.argument_type.ffi_type.is_const_pointer_like()?;
                code = format!(
                    "{}{} as {}",
                    if is_const { "&" } else { "&mut " },
                    code,
                    self.rust_type_to_code(&arg.argument_type.ffi_type)
                );
            }
            RustToFfiTypeConversion::CppBoxToPtr | RustToFfiTypeConversion::PtrWrapperToPtr => {
                let is_const = arg.argument_type.ffi_type.is_const_pointer_like()?;
                let method = if is_const { "as_ptr" } else { "as_mut_ptr" };
                code = format!("{}.{}()", code, method);
            }
            RustToFfiTypeConversion::QFlagsToUInt => {
                code = format!("{}.to_int() as ::libc::c_uint", code);
            }
        }
        final_args[arg.ffi_index] = Some(code);
    }
    let mut result = Vec::new();
    let mut maybe_result_var_name = None;
    if let Some(ref i) = wrapper_data.return_type_ffi_index {
        // The C++ side returns through an output argument: declare an
        // uninitialized local, pass `&mut local`, return the local.
        let mut return_var_name = "object".to_string();
        let mut ii = 1;
        // Avoid colliding with any real argument name.
        while arguments.iter().any(|x| x.name == return_var_name) {
            ii += 1;
            return_var_name = format!("object{}", ii);
        }
        // For CppBoxToPtr the declared local is the boxed (inner) type,
        // taken from CppBox's generic argument.
        let struct_name =
            if return_type.api_to_ffi_conversion == RustToFfiTypeConversion::CppBoxToPtr {
                if let RustType::Common(RustCommonType {
                    ref generic_arguments,
                    ..
                }) = return_type.api_type
                {
                    let generic_arguments = generic_arguments
                        .as_ref()
                        .ok_or_else(|| err_msg("CppBox must have generic_arguments"))?;
                    let arg = generic_arguments.get(0).ok_or_else(|| {
                        err_msg("CppBox must have non-empty generic_arguments")
                    })?;
                    self.rust_type_to_code(arg)
                } else {
                    unexpected!("CppBox type expected");
                }
            } else {
                self.rust_type_to_code(&return_type.api_type)
            };
        result.push(format!(
            "{{\nlet mut {var}: {t} = {unsafe_start}\
             ::cpp_utils::new_uninitialized::NewUninitialized::new_uninitialized()\
             {unsafe_end};\n",
            var = return_var_name,
            t = struct_name,
            unsafe_start = unsafe_start,
            unsafe_end = unsafe_end
        ));
        final_args[*i as usize] = Some(format!("&mut {}", return_var_name));
        maybe_result_var_name = Some(return_var_name);
    }
    let final_args = final_args
        .into_iter()
        .map_if_ok(|x| x.ok_or_else(|| err_msg("ffi argument is missing")))?;
    // The actual FFI call; a trailing `;` when the value comes back
    // through the output variable instead of the call expression.
    result.push(format!(
        "{unsafe_start}{}({}){maybe_semicolon}{unsafe_end}",
        self.rust_path_to_string(&wrapper_data.ffi_function_path),
        final_args.join(", "),
        maybe_semicolon = if maybe_result_var_name.is_some() {
            ";"
        } else {
            ""
        },
        unsafe_start = unsafe_start,
        unsafe_end = unsafe_end
    ));
    if let Some(ref name) = maybe_result_var_name {
        result.push(format!("{}\n}}", name));
    }
    let code = result.join("");
    if maybe_result_var_name.is_none() {
        // Direct return value: convert it from the FFI type to the API type.
        self.convert_type_from_ffi(&return_type, code, in_unsafe_context, true)
    } else {
        Ok(code)
    }
}
/// Generates Rust code for declaring a function's arguments.
/// `self` gets special treatment (`self`, `&self`, `&mut self`, with an
/// optional explicit lifetime); other arguments render as `name: Type`,
/// with a `mut ` prefix when the wrapper body must take `&mut` of a
/// by-value argument. `lifetime`, when given, is substituted into each
/// argument's API type.
fn arg_texts(&self, args: &[RustFunctionArgument], lifetime: Option<&String>) -> Vec<String> {
    args.iter()
        .map(|arg| {
            if &arg.name == "self" {
                // Substitute the requested lifetime into the self type.
                let self_type = match lifetime {
                    Some(lifetime) => {
                        arg.argument_type.api_type.with_lifetime(lifetime.clone())
                    }
                    None => arg.argument_type.api_type.clone(),
                };
                match self_type {
                    RustType::Common { .. } => "self".to_string(),
                    RustType::PointerLike {
                        ref kind,
                        ref is_const,
                        ..
                    } => {
                        // Only references are a valid indirection for self.
                        if let RustPointerLikeTypeKind::Reference { ref lifetime } = *kind {
                            let maybe_mut = if *is_const { "" } else { "mut " };
                            match *lifetime {
                                Some(ref lifetime) => {
                                    format!("&'{} {}self", lifetime, maybe_mut)
                                }
                                None => format!("&{}self", maybe_mut),
                            }
                        } else {
                            panic!("invalid self argument type (indirection)");
                        }
                    }
                    _ => {
                        panic!("invalid self argument type (not Common)");
                    }
                }
            } else {
                // A by-value argument passed to FFI through a mutable
                // pointer must be declared `mut` in the signature.
                let mut maybe_mut_declaration = "";
                if let RustType::Common { .. } = arg.argument_type.api_type {
                    if arg.argument_type.api_to_ffi_conversion
                        == RustToFfiTypeConversion::ValueToPtr
                    {
                        if let RustType::PointerLike { ref is_const, .. } =
                            arg.argument_type.ffi_type
                        {
                            if !*is_const {
                                maybe_mut_declaration = "mut ";
                            }
                        }
                    }
                }
                format!(
                    "{}{}: {}",
                    maybe_mut_declaration,
                    arg.name,
                    match lifetime {
                        Some(lifetime) => self.rust_type_to_code(
                            &arg.argument_type.api_type.with_lifetime(lifetime.clone(),)
                        ),
                        None => self.rust_type_to_code(&arg.argument_type.api_type),
                    }
                )
            }
        })
        .collect()
}
/// Generates complete code of a Rust wrapper function.
/// `is_in_trait_context` suppresses the `pub` modifier, since trait
/// methods are emitted without one.
fn generate_rust_final_function(
    &mut self,
    func: &RustFunction,
    is_in_trait_context: bool,
) -> Result<()> {
    let maybe_pub = if func.is_public && !is_in_trait_context {
        "pub "
    } else {
        ""
    };
    let maybe_unsafe = if func.is_unsafe { "unsafe " } else { "" };
    // Body text depends on the function kind: an FFI wrapper call, a
    // deleter path, or a signal/slot getter (not implemented yet).
    let body = match func.kind {
        RustFunctionKind::FfiWrapper(ref data) => {
            self.generate_ffi_call(&func.arguments, &func.return_type, data, func.is_unsafe)?
        }
        RustFunctionKind::CppDeletableImpl { ref deleter } => self.rust_path_to_string(deleter),
        RustFunctionKind::SignalOrSlotGetter { .. } => unimplemented!(),
    };
    // Unit return types are omitted from the signature.
    let return_type_for_signature = if func.return_type.api_type == RustType::Unit {
        String::new()
    } else {
        format!(" -> {}", self.rust_type_to_code(&func.return_type.api_type))
    };
    // Collect every lifetime used by the arguments for the `<...>` list.
    let all_lifetimes: Vec<_> = func
        .arguments
        .iter()
        .filter_map(|x| x.argument_type.api_type.lifetime())
        .collect();
    let lifetimes_text = if all_lifetimes.is_empty() {
        String::new()
    } else {
        format!(
            "<{}>",
            all_lifetimes.iter().map(|x| format!("'{}", x)).join(", ")
        )
    };
    writeln!(
        self,
        "{doc}{maybe_pub}{maybe_unsafe}fn {name}{lifetimes_text}({args}){return_type} \
         {{\n{body}}}\n\n",
        doc = format_doc(&doc_formatter::function_doc(&func)),
        maybe_pub = maybe_pub,
        maybe_unsafe = maybe_unsafe,
        lifetimes_text = lifetimes_text,
        name = func.path.last(),
        args = self.arg_texts(&func.arguments, None).join(", "),
        return_type = return_type_for_signature,
        body = body
    )?;
    Ok(())
}
// Emits all direct children of `parent`: FFI functions first, grouped
// into a single `extern "C"` block, then every other item kind.
fn generate_children(&mut self, parent: &RustPath, database: &RustDatabase) -> Result<()> {
    // Only open the extern block if there is at least one FFI function.
    if database
        .children(&parent)
        .any(|item| item.kind.is_ffi_function())
    {
        writeln!(self, "extern \"C\" {{\n")?;
        for item in database
            .children(&parent)
            .filter(|item| item.kind.is_ffi_function())
        {
            self.generate_item(item, database)?;
        }
        writeln!(self, "}}\n")?;
    }
    for item in database
        .children(&parent)
        .filter(|item| !item.kind.is_ffi_function())
    {
        self.generate_item(item, database)?;
    }
    // TODO: somehow add items from crate template
    Ok(())
}
// Emits one `impl Trait for Type { ... }` block: associated type
// definitions first, then each method via the wrapper generator with
// `is_in_trait_context = true` (no `pub`).
fn generate_trait_impl(&mut self, trait1: &RustTraitImpl) -> Result<()> {
    let associated_types_text = trait1
        .associated_types
        .iter()
        .map(|t| format!("type {} = {};", t.name, self.rust_type_to_code(&t.value)))
        .join("\n");
    writeln!(
        self,
        "impl {} for {} {{\n{}",
        self.rust_type_to_code(&trait1.trait_type),
        self.rust_type_to_code(&trait1.target_type),
        associated_types_text,
    )?;
    for func in &trait1.functions {
        self.generate_rust_final_function(func, true)?;
    }
    writeln!(self, "}}\n")?;
    Ok(())
}
// Writes the extern declaration line for one FFI function.
fn generate_ffi_function(&mut self, function: &RustFFIFunction) -> Result<()> {
    let declaration = self.rust_ffi_function_to_code(function);
    writeln!(self, "{}", declaration)?;
    Ok(())
}
}
/// Entry point: generates the whole crate source tree for `crate_name`
/// from `database` into `output_src_path`, starting from the crate
/// root module.
pub fn generate(
    crate_name: &str,
    database: &RustDatabase,
    output_src_path: impl Into<PathBuf>,
) -> Result<()> {
    let mut generator = Generator {
        crate_name: crate_name.to_string(),
        destination: Vec::new(),
        output_src_path: output_src_path.into(),
    };
    // Locate the crate root module; everything else hangs off it.
    let mut crate_root = None;
    for item in &database.items {
        if let Some(module) = item.as_module_ref() {
            if module.kind == RustModuleKind::CrateRoot {
                crate_root = Some(module);
                break;
            }
        }
    }
    let crate_root = crate_root.ok_or_else(|| err_msg("crate root not found"))?;
    generator.generate_module(crate_root, database)?;
    Ok(())
}
// TODO: reimplement impl FlaggableEnum
/*
if *is_flaggable {
r = r + &format!(
include_str!("../templates/crate/impl_flaggable.rs.in"),
name = type1.name.last_name()?,
trait_type = RustName::new(vec![
"qt_core".to_string(),
"flags".to_string(),
"FlaggableEnum".to_string(),
])?
.full_name(Some(&self.config.crate_properties.name()))
);
}
*/
// TODO: reimplement impl Receiver for raw slot wrapper
/*
if let Some(ref slot_wrapper) = *slot_wrapper {
let arg_texts: Vec<_> = slot_wrapper
.arguments
.iter()
.map(|t| self.rust_type_to_code(&t.rust_api_type))
.collect();
let args = arg_texts.join(", ");
let args_tuple = format!(
"{}{}",
args,
if arg_texts.len() == 1 { "," } else { "" }
);
let connections_mod = RustName::new(vec![
"qt_core".to_string(),
"connection".to_string(),
])?
.full_name(Some(&self.config.crate_properties.name()));
let object_type_name = RustName::new(vec![
"qt_core".to_string(),
"object".to_string(),
"Object".to_string(),
])?
.full_name(Some(&self.config.crate_properties.name()));
r.push_str(&format!(
include_str!(
"../templates/crate/extern_slot_impl_receiver.rs.in"
),
type_name = type1
.name
.full_name(Some(&self.config.crate_properties.name())),
args_tuple = args_tuple,
receiver_id = slot_wrapper.receiver_id,
connections_mod = connections_mod,
object_type_name = object_type_name
));
}
*/
Commit message: "Use mem::uninitialized() for movable types" — the revised version of the file follows below.
//! Types and functions used for Rust code generation.
use crate::doc_formatter;
use crate::rust_info::RustDatabase;
use crate::rust_info::RustDatabaseItem;
use crate::rust_info::RustEnumValue;
use crate::rust_info::RustFFIFunction;
use crate::rust_info::RustFfiWrapperData;
use crate::rust_info::RustFunction;
use crate::rust_info::RustFunctionArgument;
use crate::rust_info::RustFunctionKind;
use crate::rust_info::RustItemKind;
use crate::rust_info::RustModule;
use crate::rust_info::RustModuleKind;
use crate::rust_info::RustStruct;
use crate::rust_info::RustStructKind;
use crate::rust_info::RustTraitImpl;
use crate::rust_info::RustWrapperTypeKind;
use crate::rust_type::RustCommonType;
use crate::rust_type::RustFinalType;
use crate::rust_type::RustPath;
use crate::rust_type::RustPointerLikeTypeKind;
use crate::rust_type::RustToFfiTypeConversion;
use crate::rust_type::RustType;
use itertools::Itertools;
use ritual_common::errors::{bail, err_msg, unexpected, Result};
use ritual_common::file_utils::create_dir_all;
use ritual_common::file_utils::create_file;
use ritual_common::file_utils::File;
use ritual_common::string_utils::trim_slice;
use ritual_common::utils::MapIfOk;
use std::fs;
use std::io;
use std::io::BufWriter;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
/// Generates Rust code representing type `rust_type` inside crate `crate_name`.
/// Same as `RustCodeGenerator::rust_type_to_code`, but accessible by other modules.
pub fn rust_type_to_code(rust_type: &RustType, current_crate: &str) -> String {
    match *rust_type {
        RustType::Unit => "()".to_string(),
        RustType::PointerLike {
            ref kind,
            ref target,
            ref is_const,
        } => {
            // Render the pointee, then prepend pointer/reference syntax.
            let target_code = rust_type_to_code(target, current_crate);
            match *kind {
                RustPointerLikeTypeKind::Pointer => {
                    if *is_const {
                        format!("*const {}", target_code)
                    } else {
                        format!("*mut {}", target_code)
                    }
                }
                RustPointerLikeTypeKind::Reference { ref lifetime } => {
                    // Explicit lifetimes print with a trailing space ("'a ").
                    let lifetime_text = match *lifetime {
                        Some(ref lifetime) => format!("'{} ", lifetime),
                        None => String::new(),
                    };
                    if *is_const {
                        format!("&{}{}", lifetime_text, target_code)
                    } else {
                        format!("&{}mut {}", lifetime_text, target_code)
                    }
                }
            }
        }
        RustType::Common(RustCommonType {
            ref path,
            ref generic_arguments,
        }) => {
            let mut code = path.full_name(Some(current_crate));
            // Append `<...>` generic arguments when present.
            if let Some(ref args) = *generic_arguments {
                code = format!(
                    "{}<{}>",
                    code,
                    args.iter()
                        .map(|x| rust_type_to_code(x, current_crate))
                        .join(", ",)
                );
            }
            code
        }
        RustType::FunctionPointer {
            ref return_type,
            ref arguments,
        } => format!(
            "extern \"C\" fn({}){}",
            arguments
                .iter()
                .map(|arg| rust_type_to_code(arg, current_crate))
                .join(", "),
            // A unit return type is omitted entirely (no `-> ()`).
            match return_type.as_ref() {
                &RustType::Unit => String::new(),
                return_type => format!(" -> {}", rust_type_to_code(return_type, current_crate)),
            }
        ),
    }
}
/// Streams generated Rust source into a stack of open output files.
struct Generator {
    // Name of the crate being generated; used when rendering paths.
    crate_name: String,
    // Root of the generated crate's `src` directory.
    output_src_path: PathBuf,
    // Stack of open output files; all writes go to the last entry.
    destination: Vec<File<BufWriter<fs::File>>>,
}
// `write!`/`writeln!` on the generator forward to the most recently
// opened output file; writing with no open file panics.
impl Write for Generator {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        io::Write::write(
            self.destination
                .last_mut()
                .expect("generator: no open files"),
            buf,
        )
    }
    fn flush(&mut self) -> io::Result<()> {
        self.destination
            .last_mut()
            .expect("generator: no open files")
            .flush()
    }
}
/// Generates `///` item documentation comments containing
/// markdown code `doc`; thin wrapper over `format_doc_extended`.
fn format_doc(doc: &str) -> String {
    format_doc_extended(doc, false)
}
/// Generates documentation comments containing markdown code `doc`.
/// With `is_outer` the lines are emitted as `//!` module docs followed
/// by an extra blank line; otherwise as `///` item docs.
fn format_doc_extended(doc: &str, is_outer: bool) -> String {
    if doc.is_empty() {
        return String::new();
    }
    let prefix = if is_outer { "//! " } else { "/// " };
    let lines = doc.split('\n').collect_vec();
    // Drop leading/trailing blank lines for a tight comment block.
    let lines = trim_slice(&lines, |x| x.is_empty());
    if lines.is_empty() {
        return String::new();
    }
    let extra_line_breaks = if is_outer { "\n\n" } else { "\n" };
    lines
        .iter()
        .map(|x| {
            if x.starts_with(" ") {
                // NOTE(review): spaces in indented lines are doubled —
                // presumably to survive rustdoc/markdown; confirm intent.
                format!("{}{}", prefix, x.replace(" ", "  "))
            } else {
                format!("{}{}", prefix, x)
            }
        })
        .join("\n")
        + extra_line_breaks
}
impl Generator {
// Computes the on-disk `.rs` file for the module at `rust_path`:
// crate root -> `lib.rs`; nested modules -> `a/b/c.rs` (directories
// created as needed). Panics on a path from another crate.
fn module_path(&self, rust_path: &RustPath) -> Result<PathBuf> {
    let parts = &rust_path.parts;
    assert_eq!(
        &parts[0], &self.crate_name,
        "Generator::push_file expects path from this crate"
    );
    let path = if parts.len() == 1 {
        self.output_src_path.join("lib.rs")
    } else {
        let mut path = self.output_src_path.clone();
        // All middle components become directories.
        for middle_part in &parts[1..parts.len() - 1] {
            path.push(middle_part);
        }
        create_dir_all(&path)?;
        path.push(format!("{}.rs", parts.last().expect("path is empty")));
        path
    };
    Ok(path)
}
// Opens `path` for writing and makes it the current output target.
fn push_file(&mut self, path: &Path) -> Result<()> {
    self.destination.push(create_file(path)?);
    Ok(())
}
// Closes the current output target, restoring the previous one;
// panics on unbalanced push/pop.
fn pop_file(&mut self) {
    self.destination
        .pop()
        .expect("generator: too much pop_file");
}
// Dispatches one database item to the generator routine for its kind.
fn generate_item(&mut self, item: &RustDatabaseItem, database: &RustDatabase) -> Result<()> {
    match item.kind {
        RustItemKind::Module(ref module) => self.generate_module(module, database),
        RustItemKind::Struct(ref data) => self.generate_struct(data, database),
        RustItemKind::EnumValue(ref value) => self.generate_enum_value(value),
        RustItemKind::TraitImpl(ref value) => self.generate_trait_impl(value),
        RustItemKind::Function(ref value) => self.generate_rust_final_function(value, false),
        RustItemKind::FfiFunction(ref value) => self.generate_ffi_function(value),
    }
}
// Convenience wrapper over the free function, bound to this crate.
fn rust_type_to_code(&self, rust_type: &RustType) -> String {
    rust_type_to_code(rust_type, &self.crate_name)
}
// Emits one module, either into its own file or as an inline
// `pub mod { ... }` block, then its docs and contents. The FFI module
// also mirrors its children into `ffi.in.rs` — presumably consumed by
// a build script, since the module body `include!`s `$OUT_DIR/ffi.rs`.
fn generate_module(&mut self, module: &RustModule, database: &RustDatabase) -> Result<()> {
    if module.kind.is_in_separate_file() {
        if module.kind != RustModuleKind::CrateRoot {
            // Declare the submodule in the parent file first.
            writeln!(self, "pub mod {};", module.path.last())?;
        }
        let path = self.module_path(&module.path)?;
        self.push_file(&path)?;
    } else {
        assert_ne!(module.kind, RustModuleKind::CrateRoot);
        writeln!(self, "pub mod {} {{", module.path.last())?;
    }
    if let RustModuleKind::Ffi = module.kind {
        // TODO: shouldn't need this
        writeln!(self, "#![allow(dead_code)]")?;
    }
    write!(
        self,
        "{}",
        format_doc_extended(&doc_formatter::module_doc(&module.doc), true)
    )?;
    match module.kind {
        RustModuleKind::Ffi => {
            writeln!(self, "include!(concat!(env!(\"OUT_DIR\"), \"/ffi.rs\"));")?;
        }
        RustModuleKind::SizedTypes => {
            writeln!(
                self,
                "include!(concat!(env!(\"OUT_DIR\"), \"/sized_types.rs\"));"
            )?;
        }
        RustModuleKind::CrateRoot | RustModuleKind::Normal => {
            self.generate_children(&module.path, database)?;
        }
    }
    if module.kind.is_in_separate_file() {
        self.pop_file();
    } else {
        // close `mod {}`
        writeln!(self, "}}")?;
    }
    if module.kind == RustModuleKind::Ffi {
        // Raw FFI declarations are written a second time, to `ffi.in.rs`.
        let path = self.output_src_path.join("ffi.in.rs");
        self.destination.push(create_file(&path)?);
        self.generate_children(&module.path, database)?;
        self.pop_file();
    }
    Ok(())
}
/// Generates the declaration of `rust_struct` and, when it has children in
/// the database, an inherent `impl` block containing them.
fn generate_struct(&mut self, rust_struct: &RustStruct, database: &RustDatabase) -> Result<()> {
    write!(
        self,
        "{}",
        format_doc(&doc_formatter::struct_doc(rust_struct))
    )?;
    let visibility = if rust_struct.is_public { "pub " } else { "" };
    match rust_struct.kind {
        RustStructKind::WrapperType(ref wrapper) => match wrapper.kind {
            RustWrapperTypeKind::EnumWrapper => {
                // Enum wrappers come from a file template.
                writeln!(
                    self,
                    include_str!("../templates/crate/enum_wrapper.rs.in"),
                    vis = visibility,
                    name = rust_struct.path.last()
                )?;
            }
            RustWrapperTypeKind::ImmovableClassWrapper => {
                // Opaque type: instances are only ever used behind pointers.
                writeln!(self, "#[repr(C)]")?;
                writeln!(
                    self,
                    "{}struct {} {{ _unused: u8, }}",
                    visibility,
                    rust_struct.path.last()
                )?;
            }
            RustWrapperTypeKind::MovableClassWrapper {
                ref sized_type_path,
            } => {
                // Newtype over the build-generated sized type.
                writeln!(
                    self,
                    "{}struct {}({});",
                    visibility,
                    rust_struct.path.last(),
                    self.rust_path_to_string(sized_type_path),
                )?;
                writeln!(self)?;
            }
        },
        RustStructKind::QtSlotWrapper(ref slot_wrapper) => {
            // Render the slot's argument list in the several shapes the
            // template needs (type list, tuple, callback params, call args).
            let arg_texts: Vec<_> = slot_wrapper
                .arguments
                .iter()
                .map(|t| self.rust_type_to_code(&t.api_type))
                .collect();
            let args = arg_texts.join(", ");
            // A trailing comma makes a single value a one-element tuple.
            let args_tuple = format!("{}{}", args, if arg_texts.len() == 1 { "," } else { "" });
            let connections_mod =
                RustPath::from_parts(vec!["qt_core".to_string(), "connection".to_string()])
                    .full_name(Some(&self.crate_name));
            let object_type_name = RustPath::from_parts(vec![
                "qt_core".to_string(),
                "object".to_string(),
                "Object".to_string(),
            ])
            .full_name(Some(&self.crate_name));
            let callback_args = slot_wrapper
                .arguments
                .iter()
                .enumerate()
                .map(|(num, t)| format!("arg{}: {}", num, self.rust_type_to_code(&t.ffi_type)))
                .join(", ");
            let func_args = slot_wrapper
                .arguments
                .iter()
                .enumerate()
                .map_if_ok(|(num, t)| {
                    self.convert_type_from_ffi(t, format!("arg{}", num), false, false)
                })?
                .join(", ");
            writeln!(
                self,
                include_str!("../templates/crate/closure_slot_wrapper.rs.in"),
                type_name = rust_struct.path.full_name(Some(&self.crate_name)),
                pub_type_name = rust_struct.path.last(),
                callback_name = self.rust_path_to_string(&slot_wrapper.callback_path),
                args = args,
                args_tuple = args_tuple,
                connections_mod = connections_mod,
                object_type_name = object_type_name,
                func_args = func_args,
                callback_args = callback_args,
            )?;
        }
        RustStructKind::SizedType(_) => {
            bail!("sized struct can't be generated with rust code generator")
        }
    }
    // Children (methods, constants, ...) go into an inherent impl block.
    if database.children(&rust_struct.path).next().is_some() {
        writeln!(self, "impl {} {{", rust_struct.path.last())?;
        self.generate_children(&rust_struct.path, database)?;
        writeln!(self, "}}")?;
        writeln!(self)?;
    }
    Ok(())
}
/// Emits a `pub const` declaration for one value of a wrapped C++ enum.
///
/// The constant is typed as the wrapper struct and holds the original
/// numeric value; `non_upper_case_globals` is allowed because the constant
/// keeps the original C++ variant name.
fn generate_enum_value(&mut self, value: &RustEnumValue) -> Result<()> {
    let doc = format_doc(&doc_formatter::enum_value_doc(value));
    write!(self, "{}", doc)?;
    let parent = value.path.parent().expect("enum value must have parent");
    let struct_path = self.rust_path_to_string(&parent);
    writeln!(self, "#[allow(non_upper_case_globals)]")?;
    writeln!(
        self,
        "pub const {name}: {ty} = {ty}({val});",
        name = value.path.last(),
        ty = struct_path,
        val = value.value
    )?;
    Ok(())
}
// TODO: generate relative paths for better readability
/// Renders `path` as a fully-qualified Rust path rooted at this crate.
fn rust_path_to_string(&self, path: &RustPath) -> String {
    path.full_name(Some(&self.crate_name))
}
/// Generates Rust code containing declaration of a FFI function `func`.
///
/// The returned line is meant to be placed inside an `extern "C"` block;
/// a unit return type is rendered as no `->` clause at all.
fn rust_ffi_function_to_code(&self, func: &RustFFIFunction) -> String {
    let args: Vec<String> = func
        .arguments
        .iter()
        .map(|arg| {
            format!(
                "{}: {}",
                arg.name,
                self.rust_type_to_code(&arg.argument_type)
            )
        })
        .collect();
    let return_part = if let RustType::Unit = func.return_type {
        String::new()
    } else {
        format!(" -> {}", self.rust_type_to_code(&func.return_type))
    };
    format!(
        " pub fn {}({}){};\n",
        func.path.last(),
        args.join(", "),
        return_part
    )
}
/// Wraps `expression` of type `type1.ffi_type` to convert
/// it to type `type1.api_type`.
/// If `in_unsafe_context` is `true`, the output code will be placed inside
/// an `unsafe` block.
/// If `use_ffi_result_var` is `true`, the output code will assign
/// the value to a temporary variable `ffi_result` and return it.
fn convert_type_from_ffi(
    &self,
    type1: &RustFinalType,
    expression: String,
    in_unsafe_context: bool,
    use_ffi_result_var: bool,
) -> Result<String> {
    // Generated conversions are wrapped in `unsafe { ... }` unless the
    // caller's context is already unsafe.
    let (unsafe_start, unsafe_end) = if in_unsafe_context {
        ("", "")
    } else {
        ("unsafe { ", " }")
    };
    // No conversion required: pass the expression through unchanged.
    if type1.api_to_ffi_conversion == RustToFfiTypeConversion::None {
        return Ok(expression);
    }
    // Optionally capture the FFI result in a named temporary first.
    let (code1, source_expr) = if use_ffi_result_var {
        (
            format!("let ffi_result = {};\n", expression),
            "ffi_result".to_string(),
        )
    } else {
        (String::new(), expression)
    };
    let code2 = match type1.api_to_ffi_conversion {
        // Handled by the early return above.
        RustToFfiTypeConversion::None => unreachable!(),
        RustToFfiTypeConversion::RefToPtr => {
            // Raw pointer -> reference; null pointers panic in generated code.
            let api_is_const = type1.api_type.is_const_pointer_like()?;
            let unwrap_code = ".expect(\"Attempted to convert null pointer to reference\")";
            format!(
                "{unsafe_start}{}.{}(){unsafe_end}{}",
                source_expr,
                if api_is_const { "as_ref" } else { "as_mut" },
                unwrap_code,
                unsafe_start = unsafe_start,
                unsafe_end = unsafe_end
            )
        }
        // Dereference the pointer to produce a value.
        RustToFfiTypeConversion::ValueToPtr => format!(
            "{unsafe_start}*{}{unsafe_end}",
            source_expr,
            unsafe_start = unsafe_start,
            unsafe_end = unsafe_end
        ),
        // Take ownership of the pointed-to object via CppBox.
        RustToFfiTypeConversion::CppBoxToPtr => format!(
            "{unsafe_start}::cpp_utils::CppBox::new({}){unsafe_end}",
            source_expr,
            unsafe_start = unsafe_start,
            unsafe_end = unsafe_end
        ),
        RustToFfiTypeConversion::PtrWrapperToPtr
        | RustToFfiTypeConversion::OptionPtrWrapperToPtr => {
            let is_option =
                type1.api_to_ffi_conversion == RustToFfiTypeConversion::OptionPtrWrapperToPtr;
            // For `Option<Wrapper>` the wrapper type is the first generic argument.
            let ptr_wrapper_type = if is_option {
                type1
                    .api_type
                    .as_common()?
                    .generic_arguments
                    .as_ref()
                    .ok_or_else(|| err_msg("expected generic argument for Option"))?
                    .get(0)
                    .ok_or_else(|| err_msg("expected generic argument for Option"))?
            } else {
                &type1.api_type
            };
            let ptr_wrapper_path = &ptr_wrapper_type.as_common()?.path;
            format!(
                "{unsafe_start}{}::{}({}){unsafe_end}",
                self.rust_path_to_string(ptr_wrapper_path),
                if is_option { "new_option" } else { "new" },
                source_expr,
                unsafe_start = unsafe_start,
                unsafe_end = unsafe_end
            )
        }
        RustToFfiTypeConversion::QFlagsToUInt => {
            // Strip generic arguments so only the plain `QFlags` type is named.
            let mut qflags_type = type1.api_type.clone();
            if let RustType::Common(RustCommonType {
                ref mut generic_arguments,
                ..
            }) = qflags_type
            {
                *generic_arguments = None;
            } else {
                unreachable!();
            }
            format!(
                "{}::from_int({} as i32)",
                self.rust_type_to_code(&qflags_type),
                source_expr
            )
        }
    };
    Ok(code1 + &code2)
}
/// Generates Rust code for calling an FFI function from a wrapper function.
/// If `in_unsafe_context` is `true`, the output code will be placed inside
/// an `unsafe` block.
fn generate_ffi_call(
    &self,
    arguments: &[RustFunctionArgument],
    return_type: &RustFinalType,
    wrapper_data: &RustFfiWrapperData,
    in_unsafe_context: bool,
) -> Result<String> {
    let (unsafe_start, unsafe_end) = if in_unsafe_context {
        ("", "")
    } else {
        ("unsafe { ", " }")
    };
    // One slot per FFI argument, filled by position (`ffi_index`) below.
    let mut final_args = Vec::new();
    final_args.resize(wrapper_data.cpp_ffi_function.arguments.len(), None);
    let all_args: Vec<RustFunctionArgument> = Vec::from(arguments);
    for arg in &all_args {
        assert!(arg.ffi_index < final_args.len());
        // Convert each API-level argument expression to its FFI form.
        let mut code = arg.name.clone();
        match arg.argument_type.api_to_ffi_conversion {
            RustToFfiTypeConversion::None => {}
            RustToFfiTypeConversion::OptionPtrWrapperToPtr => {
                bail!("OptionRefToPtr is not supported here yet");
            }
            RustToFfiTypeConversion::RefToPtr => {
                if arg.argument_type.api_type.is_const()?
                    && !arg.argument_type.ffi_type.is_const()?
                {
                    // A const reference can't be cast straight to a mutable
                    // pointer, so cast through a const pointer first.
                    let mut intermediate_type = arg.argument_type.ffi_type.clone();
                    intermediate_type.set_const(true)?;
                    code = format!(
                        "{} as {} as {}",
                        code,
                        self.rust_type_to_code(&intermediate_type),
                        self.rust_type_to_code(&arg.argument_type.ffi_type)
                    );
                } else {
                    code = format!(
                        "{} as {}",
                        code,
                        self.rust_type_to_code(&arg.argument_type.ffi_type)
                    );
                }
            }
            RustToFfiTypeConversion::ValueToPtr => {
                // Pass the value's address, matching the FFI pointer's constness.
                let is_const = arg.argument_type.ffi_type.is_const_pointer_like()?;
                code = format!(
                    "{}{} as {}",
                    if is_const { "&" } else { "&mut " },
                    code,
                    self.rust_type_to_code(&arg.argument_type.ffi_type)
                );
            }
            RustToFfiTypeConversion::CppBoxToPtr | RustToFfiTypeConversion::PtrWrapperToPtr => {
                let is_const = arg.argument_type.ffi_type.is_const_pointer_like()?;
                let method = if is_const { "as_ptr" } else { "as_mut_ptr" };
                code = format!("{}.{}()", code, method);
            }
            RustToFfiTypeConversion::QFlagsToUInt => {
                code = format!("{}.to_int() as ::libc::c_uint", code);
            }
        }
        final_args[arg.ffi_index] = Some(code);
    }
    let mut result = Vec::new();
    let mut maybe_result_var_name = None;
    if let Some(ref i) = wrapper_data.return_type_ffi_index {
        // The FFI function returns through an output argument: declare a
        // local, pass `&mut local` at position `i`, and return the local.
        let mut return_var_name = "object".to_string();
        let mut ii = 1;
        // Pick a variable name that doesn't clash with any argument name.
        while arguments.iter().any(|x| x.name == return_var_name) {
            ii += 1;
            return_var_name = format!("object{}", ii);
        }
        let struct_name =
            if return_type.api_to_ffi_conversion == RustToFfiTypeConversion::CppBoxToPtr {
                // For CppBox returns the local has the boxed (inner) type.
                if let RustType::Common(RustCommonType {
                    ref generic_arguments,
                    ..
                }) = return_type.api_type
                {
                    let generic_arguments = generic_arguments
                        .as_ref()
                        .ok_or_else(|| err_msg("CppBox must have generic_arguments"))?;
                    let arg = generic_arguments.get(0).ok_or_else(|| {
                        err_msg("CppBox must have non-empty generic_arguments")
                    })?;
                    self.rust_type_to_code(arg)
                } else {
                    unexpected!("CppBox type expected");
                }
            } else {
                self.rust_type_to_code(&return_type.api_type)
            };
        // TODO: use MaybeUninit when it's stable
        result.push(format!(
            "{{\nlet mut {var}: {t} = {unsafe_start}\
             ::std::mem::uninitialized()\
             {unsafe_end};\n",
            var = return_var_name,
            t = struct_name,
            unsafe_start = unsafe_start,
            unsafe_end = unsafe_end
        ));
        final_args[*i as usize] = Some(format!("&mut {}", return_var_name));
        maybe_result_var_name = Some(return_var_name);
    }
    // Every FFI argument slot must have been filled by now.
    let final_args = final_args
        .into_iter()
        .map_if_ok(|x| x.ok_or_else(|| err_msg("ffi argument is missing")))?;
    result.push(format!(
        "{unsafe_start}{}({}){maybe_semicolon}{unsafe_end}",
        self.rust_path_to_string(&wrapper_data.ffi_function_path),
        final_args.join(", "),
        maybe_semicolon = if maybe_result_var_name.is_some() {
            ";"
        } else {
            ""
        },
        unsafe_start = unsafe_start,
        unsafe_end = unsafe_end
    ));
    if let Some(ref name) = maybe_result_var_name {
        result.push(format!("{}\n}}", name));
    }
    let code = result.join("");
    if maybe_result_var_name.is_none() {
        // Direct return value: convert it from the FFI type to the API type.
        self.convert_type_from_ffi(&return_type, code, in_unsafe_context, true)
    } else {
        Ok(code)
    }
}
/// Generates Rust code for declaring a function's arguments.
///
/// `self` receivers are rendered as `self`, `&self`, `&mut self` or
/// `&'a [mut] self` depending on the receiver type; all other arguments
/// are rendered as `name: Type`. When `lifetime` is given, it is applied
/// to every argument's API type.
fn arg_texts(&self, args: &[RustFunctionArgument], lifetime: Option<&String>) -> Vec<String> {
    args.iter()
        .map(|arg| {
            if &arg.name == "self" {
                let self_type = match lifetime {
                    Some(lifetime) => {
                        arg.argument_type.api_type.with_lifetime(lifetime.clone())
                    }
                    None => arg.argument_type.api_type.clone(),
                };
                match self_type {
                    // By-value receiver.
                    RustType::Common { .. } => "self".to_string(),
                    RustType::PointerLike {
                        ref kind,
                        ref is_const,
                        ..
                    } => {
                        if let RustPointerLikeTypeKind::Reference { ref lifetime } = *kind {
                            let maybe_mut = if *is_const { "" } else { "mut " };
                            match *lifetime {
                                Some(ref lifetime) => {
                                    format!("&'{} {}self", lifetime, maybe_mut)
                                }
                                None => format!("&{}self", maybe_mut),
                            }
                        } else {
                            panic!("invalid self argument type (indirection)");
                        }
                    }
                    _ => {
                        panic!("invalid self argument type (not Common)");
                    }
                }
            } else {
                // A by-value argument passed via mutable pointer on the FFI
                // side needs `mut` so its address can be taken mutably.
                let mut maybe_mut_declaration = "";
                if let RustType::Common { .. } = arg.argument_type.api_type {
                    if arg.argument_type.api_to_ffi_conversion
                        == RustToFfiTypeConversion::ValueToPtr
                    {
                        if let RustType::PointerLike { ref is_const, .. } =
                            arg.argument_type.ffi_type
                        {
                            if !*is_const {
                                maybe_mut_declaration = "mut ";
                            }
                        }
                    }
                }
                format!(
                    "{}{}: {}",
                    maybe_mut_declaration,
                    arg.name,
                    match lifetime {
                        Some(lifetime) => self.rust_type_to_code(
                            &arg.argument_type.api_type.with_lifetime(lifetime.clone(),)
                        ),
                        None => self.rust_type_to_code(&arg.argument_type.api_type),
                    }
                )
            }
        })
        .collect()
}
/// Generates complete code of a Rust wrapper function.
///
/// When `is_in_trait_context` is `true` the `pub` qualifier is omitted,
/// since trait methods can't carry visibility modifiers.
fn generate_rust_final_function(
    &mut self,
    func: &RustFunction,
    is_in_trait_context: bool,
) -> Result<()> {
    let maybe_pub = if func.is_public && !is_in_trait_context {
        "pub "
    } else {
        ""
    };
    let maybe_unsafe = if func.is_unsafe { "unsafe " } else { "" };
    // Render the function body according to the function's kind.
    let body = match func.kind {
        RustFunctionKind::FfiWrapper(ref data) => {
            self.generate_ffi_call(&func.arguments, &func.return_type, data, func.is_unsafe)?
        }
        RustFunctionKind::CppDeletableImpl { ref deleter } => self.rust_path_to_string(deleter),
        RustFunctionKind::SignalOrSlotGetter { .. } => unimplemented!(),
    };
    let return_type_for_signature = if func.return_type.api_type == RustType::Unit {
        String::new()
    } else {
        format!(" -> {}", self.rust_type_to_code(&func.return_type.api_type))
    };
    // Collect the argument lifetimes for the `<...>` generic list.
    let all_lifetimes: Vec<_> = func
        .arguments
        .iter()
        .filter_map(|x| x.argument_type.api_type.lifetime())
        .collect();
    let lifetimes_text = if all_lifetimes.is_empty() {
        String::new()
    } else {
        format!(
            "<{}>",
            all_lifetimes.iter().map(|x| format!("'{}", x)).join(", ")
        )
    };
    writeln!(
        self,
        "{doc}{maybe_pub}{maybe_unsafe}fn {name}{lifetimes_text}({args}){return_type} \
         {{\n{body}}}\n\n",
        doc = format_doc(&doc_formatter::function_doc(&func)),
        maybe_pub = maybe_pub,
        maybe_unsafe = maybe_unsafe,
        lifetimes_text = lifetimes_text,
        name = func.path.last(),
        args = self.arg_texts(&func.arguments, None).join(", "),
        return_type = return_type_for_signature,
        body = body
    )?;
    Ok(())
}
/// Generates code for all direct children of `parent`.
///
/// FFI function declarations are grouped first into a single
/// `extern "C"` block; all remaining items follow as plain Rust items.
fn generate_children(&mut self, parent: &RustPath, database: &RustDatabase) -> Result<()> {
    // `parent` is already a reference — pass it directly instead of the
    // original's needless `&parent` double-borrow.
    if database
        .children(parent)
        .any(|item| item.kind.is_ffi_function())
    {
        writeln!(self, "extern \"C\" {{\n")?;
        for item in database
            .children(parent)
            .filter(|item| item.kind.is_ffi_function())
        {
            self.generate_item(item, database)?;
        }
        writeln!(self, "}}\n")?;
    }
    for item in database
        .children(parent)
        .filter(|item| !item.kind.is_ffi_function())
    {
        self.generate_item(item, database)?;
    }
    // TODO: somehow add items from crate template
    Ok(())
}
/// Writes an `impl Trait for Type { ... }` block, including its associated
/// type bindings and all trait method implementations.
fn generate_trait_impl(&mut self, trait1: &RustTraitImpl) -> Result<()> {
    let mut associated_types = Vec::new();
    for t in &trait1.associated_types {
        let line = format!("type {} = {};", t.name, self.rust_type_to_code(&t.value));
        associated_types.push(line);
    }
    writeln!(
        self,
        "impl {} for {} {{\n{}",
        self.rust_type_to_code(&trait1.trait_type),
        self.rust_type_to_code(&trait1.target_type),
        associated_types.join("\n"),
    )?;
    for func in &trait1.functions {
        self.generate_rust_final_function(func, true)?;
    }
    writeln!(self, "}}\n")?;
    Ok(())
}
/// Writes the rendered declaration of a single FFI function.
fn generate_ffi_function(&mut self, function: &RustFFIFunction) -> Result<()> {
    let code = self.rust_ffi_function_to_code(function);
    writeln!(self, "{}", code)?;
    Ok(())
}
}
/// Entry point of the code generator: writes the crate's source tree for
/// `database` into `output_src_path`, starting at the crate root module.
pub fn generate(
    crate_name: &str,
    database: &RustDatabase,
    output_src_path: impl Into<PathBuf>,
) -> Result<()> {
    let mut g = Generator {
        crate_name: crate_name.to_string(),
        destination: Vec::new(),
        output_src_path: output_src_path.into(),
    };
    // Locate the first crate-root module; generation recurses from there.
    let mut crate_root = None;
    for item in &database.items {
        if let Some(module) = item.as_module_ref() {
            if module.kind == RustModuleKind::CrateRoot {
                crate_root = Some(module);
                break;
            }
        }
    }
    let crate_root = crate_root.ok_or_else(|| err_msg("crate root not found"))?;
    g.generate_module(crate_root, database)?;
    Ok(())
}
// TODO: reimplement impl FlaggableEnum
/*
if *is_flaggable {
r = r + &format!(
include_str!("../templates/crate/impl_flaggable.rs.in"),
name = type1.name.last_name()?,
trait_type = RustName::new(vec![
"qt_core".to_string(),
"flags".to_string(),
"FlaggableEnum".to_string(),
])?
.full_name(Some(&self.config.crate_properties.name()))
);
}
*/
// TODO: reimplement impl Receiver for raw slot wrapper
/*
if let Some(ref slot_wrapper) = *slot_wrapper {
let arg_texts: Vec<_> = slot_wrapper
.arguments
.iter()
.map(|t| self.rust_type_to_code(&t.rust_api_type))
.collect();
let args = arg_texts.join(", ");
let args_tuple = format!(
"{}{}",
args,
if arg_texts.len() == 1 { "," } else { "" }
);
let connections_mod = RustName::new(vec![
"qt_core".to_string(),
"connection".to_string(),
])?
.full_name(Some(&self.config.crate_properties.name()));
let object_type_name = RustName::new(vec![
"qt_core".to_string(),
"object".to_string(),
"Object".to_string(),
])?
.full_name(Some(&self.config.crate_properties.name()));
r.push_str(&format!(
include_str!(
"../templates/crate/extern_slot_impl_receiver.rs.in"
),
type_name = type1
.name
.full_name(Some(&self.config.crate_properties.name())),
args_tuple = args_tuple,
receiver_id = slot_wrapper.receiver_id,
connections_mod = connections_mod,
object_type_name = object_type_name
));
}
*/
|
// Copyright (c) 2017 Anatoly Ikorsky
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
use mysql_common::row::convert::FromRowError;
use mysql_common::{io::ReadMysqlExt, packets::parse_local_infile_packet};
use tokio::prelude::*;
use std::{borrow::Cow, marker::PhantomData, result::Result as StdResult, sync::Arc};
use crate::{
connection_like::Connection,
error::*,
prelude::{FromRow, Protocol},
Column, Row,
};
/// Result set metadata.
///
/// Stored on the connection as the "pending result" while a result set is
/// being consumed.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum ResultSetMeta {
    /// Text result set, that may contain rows.
    Text(Arc<[Column]>),
    /// Binary result set, that may contain rows.
    Binary(Arc<[Column]>),
    /// Error result set.
    Error(ServerError),
}
impl ResultSetMeta {
    /// Returns the column list, or the server error for an error result set.
    fn columns(&self) -> StdResult<&Arc<[Column]>, &ServerError> {
        match self {
            ResultSetMeta::Text(cols) | ResultSetMeta::Binary(cols) => Ok(cols),
            ResultSetMeta::Error(err) => Err(err),
        }
    }
}
/// Result of a query or statement execution.
///
/// Represents an asynchronous query result, that may not be fully consumed. Note,
/// that unconsumed query results are dropped implicitly when corresponding connection
/// is dropped or queried.
#[derive(Debug)]
pub struct QueryResult<'a, 't: 'a, P> {
    // Borrowed or owned connection the rows are read from.
    conn: Connection<'a, 't>,
    // Binds this result to its wire protocol (`P`); no data is stored.
    __phantom: PhantomData<P>,
}
impl<'a, 't: 'a, P> QueryResult<'a, 't, P>
where
    P: Protocol,
{
    /// Wraps a connection into a `QueryResult`.
    pub fn new<T: Into<Connection<'a, 't>>>(conn: T) -> Self {
        QueryResult {
            conn: conn.into(),
            __phantom: PhantomData,
        }
    }
    /// Returns `true` if this query result may contain rows.
    ///
    /// If `false` then no rows possible for this query result (e.g. result of an UPDATE query).
    fn has_rows(&self) -> bool {
        self.conn
            .get_pending_result()
            .and_then(|meta| meta.columns().map(|columns| columns.len() > 0).ok())
            .unwrap_or(false)
    }
    /// `true` if there are no more rows nor result sets in this query.
    pub fn is_empty(&self) -> bool {
        !self.has_rows() && !self.conn.more_results_exists()
    }
    /// Returns the next row of the current result set, or `Ok(None)` when the
    /// current result set is exhausted (advancing to the next result set, if any).
    pub async fn next(&mut self) -> Result<Option<Row>> {
        loop {
            let columns = match self.conn.get_pending_result() {
                Some(ResultSetMeta::Text(cols)) | Some(ResultSetMeta::Binary(cols)) => {
                    Ok(Some(cols.clone()))
                }
                Some(ResultSetMeta::Error(err)) => Err(Error::from(err.clone())),
                None => Ok(None),
            };
            match columns {
                Ok(Some(columns)) => {
                    if columns.is_empty() {
                        // Empty, but not yet consumed result set.
                        self.conn.set_pending_result(None);
                    } else {
                        // Not yet consumed non-empty result set.
                        let packet = match self.conn.read_packet().await {
                            Ok(packet) => packet,
                            Err(err) => {
                                // Next row contained an error. No more data will follow.
                                self.conn.set_pending_result(None);
                                return Err(err);
                            }
                        };
                        if P::is_last_result_set_packet(self.conn.capabilities(), &packet) {
                            // `packet` is a result set terminator.
                            self.conn.set_pending_result(None);
                        } else {
                            // `packet` is a result set row.
                            return Ok(Some(P::read_result_set_row(&packet, columns)?));
                        }
                    }
                }
                Ok(None) => {
                    // Consumed result set.
                    if self.conn.more_results_exists() {
                        // More data will follow.
                        self.conn.sync_seq_id();
                        self.conn.read_result_set::<P>(false).await?;
                        return Ok(None);
                    } else {
                        // The end of a query result.
                        return Ok(None);
                    }
                }
                Err(err) => {
                    // Error result set. No more data will follow.
                    self.conn.set_pending_result(None);
                    return Err(err);
                }
            }
        }
    }
    /// Last insert id, if any.
    pub fn last_insert_id(&self) -> Option<u64> {
        self.conn.last_insert_id()
    }
    /// Number of affected rows as reported by the server, or `0`.
    pub fn affected_rows(&self) -> u64 {
        self.conn.affected_rows()
    }
    /// Text information as reported by the server, or an empty string.
    pub fn info(&self) -> Cow<'_, str> {
        self.conn.info()
    }
    /// Number of warnings as reported by the server, or `0`.
    pub fn warnings(&self) -> u16 {
        self.conn.get_warnings()
    }
    /// Collects the current result set of this query result.
    ///
    /// It is parametrized by `R` and internally calls `R::from_row(Row)` on each row.
    ///
    /// It will collect rows up to a nearest result set boundary. This means that you should call
    /// `collect` as many times as result sets in your query result. For example query
    /// `SELECT 'foo'; SELECT 'foo', 'bar';` will produce `QueryResult` with two result sets in it.
    /// One can use `QueryResult::is_empty` to make sure that there is no more result sets.
    ///
    /// # Panic
    ///
    /// It'll panic if any row isn't convertible to `R` (i.e. programmer error or unknown schema).
    /// * In case of programmer error see [`FromRow`] docs;
    /// * In case of unknown schema use [`QueryResult::try_collect`].
    pub async fn collect<R>(&mut self) -> Result<Vec<R>>
    where
        R: FromRow + Send + 'static,
    {
        self.reduce(Vec::new(), |mut acc, row| {
            acc.push(FromRow::from_row(row));
            acc
        })
        .await
    }
    /// Collects the current result set of this query result.
    ///
    /// It works the same way as [`QueryResult::collect`] but won't panic if row isn't convertible
    /// to `R`.
    pub async fn try_collect<R>(&mut self) -> Result<Vec<StdResult<R, FromRowError>>>
    where
        R: FromRow + Send + 'static,
    {
        self.reduce(Vec::new(), |mut acc, row| {
            acc.push(FromRow::from_row_opt(row));
            acc
        })
        .await
    }
    /// Collects the current result set of this query result and drops everything else.
    ///
    /// # Panic
    ///
    /// It'll panic if any row isn't convertible to `R` (i.e. programmer error or unknown schema).
    /// * In case of programmer error see `FromRow` docs;
    /// * In case of unknown schema use [`QueryResult::try_collect`].
    pub async fn collect_and_drop<R>(mut self) -> Result<Vec<R>>
    where
        R: FromRow + Send + 'static,
    {
        let output = self.collect::<R>().await?;
        self.drop_result().await?;
        Ok(output)
    }
    /// Collects the current result set of this query result and drops everything else.
    ///
    /// It works the same way as [`QueryResult::collect_and_drop`] but won't panic if row isn't
    /// convertible to `R`.
    pub async fn try_collect_and_drop<R>(mut self) -> Result<Vec<StdResult<R, FromRowError>>>
    where
        R: FromRow + Send + 'static,
    {
        let output = self.try_collect().await?;
        self.drop_result().await?;
        Ok(output)
    }
    /// Executes `fun` on every row of the current result set.
    ///
    /// It will stop on the nearest result set boundary (see `QueryResult::collect` docs).
    pub async fn for_each<F>(&mut self, mut fun: F) -> Result<()>
    where
        F: FnMut(Row),
    {
        if self.is_empty() {
            Ok(())
        } else {
            while let Some(row) = self.next().await? {
                fun(row);
            }
            Ok(())
        }
    }
    /// Executes `fun` on every row of the current result set and drops everything else.
    pub async fn for_each_and_drop<F>(mut self, fun: F) -> Result<()>
    where
        F: FnMut(Row),
    {
        self.for_each(fun).await?;
        self.drop_result().await?;
        Ok(())
    }
    /// Maps every row of the current result set to `U` using `fun`.
    ///
    /// It will stop on the nearest result set boundary (see `QueryResult::collect` docs).
    pub async fn map<F, U>(&mut self, mut fun: F) -> Result<Vec<U>>
    where
        F: FnMut(Row) -> U,
    {
        let mut acc = Vec::new();
        while let Some(row) = self.next().await? {
            // NOTE(review): `crate::from_row::<Row>` here looks like an
            // identity conversion since `fun` takes `Row` — verify.
            acc.push(fun(crate::from_row(row)));
        }
        Ok(acc)
    }
    /// Map every row of the current result set to `U` using `fun` and drops everything else.
    pub async fn map_and_drop<F, U>(mut self, fun: F) -> Result<Vec<U>>
    where
        F: FnMut(Row) -> U,
    {
        let rows = self.map(fun).await?;
        self.drop_result().await?;
        Ok(rows)
    }
    /// Reduces rows of the current result set to `U` using `fun`.
    ///
    /// It will stop on the nearest result set boundary (see `QueryResult::collect` docs).
    pub async fn reduce<T, F, U>(&mut self, mut init: U, mut fun: F) -> Result<U>
    where
        F: FnMut(U, T) -> U,
        T: FromRow + Send + 'static,
    {
        while let Some(row) = self.next().await? {
            init = fun(init, crate::from_row(row));
        }
        Ok(init)
    }
    /// Reduces rows of the current result set to `U` using `fun` and drops everything else.
    pub async fn reduce_and_drop<T, F, U>(mut self, init: U, fun: F) -> Result<U>
    where
        F: FnMut(U, T) -> U,
        T: FromRow + Send + 'static,
    {
        let acc = self.reduce(init, fun).await?;
        self.drop_result().await?;
        Ok(acc)
    }
    /// Drops this query result.
    ///
    /// Consumes and discards every remaining row of every remaining result set.
    pub async fn drop_result(mut self) -> Result<()> {
        loop {
            while let Some(_) = self.next().await? {}
            if self.conn.get_pending_result().is_none() {
                break Ok(());
            }
        }
    }
    /// Returns a reference to a columns list of this query result.
    ///
    /// Empty list means that this result set was never meant to contain rows.
    pub fn columns_ref(&self) -> &[Column] {
        self.conn
            .get_pending_result()
            .and_then(|meta| meta.columns().map(|cols| &cols[..]).ok())
            .unwrap_or_default()
    }
    /// Returns a copy of a columns list of this query result.
    pub fn columns(&self) -> Option<Arc<[Column]>> {
        self.conn
            .get_pending_result()
            .and_then(|meta| meta.columns().map(|columns| columns.clone()).ok())
    }
}
impl crate::Conn {
/// Will read result set and write pending result into `self` (if any).
pub(crate) async fn read_result_set<P>(&mut self, is_first_result_set: bool) -> Result<()>
where
P: Protocol,
{
let packet = match self.read_packet().await {
Ok(packet) => packet,
Err(err @ Error::Server(_)) if is_first_result_set => {
// shortcut to emit an error right to the caller of a query/execute
return Err(err);
}
Err(Error::Server(error)) => {
// error will be consumed as a part of a multi-result set
self.set_pending_result(Some(ResultSetMeta::Error(error)));
return Ok(());
}
Err(err) => {
// non-server errors are fatal
return Err(err);
}
};
match packet.get(0) {
Some(0x00) => {
self.set_pending_result(Some(P::result_set_meta(Arc::from(
Vec::new().into_boxed_slice(),
))));
}
Some(0xFB) => self.handle_local_infile::<P>(&*packet).await?,
_ => self.handle_result_set::<P>(&*packet).await?,
}
Ok(())
}
/// Will handle local infile packet.
pub(crate) async fn handle_local_infile<P>(&mut self, packet: &[u8]) -> Result<()>
where
P: Protocol,
{
let local_infile = parse_local_infile_packet(&*packet)?;
let (local_infile, handler) = match self.opts().local_infile_handler() {
Some(handler) => ((local_infile.into_owned(), handler)),
None => return Err(DriverError::NoLocalInfileHandler.into()),
};
let mut reader = handler.handle(local_infile.file_name_ref()).await?;
let mut buf = [0; 4096];
loop {
let read = reader.read(&mut buf[..]).await?;
self.write_packet(&buf[..read]).await?;
if read == 0 {
break;
}
}
self.read_packet().await?;
self.set_pending_result(Some(P::result_set_meta(Arc::from(
Vec::new().into_boxed_slice(),
))));
Ok(())
}
/// Helper that handles result set packet.
///
/// Requires that `packet` contains non-zero length-encoded integer.
pub(crate) async fn handle_result_set<P>(&mut self, mut packet: &[u8]) -> Result<()>
where
P: Protocol,
{
let column_count = packet.read_lenenc_int()?;
let columns = self.read_column_defs(column_count as usize).await?;
let meta = P::result_set_meta(Arc::from(columns.into_boxed_slice()));
self.set_pending_result(Some(meta));
Ok(())
}
}
Update docs
// Copyright (c) 2017 Anatoly Ikorsky
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
use mysql_common::row::convert::FromRowError;
use mysql_common::{io::ReadMysqlExt, packets::parse_local_infile_packet};
use tokio::prelude::*;
use std::{borrow::Cow, marker::PhantomData, result::Result as StdResult, sync::Arc};
use crate::{
connection_like::Connection,
error::*,
prelude::{FromRow, Protocol},
Column, Row,
};
/// Result set metadata.
///
/// Stored on the connection as the "pending result" while a result set is
/// being consumed.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum ResultSetMeta {
    /// Text result set, that may contain rows.
    Text(Arc<[Column]>),
    /// Binary result set, that may contain rows.
    Binary(Arc<[Column]>),
    /// Error result set.
    Error(ServerError),
}
impl ResultSetMeta {
    /// Returns the column list, or the server error for an error result set.
    fn columns(&self) -> StdResult<&Arc<[Column]>, &ServerError> {
        match self {
            ResultSetMeta::Text(cols) | ResultSetMeta::Binary(cols) => Ok(cols),
            ResultSetMeta::Error(err) => Err(err),
        }
    }
}
/// Result of a query or statement execution.
///
/// Represents an asynchronous query result, that may not be fully consumed.
///
/// # Note
///
/// Unconsumed query results are dropped implicitly when corresponding connection
/// is dropped or queried. Also note, that in this case all remaining errors will be
/// emitted to the caller:
///
/// ```rust
/// # use mysql_async::test_misc::get_opts;
/// # #[tokio::main]
/// # async fn main() -> mysql_async::Result<()> {
/// use mysql_async::*;
/// use mysql_async::prelude::*;
/// let mut conn = Conn::new(get_opts()).await?;
///
/// // second result set will contain an error,
/// // but the first result set is ok, so this line will pass
/// conn.query_iter("DO 1; BLABLA;").await?;
/// // `QueryResult` was dropped without being consumed
///
/// // driver must cleanup any unconsumed result to perform another query on `conn`,
/// // so this operation will be performed implicitly, but the unconsumed result
/// // contains an error claiming about 'BLABLA', so this error will be emitted here:
/// assert!(conn.query_iter("DO 1").await.unwrap_err().to_string().contains("BLABLA"));
///
/// # conn.disconnect().await }
/// ```
#[derive(Debug)]
pub struct QueryResult<'a, 't: 'a, P> {
    // Borrowed or owned connection the rows are read from.
    conn: Connection<'a, 't>,
    // Binds this result to its wire protocol (`P`); no data is stored.
    __phantom: PhantomData<P>,
}
impl<'a, 't: 'a, P> QueryResult<'a, 't, P>
where
P: Protocol,
{
/// Wraps a connection into a `QueryResult`.
pub fn new<T: Into<Connection<'a, 't>>>(conn: T) -> Self {
    QueryResult {
        conn: conn.into(),
        __phantom: PhantomData,
    }
}
/// Returns `true` if this query result may contain rows.
///
/// If `false` then no rows possible for this query result (e.g. result of an UPDATE query).
fn has_rows(&self) -> bool {
    self.conn
        .get_pending_result()
        .and_then(|meta| meta.columns().map(|columns| columns.len() > 0).ok())
        .unwrap_or(false)
}
/// `true` if there are no more rows nor result sets in this query.
pub fn is_empty(&self) -> bool {
    !self.has_rows() && !self.conn.more_results_exists()
}
/// Returns the next row of the current result set, or `Ok(None)` when the
/// current result set is exhausted (advancing to the next result set, if any).
pub async fn next(&mut self) -> Result<Option<Row>> {
    loop {
        let columns = match self.conn.get_pending_result() {
            Some(ResultSetMeta::Text(cols)) | Some(ResultSetMeta::Binary(cols)) => {
                Ok(Some(cols.clone()))
            }
            Some(ResultSetMeta::Error(err)) => Err(Error::from(err.clone())),
            None => Ok(None),
        };
        match columns {
            Ok(Some(columns)) => {
                if columns.is_empty() {
                    // Empty, but not yet consumed result set.
                    self.conn.set_pending_result(None);
                } else {
                    // Not yet consumed non-empty result set.
                    let packet = match self.conn.read_packet().await {
                        Ok(packet) => packet,
                        Err(err) => {
                            // Next row contained an error. No more data will follow.
                            self.conn.set_pending_result(None);
                            return Err(err);
                        }
                    };
                    if P::is_last_result_set_packet(self.conn.capabilities(), &packet) {
                        // `packet` is a result set terminator.
                        self.conn.set_pending_result(None);
                    } else {
                        // `packet` is a result set row.
                        return Ok(Some(P::read_result_set_row(&packet, columns)?));
                    }
                }
            }
            Ok(None) => {
                // Consumed result set.
                if self.conn.more_results_exists() {
                    // More data will follow.
                    self.conn.sync_seq_id();
                    self.conn.read_result_set::<P>(false).await?;
                    return Ok(None);
                } else {
                    // The end of a query result.
                    return Ok(None);
                }
            }
            Err(err) => {
                // Error result set. No more data will follow.
                self.conn.set_pending_result(None);
                return Err(err);
            }
        }
    }
}
/// Last insert id, if any.
pub fn last_insert_id(&self) -> Option<u64> {
    // Delegates to status stored on the underlying connection.
    self.conn.last_insert_id()
}
/// Number of affected rows as reported by the server, or `0`.
pub fn affected_rows(&self) -> u64 {
    // Delegates to status stored on the underlying connection.
    self.conn.affected_rows()
}
/// Text information as reported by the server, or an empty string.
///
/// Read from the underlying connection's bookkeeping.
pub fn info(&self) -> Cow<'_, str> {
    self.conn.info()
}
/// Number of warnings as reported by the server, or `0`.
///
/// Read from the underlying connection's bookkeeping.
pub fn warnings(&self) -> u16 {
    self.conn.get_warnings()
}
/// Collects the current result set of this query result.
///
/// Each row is converted to `R` via `R::from_row(Row)`. Collection stops at
/// the nearest result set boundary, so call `collect` once per result set
/// (e.g. `SELECT 'foo'; SELECT 'foo', 'bar';` produces two result sets).
/// Use [`QueryResult::is_empty`] to check whether more result sets remain.
///
/// # Panic
///
/// Panics if a row isn't convertible to `R` (i.e. programmer error or unknown schema).
/// * In case of programmer error see [`FromRow`] docs;
/// * In case of unknown schema use [`QueryResult::try_collect`].
pub async fn collect<R>(&mut self) -> Result<Vec<R>>
where
    R: FromRow + Send + 'static,
{
    let mut rows = Vec::new();
    while let Some(row) = self.next().await? {
        rows.push(FromRow::from_row(row));
    }
    Ok(rows)
}
/// Collects the current result set of this query result.
///
/// It works the same way as [`QueryResult::collect`] but won't panic if row isn't convertible
/// to `R` — each row yields a `Result` from `R::from_row_opt` instead.
pub async fn try_collect<R>(&mut self) -> Result<Vec<StdResult<R, FromRowError>>>
where
    R: FromRow + Send + 'static,
{
    let mut rows = Vec::new();
    while let Some(row) = self.next().await? {
        rows.push(FromRow::from_row_opt(row));
    }
    Ok(rows)
}
/// Collects the current result set of this query result and drops everything else.
///
/// # Panic
///
/// Panics if a row isn't convertible to `R` (i.e. programmer error or unknown schema).
/// * In case of programmer error see `FromRow` docs;
/// * In case of unknown schema use [`QueryResult::try_collect`].
pub async fn collect_and_drop<R>(mut self) -> Result<Vec<R>>
where
    R: FromRow + Send + 'static,
{
    // Collect first; only a successful collect proceeds to draining the rest.
    let rows = self.collect::<R>().await?;
    self.drop_result().await?;
    Ok(rows)
}
/// Collects the current result set of this query result and drops everything else.
///
/// It works the same way as [`QueryResult::collect_and_drop`] but won't panic if row isn't
/// convertible to `R`.
pub async fn try_collect_and_drop<R>(mut self) -> Result<Vec<StdResult<R, FromRowError>>>
where
    R: FromRow + Send + 'static,
{
    let rows = self.try_collect().await?;
    self.drop_result().await?;
    Ok(rows)
}
/// Executes `fun` on every row of the current result set.
///
/// It will stop on the nearest result set boundary (see `QueryResult::collect` docs).
pub async fn for_each<F>(&mut self, mut fun: F) -> Result<()>
where
    F: FnMut(Row),
{
    // Guard clause: nothing to iterate.
    if self.is_empty() {
        return Ok(());
    }
    while let Some(row) = self.next().await? {
        fun(row);
    }
    Ok(())
}
/// Executes `fun` on every row of the current result set and drops everything else.
pub async fn for_each_and_drop<F>(mut self, fun: F) -> Result<()>
where
    F: FnMut(Row),
{
    self.for_each(fun).await?;
    self.drop_result().await
}
/// Maps every row of the current result set to `U` using `fun`.
///
/// It will stop on the nearest result set boundary (see `QueryResult::collect` docs).
pub async fn map<F, U>(&mut self, mut fun: F) -> Result<Vec<U>>
where
    F: FnMut(Row) -> U,
{
    let mut acc = Vec::new();
    while let Some(row) = self.next().await? {
        // `fun` takes the `Row` directly. The previous
        // `fun(crate::from_row(row))` was an identity round-trip: with
        // `F: FnMut(Row) -> U` the conversion target was inferred as `Row`
        // itself, and `FromRow for Row` just returns the row unchanged.
        acc.push(fun(row));
    }
    Ok(acc)
}
/// Map every row of the current result set to `U` using `fun` and drops everything else.
pub async fn map_and_drop<F, U>(mut self, fun: F) -> Result<Vec<U>>
where
    F: FnMut(Row) -> U,
{
    let mapped = self.map(fun).await?;
    self.drop_result().await?;
    Ok(mapped)
}
/// Reduces rows of the current result set to `U` using `fun`.
///
/// Each row is first converted to `T` via `crate::from_row` (panics on
/// conversion failure, see `FromRow`). It will stop on the nearest result set
/// boundary (see `QueryResult::collect` docs).
pub async fn reduce<T, F, U>(&mut self, init: U, mut fun: F) -> Result<U>
where
    F: FnMut(U, T) -> U,
    T: FromRow + Send + 'static,
{
    let mut acc = init;
    while let Some(row) = self.next().await? {
        acc = fun(acc, crate::from_row(row));
    }
    Ok(acc)
}
/// Reduces rows of the current result set to `U` using `fun` and drops everything else.
pub async fn reduce_and_drop<T, F, U>(mut self, init: U, fun: F) -> Result<U>
where
    F: FnMut(U, T) -> U,
    T: FromRow + Send + 'static,
{
    let folded = self.reduce(init, fun).await?;
    self.drop_result().await?;
    Ok(folded)
}
/// Drops this query result.
///
/// Drains every remaining row of every remaining result set so the
/// connection is left in a clean state.
pub async fn drop_result(mut self) -> Result<()> {
    loop {
        // Exhaust the current result set. `.is_some()` replaces the
        // `while let Some(_) = ...` form (clippy::redundant_pattern_matching).
        while self.next().await?.is_some() {}
        // `next` loads the following result set's metadata (if any); stop
        // once nothing is pending.
        if self.conn.get_pending_result().is_none() {
            break Ok(());
        }
    }
}
/// Returns a reference to a columns list of this query result.
///
/// Empty list means that this result set was never meant to contain rows.
pub fn columns_ref(&self) -> &[Column] {
    match self.conn.get_pending_result() {
        Some(meta) => match meta.columns() {
            Ok(cols) => &cols[..],
            // A stored error result set carries no columns.
            Err(_) => &[],
        },
        None => &[],
    }
}
/// Returns a copy of a columns list of this query result.
///
/// `None` if no result set is pending or the pending result is an error.
pub fn columns(&self) -> Option<Arc<[Column]>> {
    self.conn
        .get_pending_result()
        // `.ok().cloned()` replaces `.map(|columns| columns.clone()).ok()`
        // (clippy::map_clone); the `Arc` clone is a cheap refcount bump.
        .and_then(|meta| meta.columns().ok().cloned())
}
}
impl crate::Conn {
    /// Will read result set and write pending result into `self` (if any).
    ///
    /// `is_first_result_set` controls error routing: a server error on the
    /// first result set is returned straight to the caller of query/execute,
    /// while one on a subsequent result set is stored as
    /// `ResultSetMeta::Error` so it surfaces when that set is consumed.
    pub(crate) async fn read_result_set<P>(&mut self, is_first_result_set: bool) -> Result<()>
    where
        P: Protocol,
    {
        let packet = match self.read_packet().await {
            Ok(packet) => packet,
            Err(err @ Error::Server(_)) if is_first_result_set => {
                // shortcut to emit an error right to the caller of a query/execute
                return Err(err);
            }
            Err(Error::Server(error)) => {
                // error will be consumed as a part of a multi-result set
                self.set_pending_result(Some(ResultSetMeta::Error(error)));
                return Ok(());
            }
            Err(err) => {
                // non-server errors are fatal
                return Err(err);
            }
        };
        // Dispatch on the first payload byte.
        match packet.get(0) {
            Some(0x00) => {
                // OK packet — a result with no rows; record an empty column list.
                self.set_pending_result(Some(P::result_set_meta(Arc::from(
                    Vec::new().into_boxed_slice(),
                ))));
            }
            // 0xFB marks a LOCAL INFILE request from the server.
            Some(0xFB) => self.handle_local_infile::<P>(&*packet).await?,
            // Anything else begins with the column count of a result set.
            _ => self.handle_result_set::<P>(&*packet).await?,
        }
        Ok(())
    }
    /// Will handle local infile packet.
    ///
    /// Streams the requested file from the configured local-infile handler to
    /// the server (empty packet terminates the transfer), then reads the
    /// server's final response and records an empty (row-less) result.
    pub(crate) async fn handle_local_infile<P>(&mut self, packet: &[u8]) -> Result<()>
    where
        P: Protocol,
    {
        let local_infile = parse_local_infile_packet(&*packet)?;
        let (local_infile, handler) = match self.opts().local_infile_handler() {
            Some(handler) => ((local_infile.into_owned(), handler)),
            None => return Err(DriverError::NoLocalInfileHandler.into()),
        };
        let mut reader = handler.handle(local_infile.file_name_ref()).await?;
        let mut buf = [0; 4096];
        loop {
            let read = reader.read(&mut buf[..]).await?;
            // A zero-length write is the end-of-data marker, so the empty
            // packet is deliberately sent before breaking out of the loop.
            self.write_packet(&buf[..read]).await?;
            if read == 0 {
                break;
            }
        }
        // Server's response to the infile transfer (OK or error).
        self.read_packet().await?;
        self.set_pending_result(Some(P::result_set_meta(Arc::from(
            Vec::new().into_boxed_slice(),
        ))));
        Ok(())
    }
    /// Helper that handles result set packet.
    ///
    /// Requires that `packet` contains non-zero length-encoded integer
    /// (the column count); reads that many column definitions and stores
    /// them as the pending result.
    pub(crate) async fn handle_result_set<P>(&mut self, mut packet: &[u8]) -> Result<()>
    where
        P: Protocol,
    {
        let column_count = packet.read_lenenc_int()?;
        let columns = self.read_column_defs(column_count as usize).await?;
        let meta = P::result_set_meta(Arc::from(columns.into_boxed_slice()));
        self.set_pending_result(Some(meta));
        Ok(())
    }
}
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::num::{One, Zero, CheckedAdd};
use std::vec::bytes::{MutableByteVector, copy_memory};
// NOTE(review): everything below is pre-1.0 Rust (~0.8 era: `std::cast`,
// `std::unstable::intrinsics`, `do ... times()` loops, bare `*T` raw
// pointers). It will not compile with any modern toolchain; the code is left
// byte-identical and only comments were added.
/// Write a u64 into a vector, which must be 8 bytes long. The value is written in big-endian
/// format.
pub fn write_u64_be(dst: &mut[u8], input: u64) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be64;
    assert!(dst.len() == 8);
    unsafe {
        // SAFETY relies on the length assert above; the slice start is
        // reinterpreted as a single i64 and byte-swapped as needed.
        let x: *mut i64 = transmute(dst.unsafe_mut_ref(0));
        *x = to_be64(input as i64);
    }
}
/// Write a u32 into a vector, which must be 4 bytes long. The value is written in big-endian
/// format.
pub fn write_u32_be(dst: &mut[u8], input: u32) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be32;
    assert!(dst.len() == 4);
    unsafe {
        let x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
        *x = to_be32(input as i32);
    }
}
/// Write a u32 into a vector, which must be 4 bytes long. The value is written in little-endian
/// format.
pub fn write_u32_le(dst: &mut[u8], input: u32) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_le32;
    assert!(dst.len() == 4);
    unsafe {
        let x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
        *x = to_le32(input as i32);
    }
}
/// Read a vector of bytes into a vector of u64s. The values are read in big-endian format.
pub fn read_u64v_be(dst: &mut[u64], input: &[u8]) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be64;
    assert!(dst.len() * 8 == input.len());
    unsafe {
        // Walks both buffers with raw pointers; assumes `dst` and `input`
        // do not alias (guaranteed by &mut vs & borrows).
        let mut x: *mut i64 = transmute(dst.unsafe_mut_ref(0));
        let mut y: *i64 = transmute(input.unsafe_ref(0));
        do dst.len().times() {
            *x = to_be64(*y);
            x = x.offset(1);
            y = y.offset(1);
        }
    }
}
/// Read a vector of bytes into a vector of u32s. The values are read in big-endian format.
pub fn read_u32v_be(dst: &mut[u32], input: &[u8]) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be32;
    assert!(dst.len() * 4 == input.len());
    unsafe {
        let mut x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
        let mut y: *i32 = transmute(input.unsafe_ref(0));
        do dst.len().times() {
            *x = to_be32(*y);
            x = x.offset(1);
            y = y.offset(1);
        }
    }
}
/// Read a vector of bytes into a vector of u32s. The values are read in little-endian format.
pub fn read_u32v_le(dst: &mut[u32], input: &[u8]) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_le32;
    assert!(dst.len() * 4 == input.len());
    unsafe {
        let mut x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
        let mut y: *i32 = transmute(input.unsafe_ref(0));
        do dst.len().times() {
            *x = to_le32(*y);
            x = x.offset(1);
            y = y.offset(1);
        }
    }
}
/// Read the value of a vector of bytes as a u32 value in little-endian format.
pub fn read_u32_le(input: &[u8]) -> u32 {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_le32;
    assert!(input.len() == 4);
    unsafe {
        let tmp: *i32 = transmute(input.unsafe_ref(0));
        return to_le32(*tmp) as u32;
    }
}
/// Read the value of a vector of bytes as a u32 value in big-endian format.
pub fn read_u32_be(input: &[u8]) -> u32 {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be32;
    assert!(input.len() == 4);
    unsafe {
        let tmp: *i32 = transmute(input.unsafe_ref(0));
        return to_be32(*tmp) as u32;
    }
}
// Stacked `#[cfg]` attributes mean OR in this Rust era: compiled for x86 or
// x86_64. The inline asm OR-accumulates the XOR of each byte pair into
// `result`, so the running time is independent of where a mismatch occurs.
// `count` must be non-zero (the loop is do-while shaped); callers guarantee
// this via the `len() == 0` early return in `fixed_time_eq`.
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
#[inline(never)]
unsafe fn fixed_time_eq_asm(mut lhsp: *u8, mut rhsp: *u8, mut count: uint) -> bool {
    use std::unstable::intrinsics::uninit;
    let mut result: u8 = 0;
    let mut tmp: u8 = uninit();
    asm!(
        "
        fixed_time_eq_loop:
        mov ($1), $4
        xor ($2), $4
        or $4, $0
        inc $1
        inc $2
        dec $3
        jnz fixed_time_eq_loop
        "
        : "=&r" (result), "=&r" (lhsp), "=&r" (rhsp), "=&r" (count), "=&r" (tmp) // output
        : "0" (result), "1" (lhsp), "2" (rhsp), "3" (count), "4" (tmp) // input
        : "cc" // clobbers
        : // flags
    );
    return result == 0;
}
// ARM variant of the same constant-time loop; needs two scratch registers
// because loads and the XOR are separate instructions here.
#[cfg(target_arch = "arm")]
#[inline(never)]
unsafe fn fixed_time_eq_asm(mut lhsp: *u8, mut rhsp: *u8, mut count: uint) -> bool {
    use std::unstable::intrinsics::uninit;
    let mut result: u8 = 0;
    let mut tmp1: u8 = uninit();
    let mut tmp2: u8 = uninit();
    asm!(
        "
        fixed_time_eq_loop:
        ldrb $4, [$1]
        ldrb $5, [$2]
        eor $4, $4, $5
        orr $0, $0, $4
        add $1, $1, #1
        add $2, $2, #1
        subs $3, $3, #1
        bne fixed_time_eq_loop
        "
        // output
        : "=&r" (result), "=&r" (lhsp), "=&r" (rhsp), "=&r" (count), "=&r" (tmp1), "=&r" (tmp2)
        : "0" (result), "1" (lhsp), "2" (rhsp), "3" (count), "4" (tmp1), "5" (tmp2) // input
        : "cc" // clobbers
        : // flags
    );
    return result == 0;
}
/// Compare two vectors using a fixed number of operations. If the two vectors are not of equal
/// length, the function returns false immediately.
pub fn fixed_time_eq(lhs: &[u8], rhs: &[u8]) -> bool {
    if lhs.len() != rhs.len() {
        return false;
    }
    // Empty slices are trivially equal; this also keeps `count` non-zero for
    // the asm helper's do-while loop.
    if lhs.len() == 0 {
        return true;
    }
    let count = lhs.len();
    unsafe {
        let lhsp = lhs.unsafe_ref(0);
        let rhsp = rhs.unsafe_ref(0);
        return fixed_time_eq_asm(lhsp, rhsp, count);
    }
}
trait ToBits {
    /// Convert the value in bytes to the number of bits, a tuple where the 1st item is the
    /// high-order value and the 2nd item is the low order value.
    fn to_bits(self) -> (Self, Self);
}
impl ToBits for u64 {
    fn to_bits(self) -> (u64, u64) {
        // bytes * 8 as a 128-bit quantity split into (high, low) u64 halves:
        // `<< 3` multiplies by 8, `>> 61` captures the 3 bits shifted out.
        return (self >> 61, self << 3);
    }
}
/// Adds the specified number of bytes to the bit count. fail!() if this would cause numeric
/// overflow.
pub fn add_bytes_to_bits<T: Int + CheckedAdd + ToBits>(bits: T, bytes: T) -> T {
    let (new_high_bits, new_low_bits) = bytes.to_bits();
    // Any overflow into the high half cannot be represented in a single T.
    if new_high_bits > Zero::zero() {
        fail!("Numeric overflow occured.")
    }
    match bits.checked_add(&new_low_bits) {
        Some(x) => return x,
        None => fail!("Numeric overflow occured.")
    }
}
/// Adds the specified number of bytes to the bit count, which is a tuple where the first element is
/// the high order value. fail!() if this would cause numeric overflow.
pub fn add_bytes_to_bits_tuple
        <T: Int + Unsigned + CheckedAdd + ToBits>
        (bits: (T, T), bytes: T) -> (T, T) {
    let (new_high_bits, new_low_bits) = bytes.to_bits();
    let (hi, low) = bits;
    // Add the low order value - if there is no overflow, then add the high order values
    // If the addition of the low order values causes overflow, add one to the high order values
    // before adding them.
    match low.checked_add(&new_low_bits) {
        Some(x) => {
            if new_high_bits == Zero::zero() {
                // This is the fast path - every other alternative will rarely occur in practice
                // considering how large an input would need to be for those paths to be used.
                return (hi, x);
            } else {
                match hi.checked_add(&new_high_bits) {
                    Some(y) => return (y, x),
                    None => fail!("Numeric overflow occured.")
                }
            }
        },
        None => {
            // Low half overflowed: carry one into the high half.
            let one: T = One::one();
            let z = match new_high_bits.checked_add(&one) {
                Some(w) => w,
                None => fail!("Numeric overflow occured.")
            };
            match hi.checked_add(&z) {
                // This re-executes the addition that was already performed earlier when overflow
                // occurred, this time allowing the overflow to happen. Technically, this could be
                // avoided by using the checked add intrinsic directly, but that involves using
                // unsafe code and is not really worthwhile considering how infrequently code will
                // run in practice. This is the reason that this function requires that the type T
                // be Unsigned - overflow is not defined for Signed types. This function could be
                // implemented for signed types as well if that were needed.
                Some(y) => return (y, low + new_low_bits),
                None => fail!("Numeric overflow occured.")
            }
        }
    }
}
/// A FixedBuffer, like its name implies, is a fixed size buffer. When the buffer becomes full, it
/// must be processed. The input() method takes care of processing and then clearing the buffer
/// automatically. However, other methods do not and require the caller to process the buffer. Any
/// method that modifies the buffer directly or provides the caller with bytes that can be modified
/// results in those bytes being marked as used by the buffer.
pub trait FixedBuffer {
    /// Input a vector of bytes. If the buffer becomes full, process it with the provided
    /// function and then clear the buffer.
    fn input(&mut self, input: &[u8], func: &fn(&[u8]));
    /// Reset the buffer.
    fn reset(&mut self);
    /// Zero the buffer up until the specified index. The buffer position currently must not be
    /// greater than that index.
    fn zero_until(&mut self, idx: uint);
    /// Get a slice of the buffer of the specified size. There must be at least that many bytes
    /// remaining in the buffer.
    fn next<'s>(&'s mut self, len: uint) -> &'s mut [u8];
    /// Get the current buffer. The buffer must already be full. This clears the buffer as well.
    fn full_buffer<'s>(&'s mut self) -> &'s [u8];
    /// Get the current position of the buffer.
    fn position(&self) -> uint;
    /// Get the number of bytes remaining in the buffer until it is full.
    fn remaining(&self) -> uint;
    /// Get the size of the buffer
    fn size(&self) -> uint;
}
// Generates a FixedBuffer impl for a struct with `buffer: [u8, ..$size]` and
// `buffer_idx: uint` fields (a macro because this era has no const generics).
macro_rules! impl_fixed_buffer( ($name:ident, $size:expr) => (
    impl FixedBuffer for $name {
        fn input(&mut self, input: &[u8], func: &fn(&[u8])) {
            let mut i = 0;
            // FIXME: #6304 - This local variable shouldn't be necessary.
            let size = $size;
            // If there is already data in the buffer, copy as much as we can into it and process
            // the data if the buffer becomes full.
            if self.buffer_idx != 0 {
                let buffer_remaining = size - self.buffer_idx;
                if input.len() >= buffer_remaining {
                    copy_memory(
                        self.buffer.mut_slice(self.buffer_idx, size),
                        input.slice_to(buffer_remaining),
                        buffer_remaining);
                    self.buffer_idx = 0;
                    func(self.buffer);
                    i += buffer_remaining;
                } else {
                    copy_memory(
                        self.buffer.mut_slice(self.buffer_idx, self.buffer_idx + input.len()),
                        input,
                        input.len());
                    self.buffer_idx += input.len();
                    return;
                }
            }
            // While we have at least a full buffer size chunk's worth of data, process that data
            // without copying it into the buffer
            while input.len() - i >= size {
                func(input.slice(i, i + size));
                i += size;
            }
            // Copy any input data into the buffer. At this point in the method, the amount of
            // data left in the input vector will be less than the buffer size and the buffer will
            // be empty.
            let input_remaining = input.len() - i;
            copy_memory(
                self.buffer.mut_slice(0, input_remaining),
                input.slice_from(i),
                input.len() - i);
            self.buffer_idx += input_remaining;
        }
        fn reset(&mut self) {
            self.buffer_idx = 0;
        }
        fn zero_until(&mut self, idx: uint) {
            assert!(idx >= self.buffer_idx);
            self.buffer.mut_slice(self.buffer_idx, idx).set_memory(0);
            self.buffer_idx = idx;
        }
        fn next<'s>(&'s mut self, len: uint) -> &'s mut [u8] {
            // Reserve `len` bytes, then hand back the just-reserved window.
            self.buffer_idx += len;
            return self.buffer.mut_slice(self.buffer_idx - len, self.buffer_idx);
        }
        fn full_buffer<'s>(&'s mut self) -> &'s [u8] {
            assert!(self.buffer_idx == $size);
            self.buffer_idx = 0;
            return self.buffer.slice_to($size);
        }
        fn position(&self) -> uint { self.buffer_idx }
        fn remaining(&self) -> uint { $size - self.buffer_idx }
        fn size(&self) -> uint { $size }
    }
))
/// A fixed size buffer of 64 bytes useful for cryptographic operations.
pub struct FixedBuffer64 {
    priv buffer: [u8, ..64],
    priv buffer_idx: uint,
}
impl FixedBuffer64 {
    /// Create a new buffer
    pub fn new() -> FixedBuffer64 {
        return FixedBuffer64 {
            buffer: [0u8, ..64],
            buffer_idx: 0
        };
    }
}
impl_fixed_buffer!(FixedBuffer64, 64)
/// A fixed size buffer of 128 bytes useful for cryptographic operations.
pub struct FixedBuffer128 {
    priv buffer: [u8, ..128],
    priv buffer_idx: uint,
}
impl FixedBuffer128 {
    /// Create a new buffer
    pub fn new() -> FixedBuffer128 {
        return FixedBuffer128 {
            buffer: [0u8, ..128],
            buffer_idx: 0
        };
    }
}
impl_fixed_buffer!(FixedBuffer128, 128)
/// The StandardPadding trait adds a method useful for various hash algorithms to a FixedBuffer
/// struct.
pub trait StandardPadding {
    /// Add standard padding to the buffer. The buffer must not be full when this method is called
    /// and is guaranteed to have exactly rem remaining bytes when it returns. If there are not at
    /// least rem bytes available, the buffer will be zero padded, processed, cleared, and then
    /// filled with zeros again until only rem bytes are remaining.
    fn standard_padding(&mut self, rem: uint, func: &fn(&[u8]));
}
impl <T: FixedBuffer> StandardPadding for T {
    fn standard_padding(&mut self, rem: uint, func: &fn(&[u8])) {
        let size = self.size();
        // 128 == 0x80, the marker byte that begins the padding.
        self.next(1)[0] = 128;
        // Not enough room for the length field: pad out this block, process
        // it, and continue padding into a fresh block.
        if self.remaining() < rem {
            self.zero_until(size);
            func(self.full_buffer());
        }
        self.zero_until(size - rem);
    }
}
#[cfg(test)]
pub mod test {
    use std::rand::{IsaacRng, Rng};
    use std::vec;
    use cryptoutil::{add_bytes_to_bits, add_bytes_to_bits_tuple, fixed_time_eq};
    use digest::Digest;
    /// Feed 1,000,000 'a's into the digest with varying input sizes and check that the result is
    /// correct.
    // NOTE(review): `buffer` is all-'a's, so always slicing from index 0 is
    // fine here regardless of the random chunk size.
    pub fn test_digest_1million_random<D: Digest>(digest: &mut D, blocksize: uint, expected: &str) {
        let total_size = 1000000;
        let buffer = vec::from_elem(blocksize * 2, 'a' as u8);
        let mut rng = IsaacRng::new_unseeded();
        let mut count = 0;
        digest.reset();
        while count < total_size {
            let next: uint = rng.gen_integer_range(0, 2 * blocksize + 1);
            let remaining = total_size - count;
            let size = if next > remaining { remaining } else { next };
            digest.input(buffer.slice_to(size));
            count += size;
        }
        let result_str = digest.result_str();
        assert!(expected == result_str);
    }
    // A normal addition - no overflow occurs (10 bytes == 80 bits; 100 + 80 == 180)
    #[test]
    fn test_add_bytes_to_bits_ok() {
        assert!(add_bytes_to_bits::<u64>(100, 10) == 180);
    }
    // A simple failure case - adding 1 to the max value
    #[test]
    #[should_fail]
    fn test_add_bytes_to_bits_overflow() {
        add_bytes_to_bits::<u64>(Bounded::max_value(), 1);
    }
    // A normal addition - no overflow occurs (fast path)
    #[test]
    fn test_add_bytes_to_bits_tuple_ok() {
        assert!(add_bytes_to_bits_tuple::<u64>((5, 100), 10) == (5, 180));
    }
    // The low order value overflows into the high order value
    #[test]
    fn test_add_bytes_to_bits_tuple_ok2() {
        assert!(add_bytes_to_bits_tuple::<u64>((5, Bounded::max_value()), 1) == (6, 7));
    }
    // The value to add is too large to be converted into bits without overflowing its type
    #[test]
    fn test_add_bytes_to_bits_tuple_ok3() {
        assert!(add_bytes_to_bits_tuple::<u64>((5, 0), 0x4000000000000001) == (7, 8));
    }
    // A simple failure case - adding 1 to the max value
    #[test]
    #[should_fail]
    fn test_add_bytes_to_bits_tuple_overflow() {
        add_bytes_to_bits_tuple::<u64>((Bounded::max_value(), Bounded::max_value()), 1);
    }
    // The value to add is too large to convert to bytes without overflowing its type, but the high
    // order value from this conversion overflows when added to the existing high order value
    #[test]
    #[should_fail]
    fn test_add_bytes_to_bits_tuple_overflow2() {
        let value: u64 = Bounded::max_value();
        add_bytes_to_bits_tuple::<u64>((value - 1, 0), 0x8000000000000000);
    }
    // Exercises equal, unequal-at-each-position, and all-different inputs.
    #[test]
    pub fn test_fixed_time_eq() {
        let a = [0, 1, 2];
        let b = [0, 1, 2];
        let c = [0, 1, 9];
        let d = [9, 1, 2];
        let e = [2, 1, 0];
        let f = [2, 2, 2];
        let g = [0, 0, 0];
        assert!(fixed_time_eq(a, a));
        assert!(fixed_time_eq(a, b));
        assert!(!fixed_time_eq(a, c));
        assert!(!fixed_time_eq(a, d));
        assert!(!fixed_time_eq(a, e));
        assert!(!fixed_time_eq(a, f));
        assert!(!fixed_time_eq(a, g));
    }
}
Switch over to using std::rand::distributions::Range instead of gen_integer_range, which was removed from libstd.
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::num::{One, Zero, CheckedAdd};
use std::vec::bytes::{MutableByteVector, copy_memory};
// NOTE(review): duplicate copy of the pre-1.0 Rust section above (the file is
// a concatenation of historical revisions). Left byte-identical; comments only.
/// Write a u64 into a vector, which must be 8 bytes long. The value is written in big-endian
/// format.
pub fn write_u64_be(dst: &mut[u8], input: u64) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be64;
    assert!(dst.len() == 8);
    unsafe {
        // SAFETY relies on the length assert above.
        let x: *mut i64 = transmute(dst.unsafe_mut_ref(0));
        *x = to_be64(input as i64);
    }
}
/// Write a u32 into a vector, which must be 4 bytes long. The value is written in big-endian
/// format.
pub fn write_u32_be(dst: &mut[u8], input: u32) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be32;
    assert!(dst.len() == 4);
    unsafe {
        let x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
        *x = to_be32(input as i32);
    }
}
/// Write a u32 into a vector, which must be 4 bytes long. The value is written in little-endian
/// format.
pub fn write_u32_le(dst: &mut[u8], input: u32) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_le32;
    assert!(dst.len() == 4);
    unsafe {
        let x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
        *x = to_le32(input as i32);
    }
}
/// Read a vector of bytes into a vector of u64s. The values are read in big-endian format.
pub fn read_u64v_be(dst: &mut[u64], input: &[u8]) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be64;
    assert!(dst.len() * 8 == input.len());
    unsafe {
        let mut x: *mut i64 = transmute(dst.unsafe_mut_ref(0));
        let mut y: *i64 = transmute(input.unsafe_ref(0));
        do dst.len().times() {
            *x = to_be64(*y);
            x = x.offset(1);
            y = y.offset(1);
        }
    }
}
/// Read a vector of bytes into a vector of u32s. The values are read in big-endian format.
pub fn read_u32v_be(dst: &mut[u32], input: &[u8]) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be32;
    assert!(dst.len() * 4 == input.len());
    unsafe {
        let mut x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
        let mut y: *i32 = transmute(input.unsafe_ref(0));
        do dst.len().times() {
            *x = to_be32(*y);
            x = x.offset(1);
            y = y.offset(1);
        }
    }
}
/// Read a vector of bytes into a vector of u32s. The values are read in little-endian format.
pub fn read_u32v_le(dst: &mut[u32], input: &[u8]) {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_le32;
    assert!(dst.len() * 4 == input.len());
    unsafe {
        let mut x: *mut i32 = transmute(dst.unsafe_mut_ref(0));
        let mut y: *i32 = transmute(input.unsafe_ref(0));
        do dst.len().times() {
            *x = to_le32(*y);
            x = x.offset(1);
            y = y.offset(1);
        }
    }
}
/// Read the value of a vector of bytes as a u32 value in little-endian format.
pub fn read_u32_le(input: &[u8]) -> u32 {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_le32;
    assert!(input.len() == 4);
    unsafe {
        let tmp: *i32 = transmute(input.unsafe_ref(0));
        return to_le32(*tmp) as u32;
    }
}
/// Read the value of a vector of bytes as a u32 value in big-endian format.
pub fn read_u32_be(input: &[u8]) -> u32 {
    use std::cast::transmute;
    use std::unstable::intrinsics::to_be32;
    assert!(input.len() == 4);
    unsafe {
        let tmp: *i32 = transmute(input.unsafe_ref(0));
        return to_be32(*tmp) as u32;
    }
}
// Stacked `#[cfg]` attributes mean OR in this Rust era: x86 or x86_64.
// The asm OR-accumulates per-byte XOR differences so the running time is
// independent of where a mismatch occurs. `count` must be non-zero.
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
#[inline(never)]
unsafe fn fixed_time_eq_asm(mut lhsp: *u8, mut rhsp: *u8, mut count: uint) -> bool {
    use std::unstable::intrinsics::uninit;
    let mut result: u8 = 0;
    let mut tmp: u8 = uninit();
    asm!(
        "
        fixed_time_eq_loop:
        mov ($1), $4
        xor ($2), $4
        or $4, $0
        inc $1
        inc $2
        dec $3
        jnz fixed_time_eq_loop
        "
        : "=&r" (result), "=&r" (lhsp), "=&r" (rhsp), "=&r" (count), "=&r" (tmp) // output
        : "0" (result), "1" (lhsp), "2" (rhsp), "3" (count), "4" (tmp) // input
        : "cc" // clobbers
        : // flags
    );
    return result == 0;
}
// ARM variant of the same constant-time loop.
#[cfg(target_arch = "arm")]
#[inline(never)]
unsafe fn fixed_time_eq_asm(mut lhsp: *u8, mut rhsp: *u8, mut count: uint) -> bool {
    use std::unstable::intrinsics::uninit;
    let mut result: u8 = 0;
    let mut tmp1: u8 = uninit();
    let mut tmp2: u8 = uninit();
    asm!(
        "
        fixed_time_eq_loop:
        ldrb $4, [$1]
        ldrb $5, [$2]
        eor $4, $4, $5
        orr $0, $0, $4
        add $1, $1, #1
        add $2, $2, #1
        subs $3, $3, #1
        bne fixed_time_eq_loop
        "
        // output
        : "=&r" (result), "=&r" (lhsp), "=&r" (rhsp), "=&r" (count), "=&r" (tmp1), "=&r" (tmp2)
        : "0" (result), "1" (lhsp), "2" (rhsp), "3" (count), "4" (tmp1), "5" (tmp2) // input
        : "cc" // clobbers
        : // flags
    );
    return result == 0;
}
/// Compare two vectors using a fixed number of operations. If the two vectors are not of equal
/// length, the function returns false immediately.
pub fn fixed_time_eq(lhs: &[u8], rhs: &[u8]) -> bool {
    if lhs.len() != rhs.len() {
        return false;
    }
    // Keeps `count` non-zero for the asm helper's do-while loop.
    if lhs.len() == 0 {
        return true;
    }
    let count = lhs.len();
    unsafe {
        let lhsp = lhs.unsafe_ref(0);
        let rhsp = rhs.unsafe_ref(0);
        return fixed_time_eq_asm(lhsp, rhsp, count);
    }
}
trait ToBits {
    /// Convert the value in bytes to the number of bits, a tuple where the 1st item is the
    /// high-order value and the 2nd item is the low order value.
    fn to_bits(self) -> (Self, Self);
}
impl ToBits for u64 {
    fn to_bits(self) -> (u64, u64) {
        // bytes * 8 as a 128-bit (high, low) pair: `<< 3` multiplies by 8,
        // `>> 61` captures the 3 bits shifted out.
        return (self >> 61, self << 3);
    }
}
/// Adds the specified number of bytes to the bit count. fail!() if this would cause numeric
/// overflow.
pub fn add_bytes_to_bits<T: Int + CheckedAdd + ToBits>(bits: T, bytes: T) -> T {
    let (new_high_bits, new_low_bits) = bytes.to_bits();
    // Any overflow into the high half cannot be represented in a single T.
    if new_high_bits > Zero::zero() {
        fail!("Numeric overflow occured.")
    }
    match bits.checked_add(&new_low_bits) {
        Some(x) => return x,
        None => fail!("Numeric overflow occured.")
    }
}
/// Adds the specified number of bytes to the bit count, which is a tuple where the first element is
/// the high order value. fail!() if this would cause numeric overflow.
pub fn add_bytes_to_bits_tuple
        <T: Int + Unsigned + CheckedAdd + ToBits>
        (bits: (T, T), bytes: T) -> (T, T) {
    let (new_high_bits, new_low_bits) = bytes.to_bits();
    let (hi, low) = bits;
    // Add the low order value - if there is no overflow, then add the high order values
    // If the addition of the low order values causes overflow, add one to the high order values
    // before adding them.
    match low.checked_add(&new_low_bits) {
        Some(x) => {
            if new_high_bits == Zero::zero() {
                // This is the fast path - every other alternative will rarely occur in practice
                // considering how large an input would need to be for those paths to be used.
                return (hi, x);
            } else {
                match hi.checked_add(&new_high_bits) {
                    Some(y) => return (y, x),
                    None => fail!("Numeric overflow occured.")
                }
            }
        },
        None => {
            // Low half overflowed: carry one into the high half.
            let one: T = One::one();
            let z = match new_high_bits.checked_add(&one) {
                Some(w) => w,
                None => fail!("Numeric overflow occured.")
            };
            match hi.checked_add(&z) {
                // This re-executes the addition that was already performed earlier when overflow
                // occurred, this time allowing the overflow to happen. Technically, this could be
                // avoided by using the checked add intrinsic directly, but that involves using
                // unsafe code and is not really worthwhile considering how infrequently code will
                // run in practice. This is the reason that this function requires that the type T
                // be Unsigned - overflow is not defined for Signed types. This function could be
                // implemented for signed types as well if that were needed.
                Some(y) => return (y, low + new_low_bits),
                None => fail!("Numeric overflow occured.")
            }
        }
    }
}
/// A FixedBuffer, like its name implies, is a fixed size buffer. When the buffer becomes full, it
/// must be processed. The input() method takes care of processing and then clearing the buffer
/// automatically. However, other methods do not and require the caller to process the buffer. Any
/// method that modifies the buffer directly or provides the caller with bytes that can be modified
/// results in those bytes being marked as used by the buffer.
pub trait FixedBuffer {
    /// Input a vector of bytes. If the buffer becomes full, process it with the provided
    /// function and then clear the buffer.
    fn input(&mut self, input: &[u8], func: &fn(&[u8]));
    /// Reset the buffer.
    fn reset(&mut self);
    /// Zero the buffer up until the specified index. The buffer position currently must not be
    /// greater than that index.
    fn zero_until(&mut self, idx: uint);
    /// Get a slice of the buffer of the specified size. There must be at least that many bytes
    /// remaining in the buffer.
    fn next<'s>(&'s mut self, len: uint) -> &'s mut [u8];
    /// Get the current buffer. The buffer must already be full. This clears the buffer as well.
    fn full_buffer<'s>(&'s mut self) -> &'s [u8];
    /// Get the current position of the buffer.
    fn position(&self) -> uint;
    /// Get the number of bytes remaining in the buffer until it is full.
    fn remaining(&self) -> uint;
    /// Get the size of the buffer
    fn size(&self) -> uint;
}
// Generates a FixedBuffer impl for a struct with `buffer: [u8, ..$size]` and
// `buffer_idx: uint` fields (a macro because this era has no const generics).
macro_rules! impl_fixed_buffer( ($name:ident, $size:expr) => (
    impl FixedBuffer for $name {
        fn input(&mut self, input: &[u8], func: &fn(&[u8])) {
            let mut i = 0;
            // FIXME: #6304 - This local variable shouldn't be necessary.
            let size = $size;
            // If there is already data in the buffer, copy as much as we can into it and process
            // the data if the buffer becomes full.
            if self.buffer_idx != 0 {
                let buffer_remaining = size - self.buffer_idx;
                if input.len() >= buffer_remaining {
                    copy_memory(
                        self.buffer.mut_slice(self.buffer_idx, size),
                        input.slice_to(buffer_remaining),
                        buffer_remaining);
                    self.buffer_idx = 0;
                    func(self.buffer);
                    i += buffer_remaining;
                } else {
                    copy_memory(
                        self.buffer.mut_slice(self.buffer_idx, self.buffer_idx + input.len()),
                        input,
                        input.len());
                    self.buffer_idx += input.len();
                    return;
                }
            }
            // While we have at least a full buffer size chunk's worth of data, process that data
            // without copying it into the buffer
            while input.len() - i >= size {
                func(input.slice(i, i + size));
                i += size;
            }
            // Copy any input data into the buffer. At this point in the method, the amount of
            // data left in the input vector will be less than the buffer size and the buffer will
            // be empty.
            let input_remaining = input.len() - i;
            copy_memory(
                self.buffer.mut_slice(0, input_remaining),
                input.slice_from(i),
                input.len() - i);
            self.buffer_idx += input_remaining;
        }
        fn reset(&mut self) {
            self.buffer_idx = 0;
        }
        fn zero_until(&mut self, idx: uint) {
            assert!(idx >= self.buffer_idx);
            self.buffer.mut_slice(self.buffer_idx, idx).set_memory(0);
            self.buffer_idx = idx;
        }
        fn next<'s>(&'s mut self, len: uint) -> &'s mut [u8] {
            // Reserve `len` bytes, then hand back the just-reserved window.
            self.buffer_idx += len;
            return self.buffer.mut_slice(self.buffer_idx - len, self.buffer_idx);
        }
        fn full_buffer<'s>(&'s mut self) -> &'s [u8] {
            assert!(self.buffer_idx == $size);
            self.buffer_idx = 0;
            return self.buffer.slice_to($size);
        }
        fn position(&self) -> uint { self.buffer_idx }
        fn remaining(&self) -> uint { $size - self.buffer_idx }
        fn size(&self) -> uint { $size }
    }
))
/// A fixed size buffer of 64 bytes useful for cryptographic operations.
pub struct FixedBuffer64 {
priv buffer: [u8, ..64],
priv buffer_idx: uint,
}
impl FixedBuffer64 {
/// Create a new buffer
pub fn new() -> FixedBuffer64 {
return FixedBuffer64 {
buffer: [0u8, ..64],
buffer_idx: 0
};
}
}
impl_fixed_buffer!(FixedBuffer64, 64)
/// A fixed size buffer of 128 bytes useful for cryptographic operations.
pub struct FixedBuffer128 {
priv buffer: [u8, ..128],
priv buffer_idx: uint,
}
impl FixedBuffer128 {
/// Create a new buffer
pub fn new() -> FixedBuffer128 {
return FixedBuffer128 {
buffer: [0u8, ..128],
buffer_idx: 0
};
}
}
impl_fixed_buffer!(FixedBuffer128, 128)
/// The StandardPadding trait adds a method useful for various hash algorithms to a FixedBuffer
/// struct.
pub trait StandardPadding {
/// Add standard padding to the buffer. The buffer must not be full when this method is called
/// and is guaranteed to have exactly rem remaining bytes when it returns. If there are not at
/// least rem bytes available, the buffer will be zero padded, processed, cleared, and then
/// filled with zeros again until only rem bytes are remaining.
fn standard_padding(&mut self, rem: uint, func: &fn(&[u8]));
}
impl <T: FixedBuffer> StandardPadding for T {
fn standard_padding(&mut self, rem: uint, func: &fn(&[u8])) {
let size = self.size();
self.next(1)[0] = 128;
if self.remaining() < rem {
self.zero_until(size);
func(self.full_buffer());
}
self.zero_until(size - rem);
}
}
#[cfg(test)]
pub mod test {
use std::rand::IsaacRng;
use std::rand::distributions::{IndependentSample, Range};
use std::vec;
use cryptoutil::{add_bytes_to_bits, add_bytes_to_bits_tuple, fixed_time_eq};
use digest::Digest;
/// Feed 1,000,000 'a's into the digest with varying input sizes and check that the result is
/// correct.
pub fn test_digest_1million_random<D: Digest>(digest: &mut D, blocksize: uint, expected: &str) {
let total_size = 1000000;
let buffer = vec::from_elem(blocksize * 2, 'a' as u8);
let mut rng = IsaacRng::new_unseeded();
let range = Range::new(0, 2 * blocksize + 1);
let mut count = 0;
digest.reset();
while count < total_size {
let next = range.ind_sample(&mut rng);
let remaining = total_size - count;
let size = if next > remaining { remaining } else { next };
digest.input(buffer.slice_to(size));
count += size;
}
let result_str = digest.result_str();
assert!(expected == result_str);
}
// A normal addition - no overflow occurs
#[test]
fn test_add_bytes_to_bits_ok() {
assert!(add_bytes_to_bits::<u64>(100, 10) == 180);
}
// A simple failure case - adding 1 to the max value
#[test]
#[should_fail]
fn test_add_bytes_to_bits_overflow() {
add_bytes_to_bits::<u64>(Bounded::max_value(), 1);
}
// A normal addition - no overflow occurs (fast path)
#[test]
fn test_add_bytes_to_bits_tuple_ok() {
assert!(add_bytes_to_bits_tuple::<u64>((5, 100), 10) == (5, 180));
}
// The low order value overflows into the high order value
#[test]
fn test_add_bytes_to_bits_tuple_ok2() {
assert!(add_bytes_to_bits_tuple::<u64>((5, Bounded::max_value()), 1) == (6, 7));
}
// The value to add is too large to be converted into bits without overflowing its type
#[test]
fn test_add_bytes_to_bits_tuple_ok3() {
assert!(add_bytes_to_bits_tuple::<u64>((5, 0), 0x4000000000000001) == (7, 8));
}
// A simple failure case - adding 1 to the max value
#[test]
#[should_fail]
fn test_add_bytes_to_bits_tuple_overflow() {
add_bytes_to_bits_tuple::<u64>((Bounded::max_value(), Bounded::max_value()), 1);
}
// The value to add is too large to convert to bytes without overflowing its type, but the high
// order value from this conversion overflows when added to the existing high order value
#[test]
#[should_fail]
fn test_add_bytes_to_bits_tuple_overflow2() {
let value: u64 = Bounded::max_value();
add_bytes_to_bits_tuple::<u64>((value - 1, 0), 0x8000000000000000);
}
#[test]
pub fn test_fixed_time_eq() {
let a = [0, 1, 2];
let b = [0, 1, 2];
let c = [0, 1, 9];
let d = [9, 1, 2];
let e = [2, 1, 0];
let f = [2, 2, 2];
let g = [0, 0, 0];
assert!(fixed_time_eq(a, a));
assert!(fixed_time_eq(a, b));
assert!(!fixed_time_eq(a, c));
assert!(!fixed_time_eq(a, d));
assert!(!fixed_time_eq(a, e));
assert!(!fixed_time_eq(a, f));
assert!(!fixed_time_eq(a, g));
}
}
|
extern crate "parser-combinators" as parser_combinators;
use self::parser_combinators::{try, between, spaces, string, parser, many, many1, digit, optional, hex_digit, not_followed_by, satisfy, Parser, ParserExt, ParseResult};
use self::parser_combinators::primitives::{State, Stream};
use super::ast::*;
use super::ast::ExprNode::*;
use std::str::FromStr;
use std::num::FromStrRadix;
/// Parser for signed integer constants.
///
/// This parses signed integer constants in decimal and hexadecimal.
///
/// TODO: add support for octal
/// TODO: add support for binary
/// TODO: add support for R6RS exponents
fn sint_const<I>(input: State<I>) -> ParseResult<NumNode, I>
where I: Stream<Item=char> {
fn hex_string<I>(input: State<I>) -> ParseResult<isize, I>
where I: Stream<Item=char> {
(satisfy(|c| c == '#')
.and(satisfy(|c| c == 'x' || c == 'X')))
.with(many1::<Vec<_>, _>(hex_digit()))
.map(|x| {
isize::from_str_radix(
x.iter()
.fold(
String::new(),
|mut s: String, i| { s.push(*i); s })
.as_slice(),
16).unwrap()
}).parse_state(input)
}
fn dec_string<I>(input: State<I>) -> ParseResult<isize, I>
where I: Stream<Item=char> {
optional(satisfy(|c| c == '#')
.and(satisfy(|c| c == 'd' || c == 'D')))
.with(many1::<Vec<_>, _>(digit())
.map(|x| isize::from_str(x.iter().fold(
String::new(), |mut s: String, i| { s.push(*i); s })
.as_slice()
).unwrap()
)).parse_state(input)
}
optional(satisfy(|c| c == '-'))
.and(
try(parser(hex_string))
.or(parser(dec_string))
)
.map(|x| {
if let Some(sign) = x.0 {
let mut s = String::new();
s.push(sign);
s.push('1');
x.1 * isize::from_str(s.as_slice()).unwrap()
} else {
x.1
}
})
.skip(not_followed_by(satisfy(|c|
c == 'u' || c == 'U' || c == '.' || c == 'f' || c == 'F')
))
.map(|x: isize| NumNode::IntConst(IntNode{value: x}))
.parse_state(input)
}
/// Parser for unsigned integer constants.
///
/// This parses unssigned integer constants in decimal and hexadecimal.
///
/// TODO: add support for octal
/// TODO: add support for binary
/// TODO: add support for R6RS exponents
fn uint_const<I>(input: State<I>) -> ParseResult<NumNode, I>
where I: Stream<Item=char> {
fn hex_string<I>(input: State<I>) -> ParseResult<usize, I>
where I: Stream<Item=char> {
(satisfy(|c| c == '#')
.and(satisfy(|c| c == 'x' || c == 'X')))
.with(many1::<Vec<_>, _>(hex_digit()))
.map(|x| usize::from_str_radix(
x.iter()
.fold(
String::new(),
|mut s: String, i| { s.push(*i); s })
.as_slice(),
16).unwrap()
).parse_state(input)
}
try(parser(hex_string))
.or( many1::<Vec<_>, _>(digit())
.map(|x|usize::from_str(x.iter().fold(
String::new(), |mut s: String, i| { s.push(*i); s })
.as_slice()
).unwrap())
)
.skip(satisfy(|c| c == 'u' || c == 'U'))
.map(|x: usize| NumNode::UIntConst(UIntNode{value: x}))
.parse_state(input)
}
/// Parser for floating-point constants.
///
/// This parses floating-point constants. Currently, this parser
/// recognizes numbers with decimal points as floating point, followed
/// by an optional `f` or `F`. Numbers with `f`s but no decimal points,
/// i.e. `1F`, are currently not recognized. While this form of number
/// is not specified by R6RS, I'd like to support it anyway as it's
/// a common form for floating-point numbers. Priority: low.
fn float_const<I>(input: State<I>) -> ParseResult<NumNode, I>
where I: Stream<Item=char> {
many1::<Vec<_>, _>(digit())
.and(satisfy(|c| c == '.'))
.and(many1::<Vec<_>, _>(digit()))
.map(|x| {
let mut s = String::new();
for i in (x.0).0.iter() { s.push(*i); } ;
s.push((x.0).1);
for i in x.1.iter() { s.push(*i); };
NumNode::FloatConst(FloatNode{
value: f64::from_str(s.as_slice()).unwrap()
})
})
.skip(optional(satisfy(|c| c == 'f' || c == 'F')))
.parse_state(input)
}
/// Parses boolean constants.
///
/// Note that this parser recognizes the strings `"true"` and `"false"`
/// as true and false. While this is not specified in R6RS, the use of
/// these tokens is common enough in other programming languages that
/// I've decided that Seax Scheme should support it as well. This may
/// be removed in a future version if it causes unforseen compatibility
/// issues.
///
/// `#t`, `#T`, or `true` -> `true`
/// `#f`, `#F`, or `false` -> `false`
pub fn bool_const<I>(input: State<I>) -> ParseResult<BoolNode, I>
where I: Stream<Item=char> {
let t_const = try(string("#t"))
.or(try(string("#T")))
.or(try(string("true")))
.map(|_| BoolNode{ value: true });
let f_const = try(string("#f"))
.or(try(string("#F")))
.or(try(string("false")))
.map(|_| BoolNode{ value: false });
t_const
.or(f_const)
.parse_state(input)
}
/// Parses a floating-point, signed integer, or unsigned integer constant.
pub fn number<I>(input: State<I>) -> ParseResult<NumNode, I>
where I: Stream<Item=char> {
try(parser(sint_const))
.or(try(parser(uint_const)))
.or(try(parser(float_const)))
.parse_state(input)
}
/// Parser for valid R6RS identifiers.
///
/// An identifier may begin with an alphabetic character or
/// one of the following special characters `!`, `$`, `&`, `:`, `^`,
/// `<`, `>`, `_`,`~`,`\`, or `?`. Subsequent characters may also include
/// numbers or the special characters `+`, `-`, `.`, and `@`.
///
/// Essentially, this parser recognizes the regular expression
/// `[a-zA-Z!\$%:\^<>_~\\\?][a-zA-Z0-9!\$%:\^<>_~\\\?\+\-\.@]*`.
///
/// For more information, consult the
/// [R6RS](http://www.r6rs.org/final/html/r6rs/r6rs-Z-H-7.html).
pub fn name<I>(input: State<I>) -> ParseResult<NameNode, I>
where I: Stream<Item=char> {
fn initial<I>(input: State<I>) -> ParseResult<char, I>
where I: Stream<Item=char> {
satisfy(|c|
c.is_alphabetic()
// R6RS 'special initial' characters
|| c == '!' || c == '$' || c == '%' || c == ':' || c == '^'
|| c == '<' || c == '>' || c == '_' || c == '~' || c == '\\'
|| c == '?'
).parse_state(input)
}
fn subsequent<I>(input: State<I>) -> ParseResult<char, I>
where I: Stream<Item=char> {
satisfy(|c|
c.is_alphanumeric()
// R6RS 'special initial' characters
|| c == '!' || c == '$' || c == '%' || c == ':' || c == '^'
|| c == '<' || c == '>' || c == '_' || c == '~' || c == '\\'
|| c == '?'
// R6RS 'special subsequent' characters
|| c == '+' || c == '-' || c == '.' || c == '@'
).parse_state(input)
}
fn rest<I>(input: State<I>) -> ParseResult<String, I>
where I: Stream<Item=char> {
many::<Vec<_>, _>(parser(subsequent))
.map(|it|
it.iter().fold(
String::new(),
|mut s: String, i| {
s.push(*i);
s
})
)
.parse_state(input)
}
parser(initial)
.and(parser(rest))
.parse_state(input)
.map(|x| {
let mut s = String::new();
s.push((x.0).0);
s.push_str(&(x.0).1);
(NameNode{ name: s}, x.1)
})
}
/// Parses Scheme expressions.
#[allow(unconditional_recursion)]
pub fn expr<I>(input: State<I>) -> ParseResult<ExprNode, I>
where I: Stream<Item=char> {
let spaces = spaces();
fn sexpr<I>(input: State<I>) -> ParseResult<ExprNode, I>
where I: Stream<Item=char> {
between(
satisfy(|c| c == '('),
satisfy(|c| c == ')'),
parser(name)
.and(many(parser(expr)))
.map(|x| {
SExpr(SExprNode {
operator: x.0,
operands: x.1
})
})
).parse_state(input)
}
fn list<I>(input: State<I>) -> ParseResult<ExprNode, I>
where I: Stream<Item=char> {
between(
satisfy(|c| c == '('),
satisfy(|c| c == ')'),
many(parser(expr))
.map(|x| {
ListConst(ListNode {
elements: x
})
})
).parse_state(input)
}
spaces.clone().with(
try(parser(sexpr))
.or(try(parser(list)))
.or(try(parser(name).map(Name)))
.or(try(parser(number).map(NumConst)))
).parse_state(input)
}
#[cfg(test)]
mod tests;
Implemented first set of char parsing tests
extern crate "parser-combinators" as parser_combinators;
use self::parser_combinators::{try, between, spaces, string, parser, many, many1, digit, any_char, optional, hex_digit, not_followed_by, satisfy, Parser, ParserExt, ParseResult};
use self::parser_combinators::primitives::{State, Stream};
use super::ast::*;
use super::ast::ExprNode::*;
use std::str::FromStr;
use std::num::FromStrRadix;
/// Parser for signed integer constants.
///
/// This parses signed integer constants in decimal and hexadecimal.
///
/// TODO: add support for octal
/// TODO: add support for binary
/// TODO: add support for R6RS exponents
fn sint_const<I>(input: State<I>) -> ParseResult<NumNode, I>
where I: Stream<Item=char> {
fn hex_string<I>(input: State<I>) -> ParseResult<isize, I>
where I: Stream<Item=char> {
(satisfy(|c| c == '#')
.and(satisfy(|c| c == 'x' || c == 'X')))
.with(many1::<Vec<_>, _>(hex_digit()))
.map(|x| {
isize::from_str_radix(
x.iter()
.fold(
String::new(),
|mut s: String, i| { s.push(*i); s })
.as_slice(),
16).unwrap()
}).parse_state(input)
}
fn dec_string<I>(input: State<I>) -> ParseResult<isize, I>
where I: Stream<Item=char> {
optional(satisfy(|c| c == '#')
.and(satisfy(|c| c == 'd' || c == 'D')))
.with(many1::<Vec<_>, _>(digit())
.map(|x| isize::from_str(x.iter().fold(
String::new(), |mut s: String, i| { s.push(*i); s })
.as_slice()
).unwrap()
)).parse_state(input)
}
optional(satisfy(|c| c == '-'))
.and(
try(parser(hex_string))
.or(parser(dec_string))
)
.map(|x| {
if let Some(sign) = x.0 {
let mut s = String::new();
s.push(sign);
s.push('1');
x.1 * isize::from_str(s.as_slice()).unwrap()
} else {
x.1
}
})
.skip(not_followed_by(satisfy(|c|
c == 'u' || c == 'U' || c == '.' || c == 'f' || c == 'F')
))
.map(|x: isize| NumNode::IntConst(IntNode{value: x}))
.parse_state(input)
}
/// Parser for unsigned integer constants.
///
/// This parses unssigned integer constants in decimal and hexadecimal.
///
/// TODO: add support for octal
/// TODO: add support for binary
/// TODO: add support for R6RS exponents
fn uint_const<I>(input: State<I>) -> ParseResult<NumNode, I>
where I: Stream<Item=char> {
fn hex_string<I>(input: State<I>) -> ParseResult<usize, I>
where I: Stream<Item=char> {
(satisfy(|c| c == '#')
.and(satisfy(|c| c == 'x' || c == 'X')))
.with(many1::<Vec<_>, _>(hex_digit()))
.map(|x| usize::from_str_radix(
x.iter()
.fold(
String::new(),
|mut s: String, i| { s.push(*i); s })
.as_slice(),
16).unwrap()
).parse_state(input)
}
try(parser(hex_string))
.or( many1::<Vec<_>, _>(digit())
.map(|x|usize::from_str(x.iter().fold(
String::new(), |mut s: String, i| { s.push(*i); s })
.as_slice()
).unwrap())
)
.skip(satisfy(|c| c == 'u' || c == 'U'))
.map(|x: usize| NumNode::UIntConst(UIntNode{value: x}))
.parse_state(input)
}
/// Parser for floating-point constants.
///
/// This parses floating-point constants. Currently, this parser
/// recognizes numbers with decimal points as floating point, followed
/// by an optional `f` or `F`. Numbers with `f`s but no decimal points,
/// i.e. `1F`, are currently not recognized. While this form of number
/// is not specified by R6RS, I'd like to support it anyway as it's
/// a common form for floating-point numbers. Priority: low.
fn float_const<I>(input: State<I>) -> ParseResult<NumNode, I>
where I: Stream<Item=char> {
many1::<Vec<_>, _>(digit())
.and(satisfy(|c| c == '.'))
.and(many1::<Vec<_>, _>(digit()))
.map(|x| {
let mut s = String::new();
for i in (x.0).0.iter() { s.push(*i); } ;
s.push((x.0).1);
for i in x.1.iter() { s.push(*i); };
NumNode::FloatConst(FloatNode{
value: f64::from_str(s.as_slice()).unwrap()
})
})
.skip(optional(satisfy(|c| c == 'f' || c == 'F')))
.parse_state(input)
}
/// Parses boolean constants.
///
/// Note that this parser recognizes the strings `"true"` and `"false"`
/// as true and false. While this is not specified in R6RS, the use of
/// these tokens is common enough in other programming languages that
/// I've decided that Seax Scheme should support it as well. This may
/// be removed in a future version if it causes unforseen compatibility
/// issues.
///
/// `#t`, `#T`, or `true` -> `true`
/// `#f`, `#F`, or `false` -> `false`
pub fn bool_const<I>(input: State<I>) -> ParseResult<BoolNode, I>
where I: Stream<Item=char> {
let t_const = try(string("#t"))
.or(try(string("#T")))
.or(try(string("true")))
.map(|_| BoolNode{ value: true });
let f_const = try(string("#f"))
.or(try(string("#F")))
.or(try(string("false")))
.map(|_| BoolNode{ value: false });
t_const
.or(f_const)
.parse_state(input)
}
/// Parses a floating-point, signed integer, or unsigned integer constant.
pub fn number<I>(input: State<I>) -> ParseResult<NumNode, I>
where I: Stream<Item=char> {
try(parser(sint_const))
.or(try(parser(uint_const)))
.or(try(parser(float_const)))
.parse_state(input)
}
/// Parser for valid R6RS identifiers.
///
/// An identifier may begin with an alphabetic character or
/// one of the following special characters `!`, `$`, `&`, `:`, `^`,
/// `<`, `>`, `_`,`~`,`\`, or `?`. Subsequent characters may also include
/// numbers or the special characters `+`, `-`, `.`, and `@`.
///
/// Essentially, this parser recognizes the regular expression
/// `[a-zA-Z!\$%:\^<>_~\\\?][a-zA-Z0-9!\$%:\^<>_~\\\?\+\-\.@]*`.
///
/// For more information, consult the
/// [R6RS](http://www.r6rs.org/final/html/r6rs/r6rs-Z-H-7.html).
pub fn name<I>(input: State<I>) -> ParseResult<NameNode, I>
where I: Stream<Item=char> {
fn initial<I>(input: State<I>) -> ParseResult<char, I>
where I: Stream<Item=char> {
satisfy(|c|
c.is_alphabetic()
// R6RS 'special initial' characters
|| c == '!' || c == '$' || c == '%' || c == ':' || c == '^'
|| c == '<' || c == '>' || c == '_' || c == '~' || c == '\\'
|| c == '?'
).parse_state(input)
}
fn subsequent<I>(input: State<I>) -> ParseResult<char, I>
where I: Stream<Item=char> {
satisfy(|c|
c.is_alphanumeric()
// R6RS 'special initial' characters
|| c == '!' || c == '$' || c == '%' || c == ':' || c == '^'
|| c == '<' || c == '>' || c == '_' || c == '~' || c == '\\'
|| c == '?'
// R6RS 'special subsequent' characters
|| c == '+' || c == '-' || c == '.' || c == '@'
).parse_state(input)
}
fn rest<I>(input: State<I>) -> ParseResult<String, I>
where I: Stream<Item=char> {
many::<Vec<_>, _>(parser(subsequent))
.map(|it|
it.iter().fold(
String::new(),
|mut s: String, i| {
s.push(*i);
s
})
)
.parse_state(input)
}
parser(initial)
.and(parser(rest))
.parse_state(input)
.map(|x| {
let mut s = String::new();
s.push((x.0).0);
s.push_str(&(x.0).1);
(NameNode{ name: s}, x.1)
})
}
pub fn character<I> (input: State<I>) -> ParseResult<CharNode, I>
where I: Stream<Item=char> {
fn char_name<I> (input: State<I>) -> ParseResult<char, I>
where I: Stream<Item=char> {
try(string("tab").map(|_| '\t'))
.or(try(string("newline")).map(|_| '\n'))
.parse_state(input)
}
string("#\\")
.with(
parser(char_name)
.or(parser(any_char))
).map(|c| CharNode { value: c})
.parse_state(input)
}
/// Parses Scheme expressions.
#[allow(unconditional_recursion)]
pub fn expr<I>(input: State<I>) -> ParseResult<ExprNode, I>
where I: Stream<Item=char> {
let spaces = spaces();
fn sexpr<I>(input: State<I>) -> ParseResult<ExprNode, I>
where I: Stream<Item=char> {
between(
satisfy(|c| c == '('),
satisfy(|c| c == ')'),
parser(name)
.and(many(parser(expr)))
.map(|x| {
SExpr(SExprNode {
operator: x.0,
operands: x.1
})
})
).parse_state(input)
}
fn list<I>(input: State<I>) -> ParseResult<ExprNode, I>
where I: Stream<Item=char> {
between(
satisfy(|c| c == '('),
satisfy(|c| c == ')'),
many(parser(expr))
.map(|x| {
ListConst(ListNode {
elements: x
})
})
).parse_state(input)
}
spaces.clone().with(
try(parser(sexpr))
.or(try(parser(list)))
.or(try(parser(name).map(Name)))
.or(try(parser(number).map(NumConst)))
).parse_state(input)
}
#[cfg(test)]
mod tests; |
use std::collections::{BTreeMap, HashMap};
use std::net;
use std::path::{Path, PathBuf};
use std::str;
use token::{self, Token};
extern crate fnv;
use self::fnv::FnvHasher;
//////////////////////////////////////////////////////////////////////////
#[derive(Serialize)]
struct UnitStruct;
#[derive(Serialize)]
struct TupleStruct(i32, i32, i32);
#[derive(Serialize)]
struct Struct {
a: i32,
b: i32,
c: i32,
}
#[derive(Serialize)]
enum Enum {
Unit,
One(i32),
Seq(i32, i32),
Map { a: i32, b: i32 },
}
//////////////////////////////////////////////////////////////////////////
declare_ser_tests! {
test_unit {
() => &[Token::Unit],
}
test_bool {
true => &[Token::Bool(true)],
false => &[Token::Bool(false)],
}
test_isizes {
0isize => &[Token::Isize(0)],
0i8 => &[Token::I8(0)],
0i16 => &[Token::I16(0)],
0i32 => &[Token::I32(0)],
0i64 => &[Token::I64(0)],
}
test_usizes {
0usize => &[Token::Usize(0)],
0u8 => &[Token::U8(0)],
0u16 => &[Token::U16(0)],
0u32 => &[Token::U32(0)],
0u64 => &[Token::U64(0)],
}
test_floats {
0f32 => &[Token::F32(0.)],
0f64 => &[Token::F64(0.)],
}
test_char {
'a' => &[Token::Char('a')],
}
test_str {
"abc" => &[Token::Str("abc")],
"abc".to_owned() => &[Token::Str("abc")],
}
test_option {
None::<i32> => &[Token::Option(false)],
Some(1) => &[
Token::Option(true),
Token::I32(1),
],
}
test_result {
Ok::<i32, i32>(0) => &[
Token::EnumNewType("Result", "Ok"),
Token::I32(0),
],
Err::<i32, i32>(1) => &[
Token::EnumNewType("Result", "Err"),
Token::I32(1),
],
}
test_slice {
&[0][..0] => &[
Token::SeqStart(Some(0)),
Token::SeqEnd,
],
&[1, 2, 3][..] => &[
Token::SeqStart(Some(3)),
Token::SeqSep,
Token::I32(1),
Token::SeqSep,
Token::I32(2),
Token::SeqSep,
Token::I32(3),
Token::SeqEnd,
],
}
test_array {
[0; 0] => &[
Token::SeqArrayStart(0),
Token::SeqEnd,
],
[1, 2, 3] => &[
Token::SeqArrayStart(3),
Token::SeqSep,
Token::I32(1),
Token::SeqSep,
Token::I32(2),
Token::SeqSep,
Token::I32(3),
Token::SeqEnd,
],
}
test_vec {
Vec::<isize>::new() => &[
Token::SeqStart(Some(0)),
Token::SeqEnd,
],
vec![vec![], vec![1], vec![2, 3]] => &[
Token::SeqStart(Some(3)),
Token::SeqSep,
Token::SeqStart(Some(0)),
Token::SeqEnd,
Token::SeqSep,
Token::SeqStart(Some(1)),
Token::SeqSep,
Token::I32(1),
Token::SeqEnd,
Token::SeqSep,
Token::SeqStart(Some(2)),
Token::SeqSep,
Token::I32(2),
Token::SeqSep,
Token::I32(3),
Token::SeqEnd,
Token::SeqEnd,
],
}
test_tuple {
(1,) => &[
Token::TupleStart(1),
Token::TupleSep,
Token::I32(1),
Token::TupleEnd,
],
(1, 2, 3) => &[
Token::TupleStart(3),
Token::TupleSep,
Token::I32(1),
Token::TupleSep,
Token::I32(2),
Token::TupleSep,
Token::I32(3),
Token::TupleEnd,
],
}
test_btreemap {
btreemap![1 => 2] => &[
Token::MapStart(Some(1)),
Token::MapSep,
Token::I32(1),
Token::I32(2),
Token::MapEnd,
],
btreemap![1 => 2, 3 => 4] => &[
Token::MapStart(Some(2)),
Token::MapSep,
Token::I32(1),
Token::I32(2),
Token::MapSep,
Token::I32(3),
Token::I32(4),
Token::MapEnd,
],
btreemap![1 => btreemap![], 2 => btreemap![3 => 4, 5 => 6]] => &[
Token::MapStart(Some(2)),
Token::MapSep,
Token::I32(1),
Token::MapStart(Some(0)),
Token::MapEnd,
Token::MapSep,
Token::I32(2),
Token::MapStart(Some(2)),
Token::MapSep,
Token::I32(3),
Token::I32(4),
Token::MapSep,
Token::I32(5),
Token::I32(6),
Token::MapEnd,
Token::MapEnd,
],
}
test_hashmap {
hashmap![FnvHasher @ 1 => 2, 3 => 4] => &[
Token::MapStart(Some(2)),
Token::MapSep,
Token::I32(1),
Token::I32(2),
Token::MapSep,
Token::I32(3),
Token::I32(4),
Token::MapEnd,
],
}
test_unit_struct {
UnitStruct => &[Token::UnitStruct("UnitStruct")],
}
test_tuple_struct {
TupleStruct(1, 2, 3) => &[
Token::TupleStructStart("TupleStruct", Some(3)),
Token::TupleStructSep,
Token::I32(1),
Token::TupleStructSep,
Token::I32(2),
Token::TupleStructSep,
Token::I32(3),
Token::TupleStructEnd,
],
}
test_struct {
Struct { a: 1, b: 2, c: 3 } => &[
Token::StructStart("Struct", Some(3)),
Token::StructSep,
Token::Str("a"),
Token::I32(1),
Token::StructSep,
Token::Str("b"),
Token::I32(2),
Token::StructSep,
Token::Str("c"),
Token::I32(3),
Token::StructEnd,
],
}
test_enum {
Enum::Unit => &[Token::EnumUnit("Enum", "Unit")],
Enum::One(42) => &[Token::EnumNewType("Enum", "One"), Token::I32(42)],
Enum::Seq(1, 2) => &[
Token::EnumSeqStart("Enum", "Seq", Some(2)),
Token::EnumSeqSep,
Token::I32(1),
Token::EnumSeqSep,
Token::I32(2),
Token::EnumSeqEnd,
],
Enum::Map { a: 1, b: 2 } => &[
Token::EnumMapStart("Enum", "Map", Some(2)),
Token::EnumMapSep,
Token::Str("a"),
Token::I32(1),
Token::EnumMapSep,
Token::Str("b"),
Token::I32(2),
Token::EnumMapEnd,
],
}
test_box {
Box::new(0i32) => &[Token::I32(0)],
}
test_boxed_slice {
Box::new([0, 1, 2]) => &[
Token::SeqArrayStart(3),
Token::SeqSep,
Token::I32(0),
Token::SeqSep,
Token::I32(1),
Token::SeqSep,
Token::I32(2),
Token::SeqEnd,
],
}
test_net_ipv4addr {
"1.2.3.4".parse::<net::Ipv4Addr>().unwrap() => &[Token::Str("1.2.3.4")],
}
test_net_ipv6addr {
"::1".parse::<net::Ipv6Addr>().unwrap() => &[Token::Str("::1")],
}
test_net_socketaddr {
"1.2.3.4:1234".parse::<net::SocketAddr>().unwrap() => &[Token::Str("1.2.3.4:1234")],
"1.2.3.4:1234".parse::<net::SocketAddrV4>().unwrap() => &[Token::Str("1.2.3.4:1234")],
"[::1]:1234".parse::<net::SocketAddrV6>().unwrap() => &[Token::Str("[::1]:1234")],
}
test_path {
Path::new("/usr/local/lib") => &[
Token::Str("/usr/local/lib"),
],
}
test_path_buf {
PathBuf::from("/usr/local/lib") => &[
Token::Str("/usr/local/lib"),
],
}
}
#[cfg(feature = "nightly")]
#[test]
fn test_net_ipaddr() {
assert_ser_tokens(
"1.2.3.4".parse::<net::IpAddr>().unwrap(),
&[Token::Str("1.2.3.4")],
);
}
#[test]
fn test_cannot_serialize_paths() {
let path = unsafe {
str::from_utf8_unchecked(b"Hello \xF0\x90\x80World")
};
token::assert_ser_tokens_error(
&Path::new(path),
&[Token::Str("Hello �World")],
token::Error::InvalidValue("Path contains invalid UTF-8 characters".to_owned()));
let mut path_buf = PathBuf::new();
path_buf.push(path);
token::assert_ser_tokens_error(
&path_buf,
&[Token::Str("Hello �World")],
token::Error::InvalidValue("Path contains invalid UTF-8 characters".to_owned()));
}
Add ser tests for normal HashMap
use std::collections::{BTreeMap, HashMap};
use std::net;
use std::path::{Path, PathBuf};
use std::str;
use token::{self, Token};
extern crate fnv;
use self::fnv::FnvHasher;
//////////////////////////////////////////////////////////////////////////
#[derive(Serialize)]
struct UnitStruct;
#[derive(Serialize)]
struct TupleStruct(i32, i32, i32);
#[derive(Serialize)]
struct Struct {
a: i32,
b: i32,
c: i32,
}
#[derive(Serialize)]
enum Enum {
Unit,
One(i32),
Seq(i32, i32),
Map { a: i32, b: i32 },
}
//////////////////////////////////////////////////////////////////////////
declare_ser_tests! {
test_unit {
() => &[Token::Unit],
}
test_bool {
true => &[Token::Bool(true)],
false => &[Token::Bool(false)],
}
test_isizes {
0isize => &[Token::Isize(0)],
0i8 => &[Token::I8(0)],
0i16 => &[Token::I16(0)],
0i32 => &[Token::I32(0)],
0i64 => &[Token::I64(0)],
}
test_usizes {
0usize => &[Token::Usize(0)],
0u8 => &[Token::U8(0)],
0u16 => &[Token::U16(0)],
0u32 => &[Token::U32(0)],
0u64 => &[Token::U64(0)],
}
test_floats {
0f32 => &[Token::F32(0.)],
0f64 => &[Token::F64(0.)],
}
test_char {
'a' => &[Token::Char('a')],
}
test_str {
"abc" => &[Token::Str("abc")],
"abc".to_owned() => &[Token::Str("abc")],
}
test_option {
None::<i32> => &[Token::Option(false)],
Some(1) => &[
Token::Option(true),
Token::I32(1),
],
}
test_result {
Ok::<i32, i32>(0) => &[
Token::EnumNewType("Result", "Ok"),
Token::I32(0),
],
Err::<i32, i32>(1) => &[
Token::EnumNewType("Result", "Err"),
Token::I32(1),
],
}
test_slice {
&[0][..0] => &[
Token::SeqStart(Some(0)),
Token::SeqEnd,
],
&[1, 2, 3][..] => &[
Token::SeqStart(Some(3)),
Token::SeqSep,
Token::I32(1),
Token::SeqSep,
Token::I32(2),
Token::SeqSep,
Token::I32(3),
Token::SeqEnd,
],
}
test_array {
[0; 0] => &[
Token::SeqArrayStart(0),
Token::SeqEnd,
],
[1, 2, 3] => &[
Token::SeqArrayStart(3),
Token::SeqSep,
Token::I32(1),
Token::SeqSep,
Token::I32(2),
Token::SeqSep,
Token::I32(3),
Token::SeqEnd,
],
}
test_vec {
Vec::<isize>::new() => &[
Token::SeqStart(Some(0)),
Token::SeqEnd,
],
vec![vec![], vec![1], vec![2, 3]] => &[
Token::SeqStart(Some(3)),
Token::SeqSep,
Token::SeqStart(Some(0)),
Token::SeqEnd,
Token::SeqSep,
Token::SeqStart(Some(1)),
Token::SeqSep,
Token::I32(1),
Token::SeqEnd,
Token::SeqSep,
Token::SeqStart(Some(2)),
Token::SeqSep,
Token::I32(2),
Token::SeqSep,
Token::I32(3),
Token::SeqEnd,
Token::SeqEnd,
],
}
test_tuple {
(1,) => &[
Token::TupleStart(1),
Token::TupleSep,
Token::I32(1),
Token::TupleEnd,
],
(1, 2, 3) => &[
Token::TupleStart(3),
Token::TupleSep,
Token::I32(1),
Token::TupleSep,
Token::I32(2),
Token::TupleSep,
Token::I32(3),
Token::TupleEnd,
],
}
test_btreemap {
btreemap![1 => 2] => &[
Token::MapStart(Some(1)),
Token::MapSep,
Token::I32(1),
Token::I32(2),
Token::MapEnd,
],
btreemap![1 => 2, 3 => 4] => &[
Token::MapStart(Some(2)),
Token::MapSep,
Token::I32(1),
Token::I32(2),
Token::MapSep,
Token::I32(3),
Token::I32(4),
Token::MapEnd,
],
btreemap![1 => btreemap![], 2 => btreemap![3 => 4, 5 => 6]] => &[
Token::MapStart(Some(2)),
Token::MapSep,
Token::I32(1),
Token::MapStart(Some(0)),
Token::MapEnd,
Token::MapSep,
Token::I32(2),
Token::MapStart(Some(2)),
Token::MapSep,
Token::I32(3),
Token::I32(4),
Token::MapSep,
Token::I32(5),
Token::I32(6),
Token::MapEnd,
Token::MapEnd,
],
}
test_hashmap {
HashMap::<isize, isize>::new() => &[
Token::MapStart(Some(0)),
Token::MapEnd,
],
hashmap![1 => 2] => &[
Token::MapStart(Some(1)),
Token::MapSep,
Token::I32(1),
Token::I32(2),
Token::MapEnd,
],
hashmap![FnvHasher @ 1 => 2] => &[
Token::MapStart(Some(1)),
Token::MapSep,
Token::I32(1),
Token::I32(2),
Token::MapEnd,
],
}
test_unit_struct {
UnitStruct => &[Token::UnitStruct("UnitStruct")],
}
test_tuple_struct {
TupleStruct(1, 2, 3) => &[
Token::TupleStructStart("TupleStruct", Some(3)),
Token::TupleStructSep,
Token::I32(1),
Token::TupleStructSep,
Token::I32(2),
Token::TupleStructSep,
Token::I32(3),
Token::TupleStructEnd,
],
}
test_struct {
Struct { a: 1, b: 2, c: 3 } => &[
Token::StructStart("Struct", Some(3)),
Token::StructSep,
Token::Str("a"),
Token::I32(1),
Token::StructSep,
Token::Str("b"),
Token::I32(2),
Token::StructSep,
Token::Str("c"),
Token::I32(3),
Token::StructEnd,
],
}
test_enum {
Enum::Unit => &[Token::EnumUnit("Enum", "Unit")],
Enum::One(42) => &[Token::EnumNewType("Enum", "One"), Token::I32(42)],
Enum::Seq(1, 2) => &[
Token::EnumSeqStart("Enum", "Seq", Some(2)),
Token::EnumSeqSep,
Token::I32(1),
Token::EnumSeqSep,
Token::I32(2),
Token::EnumSeqEnd,
],
Enum::Map { a: 1, b: 2 } => &[
Token::EnumMapStart("Enum", "Map", Some(2)),
Token::EnumMapSep,
Token::Str("a"),
Token::I32(1),
Token::EnumMapSep,
Token::Str("b"),
Token::I32(2),
Token::EnumMapEnd,
],
}
test_box {
Box::new(0i32) => &[Token::I32(0)],
}
test_boxed_slice {
Box::new([0, 1, 2]) => &[
Token::SeqArrayStart(3),
Token::SeqSep,
Token::I32(0),
Token::SeqSep,
Token::I32(1),
Token::SeqSep,
Token::I32(2),
Token::SeqEnd,
],
}
test_net_ipv4addr {
"1.2.3.4".parse::<net::Ipv4Addr>().unwrap() => &[Token::Str("1.2.3.4")],
}
test_net_ipv6addr {
"::1".parse::<net::Ipv6Addr>().unwrap() => &[Token::Str("::1")],
}
test_net_socketaddr {
"1.2.3.4:1234".parse::<net::SocketAddr>().unwrap() => &[Token::Str("1.2.3.4:1234")],
"1.2.3.4:1234".parse::<net::SocketAddrV4>().unwrap() => &[Token::Str("1.2.3.4:1234")],
"[::1]:1234".parse::<net::SocketAddrV6>().unwrap() => &[Token::Str("[::1]:1234")],
}
test_path {
Path::new("/usr/local/lib") => &[
Token::Str("/usr/local/lib"),
],
}
test_path_buf {
PathBuf::from("/usr/local/lib") => &[
Token::Str("/usr/local/lib"),
],
}
}
#[cfg(feature = "nightly")]
#[test]
fn test_net_ipaddr() {
    // Serializing an `IpAddr` produces its display form as one string token.
    let addr: net::IpAddr = "1.2.3.4".parse().unwrap();
    let expected = [Token::Str("1.2.3.4")];
    assert_ser_tokens(addr, &expected);
}
#[test]
fn test_cannot_serialize_paths() {
    // Construct a &str whose bytes are NOT valid UTF-8 (`\xF0\x90\x80` is a
    // truncated 4-byte sequence) so that the Path serializer's UTF-8 check fails.
    // NOTE(review): `from_utf8_unchecked` on invalid UTF-8 is undefined
    // behavior per its safety contract; the test relies on it only to reach
    // the error path — consider building the invalid path via `OsStr` bytes
    // instead. TODO confirm.
    let path = unsafe {
        str::from_utf8_unchecked(b"Hello \xF0\x90\x80World")
    };
    // Serializing a `&Path` with invalid UTF-8 must yield an InvalidValue error.
    token::assert_ser_tokens_error(
        &Path::new(path),
        &[Token::Str("Hello �World")],
        token::Error::InvalidValue("Path contains invalid UTF-8 characters".to_owned()));
    // The same failure is expected for an owned `PathBuf`.
    let mut path_buf = PathBuf::new();
    path_buf.push(path);
    token::assert_ser_tokens_error(
        &path_buf,
        &[Token::Str("Hello �World")],
        token::Error::InvalidValue("Path contains invalid UTF-8 characters".to_owned()));
}
|
use bson::Document;
use bson::Bson::{FloatingPoint, I32};
use bson::Bson::String as BsonString;
use mongodb::client::wire_protocol::flags::{OpInsertFlags, OpQueryFlags};
use mongodb::client::wire_protocol::operations::Message;
use std::io::Write;
use std::net::TcpStream;
// #[test]
/// Round-trips a single-key document over the raw wire protocol: inserts
/// `{foo: 42.0}` into `test.test` via OP_INSERT, queries it back via
/// OP_QUERY and checks the returned value. Disabled (`// #[test]`) —
/// presumably because it requires a mongod listening on localhost:27017;
/// confirm before re-enabling.
fn insert_single_key_doc() {
    match TcpStream::connect("localhost:27017") {
        Ok(mut stream) => {
            // Build and send an OP_INSERT carrying one document.
            let mut doc = Document::new();
            doc.insert("foo".to_owned(), FloatingPoint(42.0));
            let docs = vec![doc];
            let flags = OpInsertFlags::no_flags();
            let name = "test.test".to_owned();
            let res = Message::with_insert(1, flags, name, docs);
            let cm = match res {
                Ok(message) => message,
                // Report the underlying error instead of discarding it —
                // consistent with the error handling in the sibling tests.
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            // Empty query document matches every document in the collection.
            let doc = Document::new();
            let flags = OpQueryFlags::no_flags();
            let name = "test.test".to_owned();
            let res = Message::with_query(1, flags, name, 0, 0, doc, None);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            let reply = match Message::read(&mut stream) {
                Ok(m) => m,
                Err(s) => panic!("{}", s)
            };
            // Only an OP_REPLY is a valid response to OP_QUERY.
            let docs = match reply {
                Message::OpReply { header: _, flags: _, cursor_id:_,
                                   starting_from: _, number_returned: _,
                                   documents: d } => d,
                _ => panic!("Invalid response read from server")
            };
            assert_eq!(docs.len() as i32, 1);
            match docs[0].get("foo") {
                Some(&FloatingPoint(42.0)) => (),
                _ => panic!("Wrong value returned!")
            };
        },
        Err(_) => {
            panic!("Could not connect to server")
        }
    }
}
// #[test]
/// Inserts a two-key document (`foo: 42.0`, `bar: "__z&"`) into `test.test`
/// over the raw wire protocol, queries it back and checks both fields.
/// Disabled (`// #[test]`) — presumably because it needs a mongod on
/// localhost:27017; confirm before re-enabling.
fn insert_multi_key_doc() {
    match TcpStream::connect("localhost:27017") {
        Ok(mut stream) => {
            // Build and send an OP_INSERT carrying one two-field document.
            let mut doc = Document::new();
            doc.insert("foo".to_owned(), FloatingPoint(42.0));
            doc.insert("bar".to_owned(), BsonString("__z&".to_owned()));
            let docs = vec![doc];
            let flags = OpInsertFlags::no_flags();
            let name = "test.test".to_owned();
            let res = Message::with_insert(1, flags, name, docs);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            // Empty query document matches every document in the collection.
            let doc = Document::new();
            let flags = OpQueryFlags::no_flags();
            let name = "test.test".to_owned();
            let res = Message::with_query(1, flags, name, 0, 0, doc, None);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            let reply = match Message::read(&mut stream) {
                Ok(m) => m,
                Err(s) => panic!("{}", s)
            };
            // Only an OP_REPLY is a valid response to OP_QUERY.
            let docs = match reply {
                Message::OpReply { header: _, flags: _, cursor_id:_,
                                   starting_from: _, number_returned: _,
                                   documents: d } => d,
                _ => panic!("Invalid response read from server")
            };
            assert_eq!(docs.len() as i32, 1);
            match docs[0].get("foo") {
                Some(&FloatingPoint(42.0)) => (),
                _ => panic!("Wrong value returned!")
            };
            match docs[0].get("bar") {
                Some(&BsonString(ref s)) => assert_eq!(s, "__z&"),
                _ => panic!("Wrong value returned!")
            };
        },
        Err(_) => {
            panic!("Could not connect to server")
        }
    }
}
// #[test]
/// Inserts two documents in a single OP_INSERT into `test.test`, reads them
/// back and checks every field of both. Disabled (`// #[test]`) — presumably
/// because it needs a mongod on localhost:27017; confirm before re-enabling.
fn insert_docs() {
    match TcpStream::connect("localhost:27017") {
        Ok(mut stream) => {
            // First document: two keys of different BSON types.
            let mut doc1 = Document::new();
            doc1.insert("foo".to_owned(), FloatingPoint(42.0));
            doc1.insert("bar".to_owned(), BsonString("__z&".to_owned()));
            // Second document: single i32 key.
            let mut doc2 = Document::new();
            doc2.insert("booyah".to_owned(), I32(23));
            let docs = vec![doc1, doc2];
            let flags = OpInsertFlags::no_flags();
            let name = "test.test".to_owned();
            let res = Message::with_insert(1, flags, name, docs);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            // Empty query document matches every document in the collection.
            let doc = Document::new();
            let flags = OpQueryFlags::no_flags();
            let name = "test.test".to_owned();
            let res = Message::with_query(1, flags, name, 0, 0, doc, None);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            let reply = match Message::read(&mut stream) {
                Ok(m) => m,
                Err(s) => panic!("{}", s)
            };
            // Only an OP_REPLY is a valid response to OP_QUERY.
            let docs = match reply {
                Message::OpReply { header: _, flags: _, cursor_id:_,
                                   starting_from: _, number_returned: _,
                                   documents: d } => d,
                _ => panic!("Invalid response read from server")
            };
            assert_eq!(docs.len() as i32, 2);
            match docs[0].get("foo") {
                Some(&FloatingPoint(42.0)) => (),
                _ => panic!("Wrong value returned!")
            };
            match docs[0].get("bar") {
                Some(&BsonString(ref s)) => assert_eq!(s, "__z&"),
                _ => panic!("Wrong value returned!")
            };
            match docs[1].get("booyah") {
                Some(&I32(23)) => (),
                _ => panic!("Wrong value returned!")
            };
        },
        Err(_) => {
            panic!("Could not connect to server")
        }
    }
}
Changed the tests to use a separate collection per test (e.g. `test.single_key`, `test.multi_key`, `test.multi_doc`) instead of the shared `test.test`.
use bson::Document;
use bson::Bson::{FloatingPoint, I32};
use bson::Bson::String as BsonString;
use mongodb::client::wire_protocol::flags::{OpInsertFlags, OpQueryFlags};
use mongodb::client::wire_protocol::operations::Message;
use std::io::Write;
use std::net::TcpStream;
#[test]
/// Round-trips a single-key document over the raw wire protocol: inserts
/// `{foo: 42.0}` into the dedicated `test.single_key` collection via
/// OP_INSERT, queries it back via OP_QUERY and checks the returned value.
/// NOTE(review): requires a mongod listening on localhost:27017.
fn insert_single_key_doc() {
    match TcpStream::connect("localhost:27017") {
        Ok(mut stream) => {
            // Build and send an OP_INSERT carrying one document.
            let mut doc = Document::new();
            doc.insert("foo".to_owned(), FloatingPoint(42.0));
            let docs = vec![doc];
            let flags = OpInsertFlags::no_flags();
            let name = "test.single_key".to_owned();
            let res = Message::with_insert(1, flags, name, docs);
            let cm = match res {
                Ok(message) => message,
                // Report the underlying error instead of discarding it —
                // consistent with the error handling in the sibling tests.
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            // Empty query document matches every document in the collection.
            let doc = Document::new();
            let flags = OpQueryFlags::no_flags();
            let name = "test.single_key".to_owned();
            let res = Message::with_query(1, flags, name, 0, 0, doc, None);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            let reply = match Message::read(&mut stream) {
                Ok(m) => m,
                Err(s) => panic!("{}", s)
            };
            // Only an OP_REPLY is a valid response to OP_QUERY.
            let docs = match reply {
                Message::OpReply { header: _, flags: _, cursor_id:_,
                                   starting_from: _, number_returned: _,
                                   documents: d } => d,
                _ => panic!("Invalid response read from server")
            };
            assert_eq!(docs.len() as i32, 1);
            match docs[0].get("foo") {
                Some(&FloatingPoint(42.0)) => (),
                _ => panic!("Wrong value returned!")
            };
        },
        Err(_) => {
            panic!("Could not connect to server")
        }
    }
}
#[test]
/// Inserts a two-key document (`foo: 42.0`, `bar: "__z&"`) into the
/// dedicated `test.multi_key` collection over the raw wire protocol,
/// queries it back and checks both fields.
/// NOTE(review): requires a mongod listening on localhost:27017.
fn insert_multi_key_doc() {
    match TcpStream::connect("localhost:27017") {
        Ok(mut stream) => {
            // Build and send an OP_INSERT carrying one two-field document.
            let mut doc = Document::new();
            doc.insert("foo".to_owned(), FloatingPoint(42.0));
            doc.insert("bar".to_owned(), BsonString("__z&".to_owned()));
            let docs = vec![doc];
            let flags = OpInsertFlags::no_flags();
            let name = "test.multi_key".to_owned();
            let res = Message::with_insert(1, flags, name, docs);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            // Empty query document matches every document in the collection.
            let doc = Document::new();
            let flags = OpQueryFlags::no_flags();
            let name = "test.multi_key".to_owned();
            let res = Message::with_query(1, flags, name, 0, 0, doc, None);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            let reply = match Message::read(&mut stream) {
                Ok(m) => m,
                Err(s) => panic!("{}", s)
            };
            // Only an OP_REPLY is a valid response to OP_QUERY.
            let docs = match reply {
                Message::OpReply { header: _, flags: _, cursor_id:_,
                                   starting_from: _, number_returned: _,
                                   documents: d } => d,
                _ => panic!("Invalid response read from server")
            };
            assert_eq!(docs.len() as i32, 1);
            match docs[0].get("foo") {
                Some(&FloatingPoint(42.0)) => (),
                _ => panic!("Wrong value returned!")
            };
            match docs[0].get("bar") {
                Some(&BsonString(ref s)) => assert_eq!(s, "__z&"),
                _ => panic!("Wrong value returned!")
            };
        },
        Err(_) => {
            panic!("Could not connect to server")
        }
    }
}
#[test]
/// Inserts two documents in a single OP_INSERT into the dedicated
/// `test.multi_doc` collection, reads them back and checks every field.
/// NOTE(review): requires a mongod listening on localhost:27017.
fn insert_docs() {
    match TcpStream::connect("localhost:27017") {
        Ok(mut stream) => {
            // First document: two keys of different BSON types.
            let mut doc1 = Document::new();
            doc1.insert("foo".to_owned(), FloatingPoint(42.0));
            doc1.insert("bar".to_owned(), BsonString("__z&".to_owned()));
            // Second document: single i32 key.
            let mut doc2 = Document::new();
            doc2.insert("booyah".to_owned(), I32(23));
            let docs = vec![doc1, doc2];
            let flags = OpInsertFlags::no_flags();
            let name = "test.multi_doc".to_owned();
            let res = Message::with_insert(1, flags, name, docs);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            // Empty query document matches every document in the collection.
            let doc = Document::new();
            let flags = OpQueryFlags::no_flags();
            let name = "test.multi_doc".to_owned();
            let res = Message::with_query(1, flags, name, 0, 0, doc, None);
            let cm = match res {
                Ok(message) => message,
                Err(s) => panic!("{}", s)
            };
            match cm.write(&mut stream) {
                Ok(_) => (),
                Err(s) => panic!("{}", s)
            };
            let reply = match Message::read(&mut stream) {
                Ok(m) => m,
                Err(s) => panic!("{}", s)
            };
            // Only an OP_REPLY is a valid response to OP_QUERY.
            let docs = match reply {
                Message::OpReply { header: _, flags: _, cursor_id:_,
                                   starting_from: _, number_returned: _,
                                   documents: d } => d,
                _ => panic!("Invalid response read from server")
            };
            assert_eq!(docs.len() as i32, 2);
            match docs[0].get("foo") {
                Some(&FloatingPoint(42.0)) => (),
                _ => panic!("Wrong value returned!")
            };
            match docs[0].get("bar") {
                Some(&BsonString(ref s)) => assert_eq!(s, "__z&"),
                _ => panic!("Wrong value returned!")
            };
            match docs[1].get("booyah") {
                Some(&I32(23)) => (),
                _ => panic!("Wrong value returned!")
            };
        },
        Err(_) => {
            panic!("Could not connect to server")
        }
    }
}
|
use crate::infer::type_variable::TypeVariableOriginKind;
use crate::infer::InferCtxt;
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Namespace};
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::{Body, Expr, ExprKind, FnRetTy, HirId, Local, Pat};
use rustc_middle::hir::map::Map;
use rustc_middle::infer::unify_key::ConstVariableOriginKind;
use rustc_middle::ty::print::Print;
use rustc_middle::ty::subst::{GenericArg, GenericArgKind};
use rustc_middle::ty::{self, DefIdTree, InferConst, Ty};
use rustc_span::source_map::DesugaringKind;
use rustc_span::symbol::kw;
use rustc_span::Span;
use std::borrow::Cow;
/// HIR visitor that searches a body for the node whose type mentions the
/// unresolved inference variable `target`, recording the most useful
/// candidates for anchoring a "type annotations needed" diagnostic.
struct FindHirNodeVisitor<'a, 'tcx> {
    infcx: &'a InferCtxt<'a, 'tcx>,
    /// The unresolved type/const inference variable being searched for.
    target: GenericArg<'tcx>,
    /// Span of the expression that produced `target`.
    target_span: Span,
    /// Resolved type of the most recently recorded node.
    found_node_ty: Option<Ty<'tcx>>,
    /// First `let` pattern whose type mentions `target` (see `visit_local`).
    found_local_pattern: Option<&'tcx Pat<'tcx>>,
    /// First body-parameter pattern inside `target_span` whose type
    /// mentions `target` (see `visit_body`).
    found_arg_pattern: Option<&'tcx Pat<'tcx>>,
    /// Closure expression whose type mentions `target`.
    found_closure: Option<&'tcx Expr<'tcx>>,
    /// Method-call expression whose type mentions `target`.
    found_method_call: Option<&'tcx Expr<'tcx>>,
    /// Method call at exactly `target_span` whose receiver's type equals
    /// `target` (see `visit_expr`).
    found_exact_method_call: Option<&'tcx Expr<'tcx>>,
}
impl<'a, 'tcx> FindHirNodeVisitor<'a, 'tcx> {
    /// Creates a visitor searching for `target`, whose originating
    /// expression is at `target_span`. All `found_*` slots start empty.
    fn new(infcx: &'a InferCtxt<'a, 'tcx>, target: GenericArg<'tcx>, target_span: Span) -> Self {
        Self {
            infcx,
            target,
            target_span,
            found_node_ty: None,
            found_local_pattern: None,
            found_arg_pattern: None,
            found_closure: None,
            found_method_call: None,
            found_exact_method_call: None,
        }
    }
    /// Returns the (partially resolved) type recorded for `hir_id` if that
    /// type mentions `target` — either verbatim, or as a type variable that
    /// has been sub-unified with `target`'s variable; `None` otherwise.
    fn node_ty_contains_target(&mut self, hir_id: HirId) -> Option<Ty<'tcx>> {
        // Look the node's type up in the in-progress typeck results, if any.
        let ty_opt = self
            .infcx
            .in_progress_typeck_results
            .and_then(|typeck_results| typeck_results.borrow().node_type_opt(hir_id));
        match ty_opt {
            Some(ty) => {
                let ty = self.infcx.resolve_vars_if_possible(&ty);
                // Walk every generic arg nested in `ty` looking for the target.
                if ty.walk().any(|inner| {
                    inner == self.target
                        || match (inner.unpack(), self.target.unpack()) {
                            (GenericArgKind::Type(inner_ty), GenericArgKind::Type(target_ty)) => {
                                match (inner_ty.kind(), target_ty.kind()) {
                                    // Two distinct inference variables still
                                    // count as a match if they have been
                                    // sub-unified.
                                    (
                                        &ty::Infer(ty::TyVar(a_vid)),
                                        &ty::Infer(ty::TyVar(b_vid)),
                                    ) => self
                                        .infcx
                                        .inner
                                        .borrow_mut()
                                        .type_variables()
                                        .sub_unified(a_vid, b_vid),
                                    _ => false,
                                }
                            }
                            _ => false,
                        }
                }) {
                    Some(ty)
                } else {
                    None
                }
            }
            None => None,
        }
    }
}
impl<'a, 'tcx> Visitor<'tcx> for FindHirNodeVisitor<'a, 'tcx> {
    type Map = Map<'tcx>;
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::OnlyBodies(self.infcx.tcx.hir())
    }
    /// Records the first `let` pattern whose node type mentions the target.
    fn visit_local(&mut self, local: &'tcx Local<'tcx>) {
        if let (None, Some(ty)) =
            (self.found_local_pattern, self.node_ty_contains_target(local.hir_id))
        {
            // FIXME: There's a trade-off here - we can either check that our target span
            // is contained in `local.span` or not. If we choose to check containment
            // we can avoid some spurious suggestions (see #72690), but we lose
            // the ability to report on things like:
            //
            // ```
            // let x = vec![];
            // ```
            //
            // because the target span will be in the macro expansion of `vec![]`.
            // At present we choose not to check containment.
            self.found_local_pattern = Some(&*local.pat);
            self.found_node_ty = Some(ty);
        }
        intravisit::walk_local(self, local);
    }
    /// Records the first body parameter whose type mentions the target,
    /// but only when its pattern lies inside `target_span`.
    fn visit_body(&mut self, body: &'tcx Body<'tcx>) {
        for param in body.params {
            if let (None, Some(ty)) =
                (self.found_arg_pattern, self.node_ty_contains_target(param.hir_id))
            {
                if self.target_span.contains(param.pat.span) {
                    self.found_arg_pattern = Some(&*param.pat);
                    self.found_node_ty = Some(ty);
                }
            }
        }
        intravisit::walk_body(self, body);
    }
    fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
        // A method call at exactly the target span whose receiver (first
        // element of `exprs`) has exactly the target type is the best
        // possible anchor — record it and stop descending.
        if let ExprKind::MethodCall(_, call_span, exprs, _) = expr.kind {
            if call_span == self.target_span
                && Some(self.target)
                    == self.infcx.in_progress_typeck_results.and_then(|typeck_results| {
                        typeck_results
                            .borrow()
                            .node_type_opt(exprs.first().unwrap().hir_id)
                            .map(Into::into)
                    })
            {
                self.found_exact_method_call = Some(&expr);
                return;
            }
        }
        // Otherwise remember closures and method calls whose type merely
        // mentions the target; later candidates overwrite earlier ones.
        if self.node_ty_contains_target(expr.hir_id).is_some() {
            match expr.kind {
                ExprKind::Closure(..) => self.found_closure = Some(&expr),
                ExprKind::MethodCall(..) => self.found_method_call = Some(&expr),
                _ => {}
            }
        }
        intravisit::walk_expr(self, expr);
    }
}
/// Suggest giving an appropriate return type to a closure expression.
///
/// Emits a multipart suggestion replacing the closure's return-type slot
/// with `ret`, plus a span label built from `name`/`descr` and the parent
/// item information.
fn closure_return_type_suggestion(
    span: Span,
    err: &mut DiagnosticBuilder<'_>,
    output: &FnRetTy<'_>,
    body: &Body<'_>,
    descr: &str,
    name: &str,
    ret: &str,
    parent_name: Option<String>,
    parent_descr: Option<&str>,
) {
    // Only insert the `-> ` arrow when the closure had no explicit return
    // type to begin with.
    let (arrow, post) = match output {
        FnRetTy::DefaultReturn(_) => ("-> ", " "),
        _ => ("", ""),
    };
    // A non-block body (`|x| expr`) additionally needs braces around it for
    // the annotated form to parse.
    let suggestion = match body.value.kind {
        ExprKind::Block(..) => vec![(output.span(), format!("{}{}{}", arrow, ret, post))],
        _ => vec![
            (output.span(), format!("{}{}{}{{ ", arrow, ret, post)),
            (body.value.span.shrink_to_hi(), " }".to_string()),
        ],
    };
    err.multipart_suggestion(
        "give this closure an explicit return type without `_` placeholders",
        suggestion,
        Applicability::HasPlaceholders,
    );
    err.span_label(
        span,
        InferCtxt::missing_type_msg("type", &name, &descr, parent_name, parent_descr),
    );
}
/// Given a closure signature, return a `String` containing a list of all its argument types.
///
/// Returns `""` when the signature has no inputs.
fn closure_args(fn_sig: &ty::PolyFnSig<'_>) -> String {
    fn_sig
        .inputs()
        .skip_binder()
        .iter()
        .next()
        // The single input of a closure signature is a tuple of the
        // closure's arguments; list each tuple field, comma-separated.
        .map(|args| args.tuple_fields().map(|arg| arg.to_string()).collect::<Vec<_>>().join(", "))
        .unwrap_or_default()
}
/// Which "type annotations needed" error code a diagnostic should carry.
pub enum TypeAnnotationNeeded {
    /// The type of an expression or binding could not be inferred.
    /// ```compile_fail,E0282
    /// let x = "hello".chars().rev().collect();
    /// ```
    E0282,
    /// An implementation cannot be chosen unambiguously because of lack of information.
    /// ```compile_fail,E0283
    /// let _ = Default::default();
    /// ```
    E0283,
    /// Ambiguity that needs an annotation to resolve (e.g. which associated
    /// type an expression like the one below refers to).
    /// ```compile_fail,E0284
    /// let mut d: u64 = 2;
    /// d = d % 1u32.into();
    /// ```
    E0284,
}
impl Into<rustc_errors::DiagnosticId> for TypeAnnotationNeeded {
fn into(self) -> rustc_errors::DiagnosticId {
match self {
Self::E0282 => rustc_errors::error_code!(E0282),
Self::E0283 => rustc_errors::error_code!(E0283),
Self::E0284 => rustc_errors::error_code!(E0284),
}
}
}
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
/// Returns printable information about `arg` for diagnostics:
/// `(name, origin span, description, parent item name, parent item descr)`.
///
/// When `arg` is an inference variable that originated from a named
/// type/const parameter, the parameter's own name, origin span and parent
/// item are returned; otherwise the argument is pretty-printed (with
/// optional region highlighting) and the span/parent slots are `None`.
pub fn extract_type_name(
    &self,
    arg: GenericArg<'tcx>,
    highlight: Option<ty::print::RegionHighlightMode>,
) -> (String, Option<Span>, Cow<'static, str>, Option<String>, Option<&'static str>) {
    match arg.unpack() {
        GenericArgKind::Type(ty) => {
            if let ty::Infer(ty::TyVar(ty_vid)) = *ty.kind() {
                let mut inner = self.inner.borrow_mut();
                let ty_vars = &inner.type_variables();
                let var_origin = ty_vars.var_origin(ty_vid);
                if let TypeVariableOriginKind::TypeParameterDefinition(name, def_id) =
                    var_origin.kind
                {
                    // Find the item the parameter is declared on, for the
                    // "declared on the struct `Foo`" part of the message.
                    let parent_def_id = def_id.and_then(|def_id| self.tcx.parent(def_id));
                    let (parent_name, parent_desc) = if let Some(parent_def_id) = parent_def_id
                    {
                        let parent_name = self
                            .tcx
                            .def_key(parent_def_id)
                            .disambiguated_data
                            .data
                            .get_opt_name()
                            .map(|parent_symbol| parent_symbol.to_string());
                        (
                            parent_name,
                            Some(self.tcx.def_kind(parent_def_id).descr(parent_def_id)),
                        )
                    } else {
                        (None, None)
                    };
                    // `Self` is not a useful name to show; fall through to
                    // pretty-printing in that case.
                    if name != kw::SelfUpper {
                        return (
                            name.to_string(),
                            Some(var_origin.span),
                            "type parameter".into(),
                            parent_name,
                            parent_desc,
                        );
                    }
                }
            }
            // Fallback: pretty-print the type itself.
            let mut s = String::new();
            let mut printer = ty::print::FmtPrinter::new(self.tcx, &mut s, Namespace::TypeNS);
            if let Some(highlight) = highlight {
                printer.region_highlight_mode = highlight;
            }
            let _ = ty.print(printer);
            (s, None, ty.prefix_string(), None, None)
        }
        GenericArgKind::Const(ct) => {
            if let ty::ConstKind::Infer(InferConst::Var(vid)) = ct.val {
                let origin =
                    self.inner.borrow_mut().const_unification_table().probe_value(vid).origin;
                if let ConstVariableOriginKind::ConstParameterDefinition(name, def_id) =
                    origin.kind
                {
                    let parent_def_id = self.tcx.parent(def_id);
                    let (parent_name, parent_descr) = if let Some(parent_def_id) = parent_def_id
                    {
                        let parent_name = self
                            .tcx
                            .def_key(parent_def_id)
                            .disambiguated_data
                            .data
                            .get_opt_name()
                            .map(|parent_symbol| parent_symbol.to_string());
                        (
                            parent_name,
                            Some(self.tcx.def_kind(parent_def_id).descr(parent_def_id)),
                        )
                    } else {
                        (None, None)
                    };
                    return (
                        name.to_string(),
                        Some(origin.span),
                        "const parameter".into(),
                        parent_name,
                        parent_descr,
                    );
                }
            }
            // Fallback: pretty-print the const itself.
            let mut s = String::new();
            let mut printer = ty::print::FmtPrinter::new(self.tcx, &mut s, Namespace::TypeNS);
            if let Some(highlight) = highlight {
                printer.region_highlight_mode = highlight;
            }
            let _ = ct.print(printer);
            (s, None, "<TODO>".into(), None, None)
        }
        // Lifetimes are never reported through this path.
        GenericArgKind::Lifetime(_) => bug!("unexpected lifetime"),
    }
}
/// Builds the "type annotations needed" diagnostic (E0282/E0283/E0284)
/// for the unresolved inference variable `ty`.
///
/// Walks `body_id` (when present) with `FindHirNodeVisitor` to locate the
/// most precise HIR node to attach labels and suggestions to: an exact
/// method call, a closure parameter, a `let` pattern, or a method call
/// whose type mentions the variable.
pub fn need_type_info_err(
    &self,
    body_id: Option<hir::BodyId>,
    span: Span,
    ty: GenericArg<'tcx>,
    error_code: TypeAnnotationNeeded,
) -> DiagnosticBuilder<'tcx> {
    let ty = self.resolve_vars_if_possible(&ty);
    let (name, name_sp, descr, parent_name, parent_descr) = self.extract_type_name(ty, None);
    // Whether we are reporting a missing type or a missing const value.
    let kind_str = match ty.unpack() {
        GenericArgKind::Type(_) => "type",
        GenericArgKind::Const(_) => "the value",
        GenericArgKind::Lifetime(_) => bug!("unexpected lifetime"),
    };
    let mut local_visitor = FindHirNodeVisitor::new(&self, ty.into(), span);
    // Pretty-printer that substitutes parameter names for inference
    // variables that originated from named type parameters.
    let ty_to_string = |ty: Ty<'tcx>| -> String {
        let mut s = String::new();
        let mut printer = ty::print::FmtPrinter::new(self.tcx, &mut s, Namespace::TypeNS);
        let mut inner = self.inner.borrow_mut();
        let ty_vars = inner.type_variables();
        let getter = move |ty_vid| {
            let var_origin = ty_vars.var_origin(ty_vid);
            if let TypeVariableOriginKind::TypeParameterDefinition(name, _) = var_origin.kind {
                return Some(name.to_string());
            }
            None
        };
        printer.name_resolver = Some(Box::new(&getter));
        let _ = if let ty::FnDef(..) = ty.kind() {
            // We don't want the regular output for `fn`s because it includes its path in
            // invalid pseudo-syntax, we want the `fn`-pointer output instead.
            ty.fn_sig(self.tcx).print(printer)
        } else {
            ty.print(printer)
        };
        s
    };
    if let Some(body_id) = body_id {
        let expr = self.tcx.hir().expect_expr(body_id.hir_id);
        local_visitor.visit_expr(expr);
    }
    // Pick the most precise span to report the error at.
    let err_span = if let Some(pattern) = local_visitor.found_arg_pattern {
        pattern.span
    } else if let Some(span) = name_sp {
        // `span` here lets us point at `sum` instead of the entire right hand side expr:
        // error[E0282]: type annotations needed
        //  --> file2.rs:3:15
        //   |
        // 3 |     let _ = x.sum() as f64;
        //   |               ^^^ cannot infer type for `S`
        span
    } else if let Some(ExprKind::MethodCall(_, call_span, _, _)) =
        local_visitor.found_method_call.map(|e| &e.kind)
    {
        // Point at the call instead of the whole expression:
        // error[E0284]: type annotations needed
        //  --> file.rs:2:5
        //   |
        // 2 |     vec![Ok(2)].into_iter().collect()?;
        //   |                             ^^^^^^^ cannot infer type
        //   |
        //   = note: cannot resolve `<_ as std::ops::Try>::Ok == _`
        if span.contains(*call_span) { *call_span } else { span }
    } else {
        span
    };
    let is_named_and_not_impl_trait = |ty: Ty<'_>| {
        &ty.to_string() != "_" &&
            // FIXME: Remove this check after `impl_trait_in_bindings` is stabilized. #63527
            (!ty.is_impl_trait() || self.tcx.features().impl_trait_in_bindings)
    };
    // Extra text naming the concrete thing the variable appears in.
    let ty_msg = match (local_visitor.found_node_ty, local_visitor.found_exact_method_call) {
        (_, Some(_)) => String::new(),
        (Some(ty), _) if ty.is_closure() => {
            let substs =
                if let ty::Closure(_, substs) = *ty.kind() { substs } else { unreachable!() };
            let fn_sig = substs.as_closure().sig();
            let args = closure_args(&fn_sig);
            let ret = fn_sig.output().skip_binder().to_string();
            format!(" for the closure `fn({}) -> {}`", args, ret)
        }
        (Some(ty), _) if is_named_and_not_impl_trait(ty) => {
            let ty = ty_to_string(ty);
            format!(" for `{}`", ty)
        }
        _ => String::new(),
    };
    // When `name` corresponds to a type argument, show the path of the full type we're
    // trying to infer. In the following example, `ty_msg` contains
    // " in `std::result::Result<i32, E>`":
    // ```
    // error[E0282]: type annotations needed for `std::result::Result<i32, E>`
    //  --> file.rs:L:CC
    //   |
    // L |     let b = Ok(4);
    //   |         -   ^^ cannot infer type for `E` in `std::result::Result<i32, E>`
    //   |         |
    //   |         consider giving `b` the explicit type `std::result::Result<i32, E>`, where
    //   |         the type parameter `E` is specified
    // ```
    let error_code = error_code.into();
    let mut err = self.tcx.sess.struct_span_err_with_code(
        err_span,
        &format!("type annotations needed{}", ty_msg),
        error_code,
    );
    // Text describing what annotation we will suggest adding.
    let suffix = match local_visitor.found_node_ty {
        Some(ty) if ty.is_closure() => {
            let substs =
                if let ty::Closure(_, substs) = *ty.kind() { substs } else { unreachable!() };
            let fn_sig = substs.as_closure().sig();
            let ret = fn_sig.output().skip_binder().to_string();
            let closure_decl_and_body_id =
                local_visitor.found_closure.and_then(|closure| match &closure.kind {
                    ExprKind::Closure(_, decl, body_id, ..) => Some((decl, *body_id)),
                    _ => None,
                });
            if let Some((decl, body_id)) = closure_decl_and_body_id {
                closure_return_type_suggestion(
                    span,
                    &mut err,
                    &decl.output,
                    self.tcx.hir().body(body_id),
                    &descr,
                    &name,
                    &ret,
                    parent_name,
                    parent_descr,
                );
                // We don't want to give the other suggestions when the problem is the
                // closure return type.
                return err;
            }
            // This shouldn't be reachable, but just in case we leave a reasonable fallback.
            let args = closure_args(&fn_sig);
            // This suggestion is incomplete, as the user will get further type inference
            // errors due to the `_` placeholders and the introduction of `Box`, but it does
            // nudge them in the right direction.
            format!("a boxed closure type like `Box<dyn Fn({}) -> {}>`", args, ret)
        }
        Some(ty) if is_named_and_not_impl_trait(ty) && name == "_" => {
            let ty = ty_to_string(ty);
            format!("the explicit type `{}`, with the type parameters specified", ty)
        }
        Some(ty) if is_named_and_not_impl_trait(ty) && ty.to_string() != name => {
            let ty = ty_to_string(ty);
            format!(
                "the explicit type `{}`, where the type parameter `{}` is specified",
                ty, name,
            )
        }
        _ => "a type".to_string(),
    };
    if let Some(e) = local_visitor.found_exact_method_call {
        if let ExprKind::MethodCall(segment, ..) = &e.kind {
            // Suggest specifying type params or point out the return type of the call:
            //
            // error[E0282]: type annotations needed
            //   --> $DIR/type-annotations-needed-expr.rs:2:39
            //    |
            // LL |     let _ = x.into_iter().sum() as f64;
            //    |                           ^^^
            //    |                           |
            //    |                           cannot infer type for `S`
            //    |                           help: consider specifying the type argument in
            //    |                           the method call: `sum::<S>`
            //    |
            //    = note: type must be known at this point
            //
            // or
            //
            // error[E0282]: type annotations needed
            //   --> $DIR/issue-65611.rs:59:20
            //    |
            // LL |     let x = buffer.last().unwrap().0.clone();
            //    |             -------^^^^--
            //    |             |      |
            //    |             |      cannot infer type for `T`
            //    |             this method call resolves to `std::option::Option<&T>`
            //    |
            //    = note: type must be known at this point
            self.annotate_method_call(segment, e, &mut err);
        }
    } else if let Some(pattern) = local_visitor.found_arg_pattern {
        // We don't want to show the default label for closures.
        //
        // So, before clearing, the output would look something like this:
        // ```
        // let x = |_| {  };
        //          -  ^^^^ cannot infer type for `[_; 0]`
        //          |
        //          consider giving this closure parameter a type
        // ```
        //
        // After clearing, it looks something like this:
        // ```
        // let x = |_| {  };
        //          ^ consider giving this closure parameter the type `[_; 0]`
        //            with the type parameter `_` specified
        // ```
        err.span_label(
            pattern.span,
            format!("consider giving this closure parameter {}", suffix),
        );
    } else if let Some(pattern) = local_visitor.found_local_pattern {
        let msg = if let Some(simple_ident) = pattern.simple_ident() {
            match pattern.span.desugaring_kind() {
                None => format!("consider giving `{}` {}", simple_ident, suffix),
                Some(DesugaringKind::ForLoop(_)) => {
                    "the element type for this iterator is not specified".to_string()
                }
                _ => format!("this needs {}", suffix),
            }
        } else {
            format!("consider giving this pattern {}", suffix)
        };
        err.span_label(pattern.span, msg);
    } else if let Some(e) = local_visitor.found_method_call {
        if let ExprKind::MethodCall(segment, ..) = &e.kind {
            // Suggest specifying type params or point out the return type of the call:
            //
            // error[E0282]: type annotations needed
            //   --> $DIR/type-annotations-needed-expr.rs:2:39
            //    |
            // LL |     let _ = x.into_iter().sum() as f64;
            //    |                           ^^^
            //    |                           |
            //    |                           cannot infer type for `S`
            //    |                           help: consider specifying the type argument in
            //    |                           the method call: `sum::<S>`
            //    |
            //    = note: type must be known at this point
            //
            // or
            //
            // error[E0282]: type annotations needed
            //   --> $DIR/issue-65611.rs:59:20
            //    |
            // LL |     let x = buffer.last().unwrap().0.clone();
            //    |             -------^^^^--
            //    |             |      |
            //    |             |      cannot infer type for `T`
            //    |             this method call resolves to `std::option::Option<&T>`
            //    |
            //    = note: type must be known at this point
            self.annotate_method_call(segment, e, &mut err);
        }
    }
    // Instead of the following:
    // error[E0282]: type annotations needed
    //  --> file2.rs:3:15
    //   |
    // 3 |     let _ = x.sum() as f64;
    //   |             --^^^--------- cannot infer type for `S`
    //   |
    //   = note: type must be known at this point
    // We want:
    // error[E0282]: type annotations needed
    //  --> file2.rs:3:15
    //   |
    // 3 |     let _ = x.sum() as f64;
    //   |               ^^^ cannot infer type for `S`
    //   |
    //   = note: type must be known at this point
    let span = name_sp.unwrap_or(err_span);
    if !err
        .span
        .span_labels()
        .iter()
        .any(|span_label| span_label.label.is_some() && span_label.span == span)
        && local_visitor.found_arg_pattern.is_none()
    {
        // Avoid multiple labels pointing at `span`.
        err.span_label(
            span,
            InferCtxt::missing_type_msg(kind_str, &name, &descr, parent_name, parent_descr),
        );
    }
    err
}
/// If the `FnSig` for the method call can be found and type arguments are identified as
/// needed, suggest annotating the call, otherwise point out the resulting type of the call.
fn annotate_method_call(
    &self,
    segment: &hir::PathSegment<'_>,
    e: &Expr<'_>,
    err: &mut DiagnosticBuilder<'_>,
) {
    // Only act when typeck is in progress and the call carries no explicit
    // generic arguments already.
    if let (Some(typeck_results), None) = (self.in_progress_typeck_results, &segment.args) {
        let borrow = typeck_results.borrow();
        if let Some((DefKind::AssocFn, did)) = borrow.type_dependent_def(e.hir_id) {
            let generics = self.tcx.generics_of(did);
            if !generics.params.is_empty() {
                // The method is generic: suggest turbofishing all of its
                // parameters, e.g. `sum::<S>`.
                err.span_suggestion_verbose(
                    segment.ident.span.shrink_to_hi(),
                    &format!(
                        "consider specifying the type argument{} in the method call",
                        pluralize!(generics.params.len()),
                    ),
                    format!(
                        "::<{}>",
                        generics
                            .params
                            .iter()
                            .map(|p| p.name.to_string())
                            .collect::<Vec<String>>()
                            .join(", ")
                    ),
                    Applicability::HasPlaceholders,
                );
            } else {
                // Not generic: at least tell the user what the call
                // resolves to, and where an associated type is defined.
                let sig = self.tcx.fn_sig(did);
                let bound_output = sig.output();
                let output = bound_output.skip_binder();
                err.span_label(e.span, &format!("this method call resolves to `{}`", output));
                let kind = output.kind();
                if let ty::Projection(proj) = kind {
                    if let Some(span) = self.tcx.hir().span_if_local(proj.item_def_id) {
                        err.span_label(span, &format!("`{}` defined here", output));
                    }
                }
            }
        }
    }
}
/// Builds the E0698 diagnostic for a type that must be known inside a
/// generator (`async` block/fn or generator literal, per `kind`) but could
/// not be inferred.
pub fn need_type_info_err_in_generator(
    &self,
    kind: hir::GeneratorKind,
    span: Span,
    ty: Ty<'tcx>,
) -> DiagnosticBuilder<'tcx> {
    let ty = self.resolve_vars_if_possible(&ty);
    let (name, _, descr, parent_name, parent_descr) = self.extract_type_name(ty.into(), None);
    let mut err = struct_span_err!(
        self.tcx.sess,
        span,
        E0698,
        "type inside {} must be known in this context",
        kind,
    );
    err.span_label(
        span,
        InferCtxt::missing_type_msg("type", &name, &descr, parent_name, parent_descr),
    );
    err
}
/// Builds the "cannot infer …" label text.
///
/// `kind_str` is `"type"` or `"the value"` (see `need_type_info_err`);
/// `type_name` is `"_"` when the variable is wholly unknown, in which case
/// only the generic "cannot infer {kind}" message is produced.
fn missing_type_msg(
    kind_str: &str,
    type_name: &str,
    descr: &str,
    parent_name: Option<String>,
    parent_descr: Option<&str>,
) -> String {
    if type_name == "_" {
        format!("cannot infer {}", kind_str)
    } else {
        // Optional " declared on the <descr> `<name>`" suffix.
        let parent_desc = if let Some(parent_name) = parent_name {
            let parent_type_descr = if let Some(parent_descr) = parent_descr {
                format!(" the {}", parent_descr)
            } else {
                "".into()
            };
            format!(" declared on{} `{}`", parent_type_descr, parent_name)
        } else {
            "".to_string()
        };
        // Callers pass `kind_str == "the value"` for const arguments; the
        // previous comparison against `"value"` could never match, leaving
        // the "of" branch unreachable and producing "cannot infer the value
        // for const parameter" instead of "... of const parameter".
        let preposition = if "the value" == kind_str { "of" } else { "for" };
        // For example: "cannot infer type for type parameter `T`"
        format!(
            "cannot infer {} {} {} `{}`{}",
            kind_str, preposition, descr, type_name, parent_desc
        )
    }
}
}
Removed an unused path.
use crate::infer::type_variable::TypeVariableOriginKind;
use crate::infer::InferCtxt;
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Namespace};
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::{Body, Expr, ExprKind, FnRetTy, HirId, Local, Pat};
use rustc_middle::hir::map::Map;
use rustc_middle::infer::unify_key::ConstVariableOriginKind;
use rustc_middle::ty::print::Print;
use rustc_middle::ty::subst::{GenericArg, GenericArgKind};
use rustc_middle::ty::{self, DefIdTree, InferConst, Ty};
use rustc_span::source_map::DesugaringKind;
use rustc_span::symbol::kw;
use rustc_span::Span;
use std::borrow::Cow;
/// HIR visitor that searches a body for the node whose type mentions the
/// unresolved inference variable `target`, recording the most useful
/// candidates for anchoring a "type annotations needed" diagnostic.
struct FindHirNodeVisitor<'a, 'tcx> {
    infcx: &'a InferCtxt<'a, 'tcx>,
    /// The unresolved type/const inference variable being searched for.
    target: GenericArg<'tcx>,
    /// Span of the expression that produced `target`.
    target_span: Span,
    /// Resolved type of the most recently recorded node.
    found_node_ty: Option<Ty<'tcx>>,
    /// First `let` pattern whose type mentions `target` (see `visit_local`).
    found_local_pattern: Option<&'tcx Pat<'tcx>>,
    /// First body-parameter pattern inside `target_span` whose type
    /// mentions `target` (see `visit_body`).
    found_arg_pattern: Option<&'tcx Pat<'tcx>>,
    /// Closure expression whose type mentions `target`.
    found_closure: Option<&'tcx Expr<'tcx>>,
    /// Method-call expression whose type mentions `target`.
    found_method_call: Option<&'tcx Expr<'tcx>>,
    /// Method call at exactly `target_span` whose receiver's type equals
    /// `target` (see `visit_expr`).
    found_exact_method_call: Option<&'tcx Expr<'tcx>>,
}
impl<'a, 'tcx> FindHirNodeVisitor<'a, 'tcx> {
    /// Creates a visitor searching for `target`, whose originating
    /// expression is at `target_span`. All `found_*` slots start empty.
    fn new(infcx: &'a InferCtxt<'a, 'tcx>, target: GenericArg<'tcx>, target_span: Span) -> Self {
        Self {
            infcx,
            target,
            target_span,
            found_node_ty: None,
            found_local_pattern: None,
            found_arg_pattern: None,
            found_closure: None,
            found_method_call: None,
            found_exact_method_call: None,
        }
    }
    /// Returns the (partially resolved) type recorded for `hir_id` if that
    /// type mentions `target` — either verbatim, or as a type variable that
    /// has been sub-unified with `target`'s variable; `None` otherwise.
    fn node_ty_contains_target(&mut self, hir_id: HirId) -> Option<Ty<'tcx>> {
        // Look the node's type up in the in-progress typeck results, if any.
        let ty_opt = self
            .infcx
            .in_progress_typeck_results
            .and_then(|typeck_results| typeck_results.borrow().node_type_opt(hir_id));
        match ty_opt {
            Some(ty) => {
                let ty = self.infcx.resolve_vars_if_possible(&ty);
                // Walk every generic arg nested in `ty` looking for the target.
                if ty.walk().any(|inner| {
                    inner == self.target
                        || match (inner.unpack(), self.target.unpack()) {
                            (GenericArgKind::Type(inner_ty), GenericArgKind::Type(target_ty)) => {
                                match (inner_ty.kind(), target_ty.kind()) {
                                    // Two distinct inference variables still
                                    // count as a match if they have been
                                    // sub-unified.
                                    (
                                        &ty::Infer(ty::TyVar(a_vid)),
                                        &ty::Infer(ty::TyVar(b_vid)),
                                    ) => self
                                        .infcx
                                        .inner
                                        .borrow_mut()
                                        .type_variables()
                                        .sub_unified(a_vid, b_vid),
                                    _ => false,
                                }
                            }
                            _ => false,
                        }
                }) {
                    Some(ty)
                } else {
                    None
                }
            }
            None => None,
        }
    }
}
impl<'a, 'tcx> Visitor<'tcx> for FindHirNodeVisitor<'a, 'tcx> {
    type Map = Map<'tcx>;
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::OnlyBodies(self.infcx.tcx.hir())
    }
    /// Records the first `let` pattern whose node type mentions the target.
    fn visit_local(&mut self, local: &'tcx Local<'tcx>) {
        if let (None, Some(ty)) =
            (self.found_local_pattern, self.node_ty_contains_target(local.hir_id))
        {
            // FIXME: There's a trade-off here - we can either check that our target span
            // is contained in `local.span` or not. If we choose to check containment
            // we can avoid some spurious suggestions (see #72690), but we lose
            // the ability to report on things like:
            //
            // ```
            // let x = vec![];
            // ```
            //
            // because the target span will be in the macro expansion of `vec![]`.
            // At present we choose not to check containment.
            self.found_local_pattern = Some(&*local.pat);
            self.found_node_ty = Some(ty);
        }
        intravisit::walk_local(self, local);
    }
    /// Records the first body parameter whose type mentions the target,
    /// but only when its pattern lies inside `target_span`.
    fn visit_body(&mut self, body: &'tcx Body<'tcx>) {
        for param in body.params {
            if let (None, Some(ty)) =
                (self.found_arg_pattern, self.node_ty_contains_target(param.hir_id))
            {
                if self.target_span.contains(param.pat.span) {
                    self.found_arg_pattern = Some(&*param.pat);
                    self.found_node_ty = Some(ty);
                }
            }
        }
        intravisit::walk_body(self, body);
    }
    fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
        // A method call at exactly the target span whose receiver (first
        // element of `exprs`) has exactly the target type is the best
        // possible anchor — record it and stop descending.
        if let ExprKind::MethodCall(_, call_span, exprs, _) = expr.kind {
            if call_span == self.target_span
                && Some(self.target)
                    == self.infcx.in_progress_typeck_results.and_then(|typeck_results| {
                        typeck_results
                            .borrow()
                            .node_type_opt(exprs.first().unwrap().hir_id)
                            .map(Into::into)
                    })
            {
                self.found_exact_method_call = Some(&expr);
                return;
            }
        }
        // Otherwise remember closures and method calls whose type merely
        // mentions the target; later candidates overwrite earlier ones.
        if self.node_ty_contains_target(expr.hir_id).is_some() {
            match expr.kind {
                ExprKind::Closure(..) => self.found_closure = Some(&expr),
                ExprKind::MethodCall(..) => self.found_method_call = Some(&expr),
                _ => {}
            }
        }
        intravisit::walk_expr(self, expr);
    }
}
/// Suggest giving an appropriate return type to a closure expression.
///
/// `output`/`body` describe the closure; `descr`, `name`, `ret`,
/// `parent_name` and `parent_descr` feed the "cannot infer …" label text.
fn closure_return_type_suggestion(
    span: Span,
    err: &mut DiagnosticBuilder<'_>,
    output: &FnRetTy<'_>,
    body: &Body<'_>,
    descr: &str,
    name: &str,
    ret: &str,
    parent_name: Option<String>,
    parent_descr: Option<&str>,
) {
    // An elided return needs the `-> ` arrow inserted; an explicit
    // (placeholder) return type is rewritten in place.
    let (arrow, post) = match output {
        FnRetTy::DefaultReturn(_) => ("-> ", " "),
        _ => ("", ""),
    };
    // Expression bodies (`|| expr`) must additionally be wrapped in braces
    // for the suggested code to parse.
    let suggestion = match body.value.kind {
        ExprKind::Block(..) => vec![(output.span(), format!("{}{}{}", arrow, ret, post))],
        _ => vec![
            (output.span(), format!("{}{}{}{{ ", arrow, ret, post)),
            (body.value.span.shrink_to_hi(), " }".to_string()),
        ],
    };
    err.multipart_suggestion(
        "give this closure an explicit return type without `_` placeholders",
        suggestion,
        Applicability::HasPlaceholders,
    );
    err.span_label(
        span,
        InferCtxt::missing_type_msg("type", &name, &descr, parent_name, parent_descr),
    );
}
/// Given a closure signature, return a `String` containing a list of all its argument types.
fn closure_args(fn_sig: &ty::PolyFnSig<'_>) -> String {
    // A closure signature stores its arguments as a single tuple type in
    // the first input slot.
    match fn_sig.inputs().skip_binder().first() {
        Some(args) => {
            let rendered: Vec<String> = args.tuple_fields().map(|arg| arg.to_string()).collect();
            rendered.join(", ")
        }
        None => String::new(),
    }
}
/// Which "type annotations needed" error code a diagnostic should carry.
pub enum TypeAnnotationNeeded {
    /// The type of an expression or binding cannot be inferred.
    /// ```compile_fail,E0282
    /// let x = "hello".chars().rev().collect();
    /// ```
    E0282,
    /// An implementation cannot be chosen unambiguously because of lack of information.
    /// ```compile_fail,E0283
    /// let _ = Default::default();
    /// ```
    E0283,
    /// The value of an associated type could not be inferred.
    /// ```compile_fail,E0284
    /// let mut d: u64 = 2;
    /// d = d % 1u32.into();
    /// ```
    E0284,
}
impl Into<rustc_errors::DiagnosticId> for TypeAnnotationNeeded {
fn into(self) -> rustc_errors::DiagnosticId {
match self {
Self::E0282 => rustc_errors::error_code!(E0282),
Self::E0283 => rustc_errors::error_code!(E0283),
Self::E0284 => rustc_errors::error_code!(E0284),
}
}
}
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
    /// Describes the inference variable `arg` for diagnostics.
    ///
    /// Returns `(name, definition span, kind description, parent item name,
    /// parent item kind)`. When the variable originates from a named
    /// type/const parameter, its name and span are used; otherwise the
    /// type/const is pretty-printed and no span is returned.
    pub fn extract_type_name(
        &self,
        arg: GenericArg<'tcx>,
        highlight: Option<ty::print::RegionHighlightMode>,
    ) -> (String, Option<Span>, Cow<'static, str>, Option<String>, Option<&'static str>) {
        match arg.unpack() {
            GenericArgKind::Type(ty) => {
                if let ty::Infer(ty::TyVar(ty_vid)) = *ty.kind() {
                    let mut inner = self.inner.borrow_mut();
                    let ty_vars = &inner.type_variables();
                    let var_origin = ty_vars.var_origin(ty_vid);
                    if let TypeVariableOriginKind::TypeParameterDefinition(name, def_id) =
                        var_origin.kind
                    {
                        // Find the item the parameter was declared on, for
                        // "declared on the function `foo`"-style wording.
                        let parent_def_id = def_id.and_then(|def_id| self.tcx.parent(def_id));
                        let (parent_name, parent_desc) = if let Some(parent_def_id) = parent_def_id
                        {
                            let parent_name = self
                                .tcx
                                .def_key(parent_def_id)
                                .disambiguated_data
                                .data
                                .get_opt_name()
                                .map(|parent_symbol| parent_symbol.to_string());
                            (
                                parent_name,
                                Some(self.tcx.def_kind(parent_def_id).descr(parent_def_id)),
                            )
                        } else {
                            (None, None)
                        };
                        // `Self` is not useful to name in a diagnostic; fall
                        // through to pretty-printing in that case.
                        if name != kw::SelfUpper {
                            return (
                                name.to_string(),
                                Some(var_origin.span),
                                "type parameter".into(),
                                parent_name,
                                parent_desc,
                            );
                        }
                    }
                }
                // Fallback: pretty-print the type itself.
                let mut s = String::new();
                let mut printer = ty::print::FmtPrinter::new(self.tcx, &mut s, Namespace::TypeNS);
                if let Some(highlight) = highlight {
                    printer.region_highlight_mode = highlight;
                }
                let _ = ty.print(printer);
                (s, None, ty.prefix_string(), None, None)
            }
            GenericArgKind::Const(ct) => {
                if let ty::ConstKind::Infer(InferConst::Var(vid)) = ct.val {
                    let origin =
                        self.inner.borrow_mut().const_unification_table().probe_value(vid).origin;
                    if let ConstVariableOriginKind::ConstParameterDefinition(name, def_id) =
                        origin.kind
                    {
                        let parent_def_id = self.tcx.parent(def_id);
                        let (parent_name, parent_descr) = if let Some(parent_def_id) = parent_def_id
                        {
                            let parent_name = self
                                .tcx
                                .def_key(parent_def_id)
                                .disambiguated_data
                                .data
                                .get_opt_name()
                                .map(|parent_symbol| parent_symbol.to_string());
                            (
                                parent_name,
                                Some(self.tcx.def_kind(parent_def_id).descr(parent_def_id)),
                            )
                        } else {
                            (None, None)
                        };
                        return (
                            name.to_string(),
                            Some(origin.span),
                            "const parameter".into(),
                            parent_name,
                            parent_descr,
                        );
                    }
                }
                // Fallback: pretty-print the const itself.
                let mut s = String::new();
                let mut printer = ty::print::FmtPrinter::new(self.tcx, &mut s, Namespace::TypeNS);
                if let Some(highlight) = highlight {
                    printer.region_highlight_mode = highlight;
                }
                let _ = ct.print(printer);
                (s, None, "value".into(), None, None)
            }
            GenericArgKind::Lifetime(_) => bug!("unexpected lifetime"),
        }
    }
    /// Builds the "type annotations needed" diagnostic (E0282/E0283/E0284)
    /// for the unresolved inference variable `ty`, pointing at the most
    /// helpful HIR node found in `body_id` and suggesting how to annotate it.
    pub fn need_type_info_err(
        &self,
        body_id: Option<hir::BodyId>,
        span: Span,
        ty: GenericArg<'tcx>,
        error_code: TypeAnnotationNeeded,
    ) -> DiagnosticBuilder<'tcx> {
        let ty = self.resolve_vars_if_possible(&ty);
        let (name, name_sp, descr, parent_name, parent_descr) = self.extract_type_name(ty, None);
        // "type" vs "the value" (const generics) for the message wording.
        let kind_str = match ty.unpack() {
            GenericArgKind::Type(_) => "type",
            GenericArgKind::Const(_) => "the value",
            GenericArgKind::Lifetime(_) => bug!("unexpected lifetime"),
        };
        let mut local_visitor = FindHirNodeVisitor::new(&self, ty.into(), span);
        // Pretty-prints `ty`, naming still-unresolved inference variables
        // after the type parameter they were instantiated from when known.
        let ty_to_string = |ty: Ty<'tcx>| -> String {
            let mut s = String::new();
            let mut printer = ty::print::FmtPrinter::new(self.tcx, &mut s, Namespace::TypeNS);
            let mut inner = self.inner.borrow_mut();
            let ty_vars = inner.type_variables();
            let getter = move |ty_vid| {
                let var_origin = ty_vars.var_origin(ty_vid);
                if let TypeVariableOriginKind::TypeParameterDefinition(name, _) = var_origin.kind {
                    return Some(name.to_string());
                }
                None
            };
            printer.name_resolver = Some(Box::new(&getter));
            let _ = if let ty::FnDef(..) = ty.kind() {
                // We don't want the regular output for `fn`s because it includes its path in
                // invalid pseudo-syntax, we want the `fn`-pointer output instead.
                ty.fn_sig(self.tcx).print(printer)
            } else {
                ty.print(printer)
            };
            s
        };
        // Walk the body (if any) to fill the visitor's `found_*` slots.
        if let Some(body_id) = body_id {
            let expr = self.tcx.hir().expect_expr(body_id.hir_id);
            local_visitor.visit_expr(expr);
        }
        // Pick the tightest span available to anchor the error at.
        let err_span = if let Some(pattern) = local_visitor.found_arg_pattern {
            pattern.span
        } else if let Some(span) = name_sp {
            // `span` here lets us point at `sum` instead of the entire right hand side expr:
            // error[E0282]: type annotations needed
            //  --> file2.rs:3:15
            //   |
            // 3 |     let _ = x.sum() as f64;
            //   |               ^^^ cannot infer type for `S`
            span
        } else if let Some(ExprKind::MethodCall(_, call_span, _, _)) =
            local_visitor.found_method_call.map(|e| &e.kind)
        {
            // Point at the call instead of the whole expression:
            // error[E0284]: type annotations needed
            //  --> file.rs:2:5
            //   |
            // 2 |     vec![Ok(2)].into_iter().collect()?;
            //   |     ^^^^^^^ cannot infer type
            //   |
            //   = note: cannot resolve `<_ as std::ops::Try>::Ok == _`
            if span.contains(*call_span) { *call_span } else { span }
        } else {
            span
        };
        let is_named_and_not_impl_trait = |ty: Ty<'_>| {
            &ty.to_string() != "_" &&
                // FIXME: Remove this check after `impl_trait_in_bindings` is stabilized. #63527
                (!ty.is_impl_trait() || self.tcx.features().impl_trait_in_bindings)
        };
        // Optional " for `...`" suffix on the error title.
        let ty_msg = match (local_visitor.found_node_ty, local_visitor.found_exact_method_call) {
            (_, Some(_)) => String::new(),
            (Some(ty), _) if ty.is_closure() => {
                let substs =
                    if let ty::Closure(_, substs) = *ty.kind() { substs } else { unreachable!() };
                let fn_sig = substs.as_closure().sig();
                let args = closure_args(&fn_sig);
                let ret = fn_sig.output().skip_binder().to_string();
                format!(" for the closure `fn({}) -> {}`", args, ret)
            }
            (Some(ty), _) if is_named_and_not_impl_trait(ty) => {
                let ty = ty_to_string(ty);
                format!(" for `{}`", ty)
            }
            _ => String::new(),
        };
        // When `name` corresponds to a type argument, show the path of the full type we're
        // trying to infer. In the following example, `ty_msg` contains
        // " in `std::result::Result<i32, E>`":
        // ```
        // error[E0282]: type annotations needed for `std::result::Result<i32, E>`
        //  --> file.rs:L:CC
        //   |
        // L |     let b = Ok(4);
        //   |         -   ^^ cannot infer type for `E` in `std::result::Result<i32, E>`
        //   |         |
        //   |         consider giving `b` the explicit type `std::result::Result<i32, E>`, where
        //   |         the type parameter `E` is specified
        // ```
        let error_code = error_code.into();
        let mut err = self.tcx.sess.struct_span_err_with_code(
            err_span,
            &format!("type annotations needed{}", ty_msg),
            error_code,
        );
        // Text describing what annotation to add, used by the labels below.
        let suffix = match local_visitor.found_node_ty {
            Some(ty) if ty.is_closure() => {
                let substs =
                    if let ty::Closure(_, substs) = *ty.kind() { substs } else { unreachable!() };
                let fn_sig = substs.as_closure().sig();
                let ret = fn_sig.output().skip_binder().to_string();
                let closure_decl_and_body_id =
                    local_visitor.found_closure.and_then(|closure| match &closure.kind {
                        ExprKind::Closure(_, decl, body_id, ..) => Some((decl, *body_id)),
                        _ => None,
                    });
                if let Some((decl, body_id)) = closure_decl_and_body_id {
                    closure_return_type_suggestion(
                        span,
                        &mut err,
                        &decl.output,
                        self.tcx.hir().body(body_id),
                        &descr,
                        &name,
                        &ret,
                        parent_name,
                        parent_descr,
                    );
                    // We don't want to give the other suggestions when the problem is the
                    // closure return type.
                    return err;
                }
                // This shouldn't be reachable, but just in case we leave a reasonable fallback.
                let args = closure_args(&fn_sig);
                // This suggestion is incomplete, as the user will get further type inference
                // errors due to the `_` placeholders and the introduction of `Box`, but it does
                // nudge them in the right direction.
                format!("a boxed closure type like `Box<dyn Fn({}) -> {}>`", args, ret)
            }
            Some(ty) if is_named_and_not_impl_trait(ty) && name == "_" => {
                let ty = ty_to_string(ty);
                format!("the explicit type `{}`, with the type parameters specified", ty)
            }
            Some(ty) if is_named_and_not_impl_trait(ty) && ty.to_string() != name => {
                let ty = ty_to_string(ty);
                format!(
                    "the explicit type `{}`, where the type parameter `{}` is specified",
                    ty, name,
                )
            }
            _ => "a type".to_string(),
        };
        if let Some(e) = local_visitor.found_exact_method_call {
            if let ExprKind::MethodCall(segment, ..) = &e.kind {
                // Suggest specifying type params or point out the return type of the call:
                //
                // error[E0282]: type annotations needed
                //   --> $DIR/type-annotations-needed-expr.rs:2:39
                //    |
                // LL |     let _ = x.into_iter().sum() as f64;
                //    |                           ^^^
                //    |                           |
                //    |                           cannot infer type for `S`
                //    |                           help: consider specifying the type argument in
                //    |                           the method call: `sum::<S>`
                //    |
                //    = note: type must be known at this point
                //
                // or
                //
                // error[E0282]: type annotations needed
                //   --> $DIR/issue-65611.rs:59:20
                //    |
                // LL |     let x = buffer.last().unwrap().0.clone();
                //    |             -------^^^^--
                //    |             |      |
                //    |             |      cannot infer type for `T`
                //    |             this method call resolves to `std::option::Option<&T>`
                //    |
                //    = note: type must be known at this point
                self.annotate_method_call(segment, e, &mut err);
            }
        } else if let Some(pattern) = local_visitor.found_arg_pattern {
            // We don't want to show the default label for closures.
            //
            // So, before clearing, the output would look something like this:
            // ```
            //     let x = |_| {  };
            //          -  ^^^^ cannot infer type for `[_; 0]`
            //          |
            //          consider giving this closure parameter a type
            // ```
            //
            // After clearing, it looks something like this:
            // ```
            //     let x = |_| {  };
            //              ^ consider giving this closure parameter the type `[_; 0]`
            //                with the type parameter `_` specified
            // ```
            err.span_label(
                pattern.span,
                format!("consider giving this closure parameter {}", suffix),
            );
        } else if let Some(pattern) = local_visitor.found_local_pattern {
            let msg = if let Some(simple_ident) = pattern.simple_ident() {
                match pattern.span.desugaring_kind() {
                    None => format!("consider giving `{}` {}", simple_ident, suffix),
                    Some(DesugaringKind::ForLoop(_)) => {
                        "the element type for this iterator is not specified".to_string()
                    }
                    _ => format!("this needs {}", suffix),
                }
            } else {
                format!("consider giving this pattern {}", suffix)
            };
            err.span_label(pattern.span, msg);
        } else if let Some(e) = local_visitor.found_method_call {
            if let ExprKind::MethodCall(segment, ..) = &e.kind {
                // Suggest specifying type params or point out the return type of the call:
                //
                // error[E0282]: type annotations needed
                //   --> $DIR/type-annotations-needed-expr.rs:2:39
                //    |
                // LL |     let _ = x.into_iter().sum() as f64;
                //    |                           ^^^
                //    |                           |
                //    |                           cannot infer type for `S`
                //    |                           help: consider specifying the type argument in
                //    |                           the method call: `sum::<S>`
                //    |
                //    = note: type must be known at this point
                //
                // or
                //
                // error[E0282]: type annotations needed
                //   --> $DIR/issue-65611.rs:59:20
                //    |
                // LL |     let x = buffer.last().unwrap().0.clone();
                //    |             -------^^^^--
                //    |             |      |
                //    |             |      cannot infer type for `T`
                //    |             this method call resolves to `std::option::Option<&T>`
                //    |
                //    = note: type must be known at this point
                self.annotate_method_call(segment, e, &mut err);
            }
        }
        // Instead of the following:
        // error[E0282]: type annotations needed
        //  --> file2.rs:3:15
        //   |
        // 3 |     let _ = x.sum() as f64;
        //   |             --^^^--------- cannot infer type for `S`
        //   |
        //   = note: type must be known at this point
        // We want:
        // error[E0282]: type annotations needed
        //  --> file2.rs:3:15
        //   |
        // 3 |     let _ = x.sum() as f64;
        //   |               ^^^ cannot infer type for `S`
        //   |
        //   = note: type must be known at this point
        let span = name_sp.unwrap_or(err_span);
        if !err
            .span
            .span_labels()
            .iter()
            .any(|span_label| span_label.label.is_some() && span_label.span == span)
            && local_visitor.found_arg_pattern.is_none()
        {
            // Avoid multiple labels pointing at `span`.
            err.span_label(
                span,
                InferCtxt::missing_type_msg(kind_str, &name, &descr, parent_name, parent_descr),
            );
        }
        err
    }
    /// If the `FnSig` for the method call can be found and type arguments are identified as
    /// needed, suggest annotating the call, otherwise point out the resulting type of the call.
    fn annotate_method_call(
        &self,
        segment: &hir::PathSegment<'_>,
        e: &Expr<'_>,
        err: &mut DiagnosticBuilder<'_>,
    ) {
        // Only fires when typeck results are available and the user wrote no
        // explicit turbofish (`segment.args` is `None`).
        if let (Some(typeck_results), None) = (self.in_progress_typeck_results, &segment.args) {
            let borrow = typeck_results.borrow();
            if let Some((DefKind::AssocFn, did)) = borrow.type_dependent_def(e.hir_id) {
                let generics = self.tcx.generics_of(did);
                if !generics.params.is_empty() {
                    // The method is generic: suggest `::<T, ...>` after its name.
                    err.span_suggestion_verbose(
                        segment.ident.span.shrink_to_hi(),
                        &format!(
                            "consider specifying the type argument{} in the method call",
                            pluralize!(generics.params.len()),
                        ),
                        format!(
                            "::<{}>",
                            generics
                                .params
                                .iter()
                                .map(|p| p.name.to_string())
                                .collect::<Vec<String>>()
                                .join(", ")
                        ),
                        Applicability::HasPlaceholders,
                    );
                } else {
                    // Not generic: point at what the call resolves to instead.
                    let sig = self.tcx.fn_sig(did);
                    let bound_output = sig.output();
                    let output = bound_output.skip_binder();
                    err.span_label(e.span, &format!("this method call resolves to `{}`", output));
                    let kind = output.kind();
                    if let ty::Projection(proj) = kind {
                        if let Some(span) = self.tcx.hir().span_if_local(proj.item_def_id) {
                            err.span_label(span, &format!("`{}` defined here", output));
                        }
                    }
                }
            }
        }
    }
    /// Reports E0698: a type inside an async block/generator must be fully
    /// known in this context.
    pub fn need_type_info_err_in_generator(
        &self,
        kind: hir::GeneratorKind,
        span: Span,
        ty: Ty<'tcx>,
    ) -> DiagnosticBuilder<'tcx> {
        let ty = self.resolve_vars_if_possible(&ty);
        let (name, _, descr, parent_name, parent_descr) = self.extract_type_name(ty.into(), None);
        let mut err = struct_span_err!(
            self.tcx.sess,
            span,
            E0698,
            "type inside {} must be known in this context",
            kind,
        );
        err.span_label(
            span,
            InferCtxt::missing_type_msg("type", &name, &descr, parent_name, parent_descr),
        );
        err
    }
fn missing_type_msg(
kind_str: &str,
type_name: &str,
descr: &str,
parent_name: Option<String>,
parent_descr: Option<&str>,
) -> String {
if type_name == "_" {
format!("cannot infer {}", kind_str)
} else {
let parent_desc = if let Some(parent_name) = parent_name {
let parent_type_descr = if let Some(parent_descr) = parent_descr {
format!(" the {}", parent_descr)
} else {
"".into()
};
format!(" declared on{} `{}`", parent_type_descr, parent_name)
} else {
"".to_string()
};
let preposition = if "value" == kind_str { "of" } else { "for" };
// For example: "cannot infer type for type parameter `T`"
format!(
"cannot infer {} {} {} `{}`{}",
kind_str, preposition, descr, type_name, parent_desc
)
.into()
}
}
}
|
#![cfg(feature = "failpoints")]
mod common;
use std::collections::{BTreeMap, HashSet};
use std::sync::Mutex;
use quickcheck::{Arbitrary, Gen, QuickCheck, StdGen};
use rand::{seq::SliceRandom, Rng};
use sled::*;
/// A single randomized operation applied to the tree under test.
#[derive(Debug, Clone)]
enum Op {
    // Insert the next sequential key (driven by `set_counter`).
    Set,
    // Remove key `[0, k]`.
    Del(u8),
    // Generate a monotonic ID via `Tree::generate_id`.
    Id,
    // Drop and reopen the database, then verify it against the model.
    Restart,
    // Flush, making all prior writes durable.
    Flush,
    // Arm the named failpoint so the corresponding operation fails.
    FailPoint(&'static str),
}
use self::Op::*;
impl Arbitrary for Op {
    /// Generates a random `Op`: a failpoint activation with probability
    /// 1/30, a restart with probability ~1/10, otherwise one of
    /// Set/Del/Id/Flush uniformly. RNG call order matches the previous
    /// implementation, so generated sequences are unchanged for a given seed.
    fn arbitrary<G: Gen>(g: &mut G) -> Op {
        // Failpoints we may arm. A constant slice avoids re-allocating a
        // `Vec` on every call (the old code built one even when no
        // failpoint op was drawn).
        const FAIL_POINTS: &[&str] = &[
            "zero garbage segment",
            "zero garbage segment post",
            "buffer write",
            "buffer write post",
            "write_config bytes",
            "write_config crc",
            "write_config post",
            "segment initial free zero",
            "snap write",
            "snap write len",
            "snap write crc",
            "snap write post",
            "snap write mv",
            "snap write mv post",
            "snap write rm old",
            "blob blob write",
            "write_blob write crc",
            "write_blob write kind_byte",
            "write_blob write buf",
        ];
        if g.gen_bool(1. / 30.) {
            return FailPoint(FAIL_POINTS.choose(g).unwrap());
        }
        if g.gen_bool(1. / 10.) {
            return Restart;
        }
        match g.gen_range(0, 4) {
            0 => Set,
            1 => Del(g.gen::<u8>()),
            2 => Id,
            3 => Flush,
            _ => panic!("impossible choice"),
        }
    }

    /// Shrinks `Del` payloads toward zero; other ops don't shrink.
    fn shrink(&self) -> Box<dyn Iterator<Item = Op>> {
        match *self {
            Op::Del(ref lid) if *lid > 0 => {
                Box::new(vec![Op::Del(*lid / 2), Op::Del(*lid - 1)].into_iter())
            }
            _ => Box::new(vec![].into_iter()),
        }
    }
}
/// Decodes a two-byte big-endian key into a `u16`.
///
/// Keys whose first byte isn't a multiple of 4 must be exactly two bytes
/// long (those entries carry no value padding).
fn v(b: &[u8]) -> u16 {
    if b[0] % 4 != 0 {
        assert_eq!(b.len(), 2);
    }
    u16::from_be_bytes([b[0], b[1]])
}
/// Deactivates every currently-configured failpoint.
fn tear_down_failpoints() {
    fail::list().into_iter().for_each(|(name, _)| fail::remove(name));
}
/// Model of what the real tree may contain for one key.
#[derive(Debug)]
struct ReferenceEntry {
    // Candidate values for the key. More than one entry means recent
    // unflushed writes may or may not have survived a crash; `None`
    // entries mean the key may be absent (must be, if all are `None`).
    values: Vec<Option<u16>>,
    // Crash epoch of the last write to this key; used by `Flush` to decide
    // which uncertain values become definite.
    crash_epoch: u32,
}
/// QuickCheck property: applying `ops` (with failpoints and simulated
/// crashes) never corrupts the tree. Returns `false` on any model mismatch
/// or panic so quickcheck can shrink the failing sequence.
fn prop_tree_crashes_nicely(ops: Vec<Op>, flusher: bool) -> bool {
    // forces quickcheck to run one thread at a time
    static M: Lazy<Mutex<()>, fn() -> Mutex<()>> = Lazy::new(|| Mutex::new(()));
    let _lock = M.lock().expect("our test lock should not be poisoned");

    // clear all failpoints that may be left over from the last run
    tear_down_failpoints();
    // Run under catch_unwind so a panic is reported as a plain failure
    // (with the offending op sequence) instead of aborting quickcheck.
    let res = std::panic::catch_unwind(|| {
        run_tree_crashes_nicely(ops.clone(), flusher)
    });
    tear_down_failpoints();

    match res {
        Err(e) => {
            println!(
                "failed with {:?} on ops {:?} flusher {}",
                e, ops, flusher
            );
            false
        }
        Ok(res) => {
            if !res {
                println!("failed with ops {:?} flusher: {}", ops, flusher);
            }
            res
        }
    }
}
/// Drives a temporary tree through `ops`, modeling the expected contents in
/// a `BTreeMap` ("reference") and re-verifying the tree against that model
/// after every restart. Failpoint-induced errors count as simulated crashes.
fn run_tree_crashes_nicely(ops: Vec<Op>, flusher: bool) -> bool {
    common::setup_logger();

    let segment_size = 256;
    let config = Config::new()
        .temporary(true)
        .snapshot_after_ops(1)
        .flush_every_ms(if flusher { Some(1) } else { None })
        .cache_capacity(256)
        .idgen_persist_interval(1)
        .segment_size(segment_size);

    let mut tree = config.open().expect("tree should start");
    // Model of what the tree may/must contain, keyed by the u16 key.
    let mut reference = BTreeMap::new();
    let mut fail_points = HashSet::new();
    // Last ID handed out by `generate_id`; must be strictly increasing.
    let mut max_id: isize = -1;
    // Bumped on every simulated crash; lets `Flush` tell stale writes apart.
    let mut crash_counter = 0;

    // Reopens the tree and verifies its full iteration order/contents
    // against the reference model.
    macro_rules! restart {
        () => {
            drop(tree);
            let tree_res = config.open();
            if let Err(ref e) = tree_res {
                if e == &Error::FailPoint {
                    return true;
                }
                println!("could not start database: {}", e);
                return false;
            }
            tree = tree_res.expect("tree should restart");

            let mut ref_iter = reference.iter().map(|(ref rk, ref rv)| (**rk, *rv));
            for res in tree.iter() {
                let t = match res {
                    Ok((ref tk, _)) => v(tk),
                    Err(Error::FailPoint) => return true,
                    Err(other) => panic!("failed to iterate over items in tree after restart: {:?}", other),
                };
                // make sure the tree value is in there
                while let Some((ref_key, ref_expected)) = ref_iter.next() {
                    if ref_expected.values.iter().all(Option::is_none) {
                        // this key should not be present in the tree, skip it and move on to the
                        // next entry in the reference
                        continue;
                    } else if ref_expected.values.iter().all(Option::is_some) {
                        // this key must be present in the tree, check if the keys from both
                        // iterators match
                        if t != ref_key {
                            println!(
                                "expected to iterate over {:?} but got {:?} instead",
                                ref_key,
                                t
                            );
                            return false;
                        }
                        break;
                    } else {
                        // according to the reference, this key could either be present or absent,
                        // depending on whether recent writes were successful. check whether the
                        // keys from the two iterators match, if they do, the key happens to be
                        // present, which is okay, if they don't, and the tree iterator is further
                        // ahead than the reference iterator, the key happens to be absent, so we
                        // skip the entry in the reference. if the reference iterator ever gets
                        // further than the tree iterator, that means the tree has a key that it
                        // should not.
                        if t == ref_key {
                            // tree and reference agree, we can move on to the next tree item
                            break;
                        } else if ref_key > t {
                            // we have a bug, the reference iterator should always be <= tree
                            // (this means that the key t was in the tree, but it wasn't in
                            // the reference, so the reference iterator has advanced on past t)
                            println!(
                                "tree verification failed: expected {:?} got {:?}",
                                ref_key,
                                t
                            );
                            return false;
                        } else {
                            // we are iterating through the reference until we have an item that
                            // must be present or an uncertain item that matches the tree's real
                            // item anyway
                            continue;
                        }
                    }
                }
            }
            // finish the rest of the reference iterator, and confirm the tree isn't missing
            // any keys it needs to have at the end
            while let Some((ref_key, ref_expected)) = ref_iter.next() {
                if ref_expected.values.iter().all(Option::is_some) {
                    // this key had to be present, but we got to the end of the tree without
                    // seeing it
                    println!("tree verification failed: expected {:?} got end", ref_key);
                    return false;
                }
            }
            println!("finished verification");
        }
    }

    // Unwraps a tree operation; a `FailPoint` error counts as a crash, so we
    // disarm failpoints, bump the crash epoch, restart+verify, and continue.
    macro_rules! fp_crash {
        ($e:expr) => {
            match $e {
                Ok(thing) => thing,
                Err(Error::FailPoint) => {
                    tear_down_failpoints();
                    crash_counter += 1;
                    restart!();
                    continue;
                }
                other => {
                    println!("got non-failpoint err: {:?}", other);
                    return false;
                }
            }
        };
    }

    let mut set_counter = 0u16;

    println!("ops: {:?}", ops);
    for op in ops.into_iter() {
        match op {
            Set => {
                let hi = (set_counter >> 8) as u8;
                let lo = set_counter as u8;
                // Every 4th "hi" byte gets a large padded value so segments
                // fill up and trigger rewriting/GC paths.
                let val = if hi % 4 == 0 {
                    let mut val = vec![hi, lo];
                    val.extend(vec![
                        lo;
                        hi as usize * segment_size / 4
                            * set_counter as usize
                    ]);
                    val
                } else {
                    vec![hi, lo]
                };
                // update the reference to show that this key could be present.
                // the next Flush operation will update the
                // reference again, and require this key to be present
                // (unless there's a crash before then).
                let reference_entry = reference
                    .entry(set_counter)
                    .or_insert_with(|| ReferenceEntry {
                        values: vec![None],
                        crash_epoch: crash_counter,
                    });
                reference_entry.values.push(Some(set_counter));
                reference_entry.crash_epoch = crash_counter;
                fp_crash!(tree.insert(&[hi, lo], val));
                set_counter += 1;
            }
            Del(k) => {
                // if this key was already set, update the reference to show
                // that this key could either be present or
                // absent. the next Flush operation will update the reference
                // again, and require this key to be absent (unless there's a
                // crash before then).
                reference.entry(u16::from(k)).and_modify(|v| {
                    v.values.push(None);
                    v.crash_epoch = crash_counter;
                });
                fp_crash!(tree.remove(&*vec![0, k]));
            }
            Id => {
                let id = fp_crash!(tree.generate_id());
                assert!(
                    id as isize > max_id,
                    "generated id of {} is not larger \
                     than previous max id of {}",
                    id,
                    max_id,
                );
                max_id = id as isize;
            }
            Flush => {
                fp_crash!(tree.flush());
                // once a flush has been successfully completed, recent Set/Del
                // operations should be durable. go through the
                // reference, and if a Set/Del operation was done since
                // the last crash, keep the value for that key corresponding to
                // the most recent operation, and toss the rest.
                for (_key, reference_entry) in reference.iter_mut() {
                    if reference_entry.values.len() > 1 {
                        if reference_entry.crash_epoch == crash_counter {
                            let last = *reference_entry.values.last().unwrap();
                            reference_entry.values.clear();
                            reference_entry.values.push(last);
                        }
                    }
                }
            }
            Restart => {
                restart!();
            }
            FailPoint(fp) => {
                fail_points.insert(fp);
                fail::cfg(&*fp, "return")
                    .expect("should be able to configure failpoint");
            }
        }
    }

    true
}
#[test]
#[cfg(not(target_os = "fuchsia"))]
fn quickcheck_tree_with_failpoints() {
    // use fewer tests for travis OSX builds that stall out all the time
    QuickCheck::new()
        .gen(StdGen::new(rand::thread_rng(), 100))
        .tests(50)
        .max_tests(10000)
        .quickcheck(prop_tree_crashes_nicely as fn(Vec<Op>, bool) -> bool);
}
#[test]
fn failpoints_bug_01() {
    // postmortem 1: model did not account for proper reasons to fail to start
    let ops = vec![FailPoint("snap write"), Restart];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_2() {
    // postmortem 1: the system was assuming the happy path across failpoints
    let ops = vec![FailPoint("buffer write post"), Set, Set, Restart];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_3() {
    // postmortem 1: this was a regression that happened because we
    // chose to eat errors about advancing snapshots, which trigger
    // log flushes. We should not trigger flushes from snapshots,
    // but first we need to make sure we are better about detecting
    // tears, by not also using 0 as a failed flush signifier.
    let mut ops = vec![Set; 8];
    ops.push(Restart);
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_4() {
    // postmortem 1: the test model was not properly accounting for
    // writes that may-or-may-not be present due to an error.
    let ops = vec![Set, FailPoint("snap write"), Del(0), Set, Restart];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_5() {
    // postmortem 1:
    let ops = vec![
        Set, FailPoint("snap write mv post"), Set, FailPoint("snap write"), Set, Set, Set,
        Restart, FailPoint("zero segment"), Set, Set, Set, Restart,
    ];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_6() {
    // postmortem 1:
    let ops = vec![
        Set, Del(0), Set, Set, Set, Restart, FailPoint("zero segment post"), Set, Set, Set,
        Restart,
    ];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_7() {
    // postmortem 1: We were crashing because a Segment was
    // in the SegmentAccountant's to_clean Vec, but it had
    // no present pages. This can legitimately happen when
    // a Segment only contains failed log flushes.
    let mut ops = vec![Set; 17];
    ops.extend(vec![Del(17), Del(29), Del(246), Del(248), Set]);
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_8() {
    // postmortem 1: we were assuming that deletes would fail if buffer writes
    // are disabled, but that's not true, because deletes might not cause any
    // writes if the value was not present.
    let mut ops = vec![Set; 17];
    ops.extend(vec![Del(0), FailPoint("buffer write post"), Del(179)]);
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_9() {
    // postmortem 1: recovery was not properly accounting for
    // ordering issues around allocation and freeing of pages.
    let ops = vec![
        Set, Restart, Del(110), Del(0), Set, Restart, Set, Del(255), Set, Set, Set, Set, Set,
        Del(38), Set, Set, Del(253), Set, Restart, Set, Del(19), Set, Del(118), Set, Set, Set,
        Set, Set, Del(151), Set, Set, Del(201), Set, Restart, Set, Set, Del(17), Set, Set, Set,
        Del(230), Set, Restart,
    ];
    // note: this case runs with the background flusher enabled
    assert!(prop_tree_crashes_nicely(ops, true));
}
#[test]
fn failpoints_bug_10() {
    // expected to iterate over 50 but got 49 instead
    // postmortem 1:
    let ops = vec![
        Del(175), Del(19), Restart, Del(155), Del(111), Set, Del(4), Set, Set, Set, Set,
        Restart, Del(94), Set, Del(83), Del(181), Del(218), Set, Set, Del(60), Del(248), Set,
        Set, Set, Del(167), Del(180), Del(180), Set, Restart, Del(14), Set, Set, Del(156),
        Del(29), Del(190), Set, Set, Del(245), Set, Del(231), Del(95), Set, Restart, Set,
        Del(189), Set, Restart, Set, Del(249), Set, Set, Del(110), Del(75), Set, Restart,
        Del(156), Del(140), Del(101), Del(45), Del(115), Del(162), Set, Set, Del(192), Del(31),
        Del(224), Set, Del(84), Del(6), Set, Del(191), Set, Set, Set, Del(86), Del(143),
        Del(168), Del(175), Set, Restart, Set, Set, Set, Set, Set, Restart, Del(14), Set, Set,
        Set, Set, Set, Set, Del(60), Set, Del(115), Restart, Set, Del(203), Del(12), Del(134),
        Del(118), Del(26), Del(161), Set, Del(6), Del(23), Set, Del(122), Del(251), Set,
        Restart, Set, Set, Del(252), Del(88), Set, Del(140), Del(164), Del(203), Del(165), Set,
        Set, Restart, Del(0), Set, Del(146), Del(83), Restart, Del(0), Set, Del(55), Set, Set,
        Del(89), Set, Set, Del(105), Restart, Set, Restart, Del(145), Set, Del(17), Del(123),
        Set, Del(203), Set, Set, Set, Set, Del(192), Del(58), Restart, Set, Restart, Set,
        Restart, Set, Del(142), Set, Del(220), Del(185), Set, Del(86), Set, Set, Del(123), Set,
        Restart, Del(56), Del(191), Set, Set, Set, Set, Set, Del(123), Set, Set, Set, Restart,
        Del(20), Del(47), Del(207), Del(45), Set, Set, Set, Del(83), Set, Del(92), Del(117),
        Set, Set, Restart, Del(241), Set, Del(49), Set,
    ];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_11() {
    // dupe lsn detected
    // postmortem 1:
    let mut ops = vec![Set; 21];
    ops.extend(vec![
        Restart,
        Del(21),
        Set,
        Set,
        FailPoint("buffer write post"),
        Set,
        Set,
        Restart,
    ]);
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_12() {
    // postmortem 1: we were not sorting the recovery state, which
    // led to divergent state across recoveries. TODO wut
    let ops = vec![
        Set, Del(0), Set, Set, Set, Set, Set, Set, Restart, Set, Set, Set, Restart,
    ];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_13() {
    // postmortem 1:
    let mut ops = vec![Set; 11];
    ops.extend(vec![
        Del(0), Set, Set, Set, Del(2), Set, Set, Set, Set, Del(1), Del(3), Del(18), Set, Set,
        Set, Restart, Set, Set, Set, Set, FailPoint("snap write"), Del(4),
    ]);
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_14() {
    // postmortem 1: improper bounds on splits caused a loop to happen
    let mut ops = vec![FailPoint("blob blob write")];
    ops.extend(vec![Set; 20]);
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_15() {
    // postmortem 1:
    let ops = vec![FailPoint("buffer write"), Id, Restart, Id];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_16() {
    // postmortem 1:
    let ops = vec![FailPoint("zero garbage segment"), Id, Id];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_17() {
    // postmortem 1: during recovery we were not properly
    // filtering replaced pages in segments by the source
    // segment still
    let ops = vec![
        Del(0), Set, Set, Set, Del(3), Id, Id, Set, Id, Id, Del(3), Id, Id, Del(3), Restart,
        Id, FailPoint("blob blob write"), Id, Restart, Id, Set, Id, Del(3), Set,
    ];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_18() {
    // postmortem 1:
    let ops = vec![Id, Id, Set, Id, Id, Id, Set, Del(0), Restart, Del(0), Id, Set];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_19() {
    // postmortem 1:
    let ops = vec![
        Set, Set, Set, Set, Del(4), Id, Del(4), Id, Id, Set, Set, Set, Set, Set, Id, Set, Set,
        Del(11), Del(13), Id, Del(122), Del(134), Del(101), Del(81), Set, Del(15), Del(76),
        Restart, Set, Id, Id, Set, Restart,
    ];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_20() {
    // postmortem 1: failed to filter out segments with
    // uninitialized segment ID's when creating a segment
    // iterator.
    let ops = vec![Restart, Set, Set, Del(0), Id, Id, Set, Del(0), Id, Set];
    assert!(prop_tree_crashes_nicely(ops, false));
}
#[test]
fn failpoints_bug_21() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Id,
Del(242),
Set,
Del(172),
Id,
Del(142),
Del(183),
Set,
Set,
Set,
Set,
Set,
Id,
Id,
Set,
Id,
Set,
Id,
Del(187),
Set,
Id,
Set,
Id,
Del(152),
Del(231),
Del(45),
Del(181),
Restart,
Id,
Id,
Id,
Id,
Id,
Set,
Del(53),
Restart,
Set,
Del(202),
Id,
Set,
Set,
Set,
Id,
Restart,
Del(99),
Set,
Set,
Id,
Restart,
Del(93),
Id,
Set,
Del(38),
Id,
Del(158),
Del(49),
Id,
Del(145),
Del(35),
Set,
Del(94),
Del(115),
Id,
Restart,
],
false,
))
}
#[test]
fn failpoints_bug_22() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![Id, FailPoint("buffer write"), Set, Id],
false,
))
}
#[test]
fn failpoints_bug_23() {
// postmortem 1: failed to handle allocation failures
assert!(prop_tree_crashes_nicely(
vec![Set, FailPoint("blob blob write"), Set, Set, Set],
false,
))
}
#[test]
fn failpoints_bug_24() {
// postmortem 1: was incorrectly setting global
// errors, and they were being used-after-free
assert!(prop_tree_crashes_nicely(
vec![FailPoint("buffer write"), Id,],
false,
))
}
#[test]
fn failpoints_bug_25() {
// postmortem 1: after removing segment trailers, we
// no longer have the invariant that a write
// must be more than one byte
assert!(prop_tree_crashes_nicely(
vec![
Del(103),
Restart,
Del(242),
Del(125),
Restart,
Set,
Restart,
Id,
Del(183),
Id,
FailPoint("snap write crc"),
Del(141),
Del(8),
Del(188),
Set,
Set,
Restart,
Id,
Id,
Id,
Set,
Id,
Id,
Set,
Del(65),
Del(6),
Del(198),
Del(57),
Id,
FailPoint("snap write mv"),
Set,
Del(164),
Del(43),
Del(161),
Id,
Restart,
Set,
Id,
Id,
Set,
Set,
Restart,
Restart,
Set,
Set,
Del(252),
Set,
Del(111),
Id,
Del(55)
],
false,
))
}
#[test]
fn failpoints_bug_26() {
// postmortem 1: after removing segment trailers, we
// no longer handled maxed segment recovery properly
assert!(prop_tree_crashes_nicely(
vec![
Id,
Set,
Set,
Del(167),
Del(251),
Del(24),
Set,
Del(111),
Id,
Del(133),
Del(187),
Restart,
Set,
Del(52),
Set,
Restart,
Set,
Set,
Id,
Set,
Set,
Id,
Id,
Set,
Set,
Del(95),
Set,
Id,
Del(59),
Del(133),
Del(209),
Id,
Del(89),
Id,
Set,
Del(46),
Set,
Del(246),
Restart,
Set,
Restart,
Restart,
Del(28),
Set,
Del(9),
Del(101),
Id,
Del(73),
Del(192),
Set,
Set,
Set,
Id,
Set,
Set,
Set,
Id,
Restart,
Del(92),
Del(212),
Del(215)
],
false,
))
}
#[test]
fn failpoints_bug_27() {
// postmortem 1: a segment is recovered as empty at recovery,
// which prevented its lsn from being known, and when the SA
// was recovered it erroneously calculated its lsn as being -1
assert!(prop_tree_crashes_nicely(
vec![
Id,
Id,
Set,
Set,
Restart,
Set,
Id,
Id,
Set,
Del(197),
Del(148),
Restart,
Id,
Set,
Del(165),
Set,
Set,
Set,
Set,
Id,
Del(29),
Set,
Set,
Del(75),
Del(170),
Restart,
Restart,
Set
],
true,
))
}
#[test]
fn failpoints_bug_28() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Del(61),
Id,
Del(127),
Set,
Restart,
Del(219),
Id,
Set,
Id,
Del(41),
Id,
Id,
Set,
Del(227),
Set,
Del(191),
Id,
Del(78),
Set,
Id,
Set,
Del(123),
Restart,
Restart,
Restart,
Id
],
true,
))
}
#[test]
fn failpoints_bug_29() {
// postmortem 1: the test model was turning uncertain entries
// into certain entries even when there was an intervening crash
// between the Set and the Flush
assert!(prop_tree_crashes_nicely(
vec![FailPoint("buffer write"), Set, Flush, Restart],
false,
));
assert!(prop_tree_crashes_nicely(
vec![Set, Set, Set, FailPoint("snap write mv"), Set, Flush, Restart],
false,
));
}
#[test]
fn failpoints_bug_30() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![Set, FailPoint("buffer write"), Restart, Flush, Id],
false,
));
}
when the db hits a failpoint during drop, treat it the same as when a failpoint is hit during initialization
#![cfg(feature = "failpoints")]
mod common;
use std::collections::{BTreeMap, HashSet};
use std::sync::Mutex;
use quickcheck::{Arbitrary, Gen, QuickCheck, StdGen};
use rand::{seq::SliceRandom, Rng};
use sled::*;
#[derive(Debug, Clone)]
enum Op {
Set,
Del(u8),
Id,
Restart,
Flush,
FailPoint(&'static str),
}
use self::Op::*;
impl Arbitrary for Op {
fn arbitrary<G: Gen>(g: &mut G) -> Op {
let fail_points = vec![
"zero garbage segment",
"zero garbage segment post",
"buffer write",
"buffer write post",
"write_config bytes",
"write_config crc",
"write_config post",
"segment initial free zero",
"snap write",
"snap write len",
"snap write crc",
"snap write post",
"snap write mv",
"snap write mv post",
"snap write rm old",
"blob blob write",
"write_blob write crc",
"write_blob write kind_byte",
"write_blob write buf",
];
if g.gen_bool(1. / 30.) {
return FailPoint(fail_points.choose(g).unwrap());
}
if g.gen_bool(1. / 10.) {
return Restart;
}
let choice = g.gen_range(0, 4);
match choice {
0 => Set,
1 => Del(g.gen::<u8>()),
2 => Id,
3 => Flush,
_ => panic!("impossible choice"),
}
}
fn shrink(&self) -> Box<dyn Iterator<Item = Op>> {
match *self {
Op::Del(ref lid) if *lid > 0 => {
Box::new(vec![Op::Del(*lid / 2), Op::Del(*lid - 1)].into_iter())
}
_ => Box::new(vec![].into_iter()),
}
}
}
fn v(b: &[u8]) -> u16 {
if b[0] % 4 != 0 {
assert_eq!(b.len(), 2);
}
(u16::from(b[0]) << 8) + u16::from(b[1])
}
fn tear_down_failpoints() {
for (name, _) in fail::list() {
fail::remove(name);
}
}
#[derive(Debug)]
struct ReferenceEntry {
values: Vec<Option<u16>>,
crash_epoch: u32,
}
fn prop_tree_crashes_nicely(ops: Vec<Op>, flusher: bool) -> bool {
// forces quickcheck to run one thread at a time
static M: Lazy<Mutex<()>, fn() -> Mutex<()>> = Lazy::new(|| Mutex::new(()));
let _lock = M.lock().expect("our test lock should not be poisoned");
// clear all failpoints that may be left over from the last run
tear_down_failpoints();
let res = std::panic::catch_unwind(|| {
run_tree_crashes_nicely(ops.clone(), flusher)
});
tear_down_failpoints();
match res {
Err(e) => {
println!(
"failed with {:?} on ops {:?} flusher {}",
e, ops, flusher
);
false
}
Ok(res) => {
if !res {
println!("failed with ops {:?} flusher: {}", ops, flusher);
}
res
}
}
}
fn run_tree_crashes_nicely(ops: Vec<Op>, flusher: bool) -> bool {
common::setup_logger();
let segment_size = 256;
let config = Config::new()
.temporary(true)
.snapshot_after_ops(1)
.flush_every_ms(if flusher { Some(1) } else { None })
.cache_capacity(256)
.idgen_persist_interval(1)
.segment_size(segment_size);
let mut tree = config.open().expect("tree should start");
let mut reference = BTreeMap::new();
let mut fail_points = HashSet::new();
let mut max_id: isize = -1;
let mut crash_counter = 0;
macro_rules! restart {
() => {
drop(tree);
let tree_res = config.global_error().and_then(|_| config.open());
if let Err(ref e) = tree_res {
if e == &Error::FailPoint {
return true;
}
println!("could not start database: {}", e);
return false;
}
tree = tree_res.expect("tree should restart");
let mut ref_iter = reference.iter().map(|(ref rk, ref rv)| (**rk, *rv));
for res in tree.iter() {
let t = match res {
Ok((ref tk, _)) => v(tk),
Err(Error::FailPoint) => return true,
Err(other) => panic!("failed to iterate over items in tree after restart: {:?}", other),
};
// make sure the tree value is in there
while let Some((ref_key, ref_expected)) = ref_iter.next() {
if ref_expected.values.iter().all(Option::is_none) {
// this key should not be present in the tree, skip it and move on to the
// next entry in the reference
continue;
} else if ref_expected.values.iter().all(Option::is_some) {
// this key must be present in the tree, check if the keys from both
// iterators match
if t != ref_key {
println!(
"expected to iterate over {:?} but got {:?} instead",
ref_key,
t
);
return false;
}
break;
} else {
// according to the reference, this key could either be present or absent,
// depending on whether recent writes were successful. check whether the
// keys from the two iterators match, if they do, the key happens to be
// present, which is okay, if they don't, and the tree iterator is further
// ahead than the reference iterator, the key happens to be absent, so we
// skip the entry in the reference. if the reference iterator ever gets
// further than the tree iterator, that means the tree has a key that it
// should not.
if t == ref_key {
// tree and reference agree, we can move on to the next tree item
break;
} else if ref_key > t {
// we have a bug, the reference iterator should always be <= tree
// (this means that the key t was in the tree, but it wasn't in
// the reference, so the reference iterator has advanced on past t)
println!(
"tree verification failed: expected {:?} got {:?}",
ref_key,
t
);
return false;
} else {
// we are iterating through the reference until we have an item that
// must be present or an uncertain item that matches the tree's real
// item anyway
continue;
}
}
}
}
// finish the rest of the reference iterator, and confirm the tree isn't missing
// any keys it needs to have at the end
while let Some((ref_key, ref_expected)) = ref_iter.next() {
if ref_expected.values.iter().all(Option::is_some) {
// this key had to be present, but we got to the end of the tree without
// seeing it
println!("tree verification failed: expected {:?} got end", ref_key);
println!("expected: {:?}", ref_expected);
println!("tree: {:?}", tree);
return false;
}
}
println!("finished verification");
}
}
macro_rules! fp_crash {
($e:expr) => {
match $e {
Ok(thing) => thing,
Err(Error::FailPoint) => {
tear_down_failpoints();
crash_counter += 1;
restart!();
continue;
}
other => {
println!("got non-failpoint err: {:?}", other);
return false;
}
}
};
}
let mut set_counter = 0u16;
println!("ops: {:?}", ops);
for op in ops.into_iter() {
match op {
Set => {
let hi = (set_counter >> 8) as u8;
let lo = set_counter as u8;
let val = if hi % 4 == 0 {
let mut val = vec![hi, lo];
val.extend(vec![
lo;
hi as usize * segment_size / 4
* set_counter as usize
]);
val
} else {
vec![hi, lo]
};
// update the reference to show that this key could be present.
// the next Flush operation will update the
// reference again, and require this key to be present
// (unless there's a crash before then).
let reference_entry = reference
.entry(set_counter)
.or_insert_with(|| ReferenceEntry {
values: vec![None],
crash_epoch: crash_counter,
});
reference_entry.values.push(Some(set_counter));
reference_entry.crash_epoch = crash_counter;
fp_crash!(tree.insert(&[hi, lo], val));
set_counter += 1;
}
Del(k) => {
// if this key was already set, update the reference to show
// that this key could either be present or
// absent. the next Flush operation will update the reference
// again, and require this key to be absent (unless there's a
// crash before then).
reference.entry(u16::from(k)).and_modify(|v| {
v.values.push(None);
v.crash_epoch = crash_counter;
});
fp_crash!(tree.remove(&*vec![0, k]));
}
Id => {
let id = fp_crash!(tree.generate_id());
assert!(
id as isize > max_id,
"generated id of {} is not larger \
than previous max id of {}",
id,
max_id,
);
max_id = id as isize;
}
Flush => {
fp_crash!(tree.flush());
// once a flush has been successfully completed, recent Set/Del
// operations should be durable. go through the
// reference, and if a Set/Del operation was done since
// the last crash, keep the value for that key corresponding to
// the most recent operation, and toss the rest.
for (_key, reference_entry) in reference.iter_mut() {
if reference_entry.values.len() > 1 {
if reference_entry.crash_epoch == crash_counter {
let last = *reference_entry.values.last().unwrap();
reference_entry.values.clear();
reference_entry.values.push(last);
}
}
}
}
Restart => {
restart!();
}
FailPoint(fp) => {
fail_points.insert(fp);
fail::cfg(&*fp, "return")
.expect("should be able to configure failpoint");
}
}
}
true
}
#[test]
#[cfg(not(target_os = "fuchsia"))]
fn quickcheck_tree_with_failpoints() {
// use fewer tests for travis OSX builds that stall out all the time
let n_tests = 50;
let generator_sz = 100;
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), generator_sz))
.tests(n_tests)
.max_tests(10000)
.quickcheck(prop_tree_crashes_nicely as fn(Vec<Op>, bool) -> bool);
}
#[test]
fn failpoints_bug_01() {
// postmortem 1: model did not account for proper reasons to fail to start
assert!(prop_tree_crashes_nicely(
vec![FailPoint("snap write"), Restart],
false,
));
}
#[test]
fn failpoints_bug_2() {
// postmortem 1: the system was assuming the happy path across failpoints
assert!(prop_tree_crashes_nicely(
vec![FailPoint("buffer write post"), Set, Set, Restart],
false,
))
}
#[test]
fn failpoints_bug_3() {
// postmortem 1: this was a regression that happened because we
// chose to eat errors about advancing snapshots, which trigger
// log flushes. We should not trigger flushes from snapshots,
// but first we need to make sure we are better about detecting
// tears, by not also using 0 as a failed flush signifier.
assert!(prop_tree_crashes_nicely(
vec![Set, Set, Set, Set, Set, Set, Set, Set, Restart,],
false,
))
}
#[test]
fn failpoints_bug_4() {
// postmortem 1: the test model was not properly accounting for
// writes that may-or-may-not be present due to an error.
assert!(prop_tree_crashes_nicely(
vec![Set, FailPoint("snap write"), Del(0), Set, Restart],
false,
))
}
#[test]
fn failpoints_bug_5() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Set,
FailPoint("snap write mv post"),
Set,
FailPoint("snap write"),
Set,
Set,
Set,
Restart,
FailPoint("zero segment"),
Set,
Set,
Set,
Restart,
],
false,
))
}
#[test]
fn failpoints_bug_6() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Set,
Del(0),
Set,
Set,
Set,
Restart,
FailPoint("zero segment post"),
Set,
Set,
Set,
Restart,
],
false,
))
}
#[test]
fn failpoints_bug_7() {
// postmortem 1: We were crashing because a Segment was
// in the SegmentAccountant's to_clean Vec, but it had
// no present pages. This can legitimately happen when
// a Segment only contains failed log flushes.
assert!(prop_tree_crashes_nicely(
vec![
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Del(17),
Del(29),
Del(246),
Del(248),
Set,
],
false,
))
}
#[test]
fn failpoints_bug_8() {
// postmortem 1: we were assuming that deletes would fail if buffer writes
// are disabled, but that's not true, because deletes might not cause any
// writes if the value was not present.
assert!(prop_tree_crashes_nicely(
vec![
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Del(0),
FailPoint("buffer write post"),
Del(179),
],
false,
))
}
#[test]
fn failpoints_bug_9() {
// postmortem 1: recovery was not properly accounting for
// ordering issues around allocation and freeing of pages.
assert!(prop_tree_crashes_nicely(
vec![
Set,
Restart,
Del(110),
Del(0),
Set,
Restart,
Set,
Del(255),
Set,
Set,
Set,
Set,
Set,
Del(38),
Set,
Set,
Del(253),
Set,
Restart,
Set,
Del(19),
Set,
Del(118),
Set,
Set,
Set,
Set,
Set,
Del(151),
Set,
Set,
Del(201),
Set,
Restart,
Set,
Set,
Del(17),
Set,
Set,
Set,
Del(230),
Set,
Restart,
],
true,
))
}
#[test]
fn failpoints_bug_10() {
// expected to iterate over 50 but got 49 instead
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Del(175),
Del(19),
Restart,
Del(155),
Del(111),
Set,
Del(4),
Set,
Set,
Set,
Set,
Restart,
Del(94),
Set,
Del(83),
Del(181),
Del(218),
Set,
Set,
Del(60),
Del(248),
Set,
Set,
Set,
Del(167),
Del(180),
Del(180),
Set,
Restart,
Del(14),
Set,
Set,
Del(156),
Del(29),
Del(190),
Set,
Set,
Del(245),
Set,
Del(231),
Del(95),
Set,
Restart,
Set,
Del(189),
Set,
Restart,
Set,
Del(249),
Set,
Set,
Del(110),
Del(75),
Set,
Restart,
Del(156),
Del(140),
Del(101),
Del(45),
Del(115),
Del(162),
Set,
Set,
Del(192),
Del(31),
Del(224),
Set,
Del(84),
Del(6),
Set,
Del(191),
Set,
Set,
Set,
Del(86),
Del(143),
Del(168),
Del(175),
Set,
Restart,
Set,
Set,
Set,
Set,
Set,
Restart,
Del(14),
Set,
Set,
Set,
Set,
Set,
Set,
Del(60),
Set,
Del(115),
Restart,
Set,
Del(203),
Del(12),
Del(134),
Del(118),
Del(26),
Del(161),
Set,
Del(6),
Del(23),
Set,
Del(122),
Del(251),
Set,
Restart,
Set,
Set,
Del(252),
Del(88),
Set,
Del(140),
Del(164),
Del(203),
Del(165),
Set,
Set,
Restart,
Del(0),
Set,
Del(146),
Del(83),
Restart,
Del(0),
Set,
Del(55),
Set,
Set,
Del(89),
Set,
Set,
Del(105),
Restart,
Set,
Restart,
Del(145),
Set,
Del(17),
Del(123),
Set,
Del(203),
Set,
Set,
Set,
Set,
Del(192),
Del(58),
Restart,
Set,
Restart,
Set,
Restart,
Set,
Del(142),
Set,
Del(220),
Del(185),
Set,
Del(86),
Set,
Set,
Del(123),
Set,
Restart,
Del(56),
Del(191),
Set,
Set,
Set,
Set,
Set,
Del(123),
Set,
Set,
Set,
Restart,
Del(20),
Del(47),
Del(207),
Del(45),
Set,
Set,
Set,
Del(83),
Set,
Del(92),
Del(117),
Set,
Set,
Restart,
Del(241),
Set,
Del(49),
Set,
],
false,
))
}
#[test]
fn failpoints_bug_11() {
// dupe lsn detected
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Restart,
Del(21),
Set,
Set,
FailPoint("buffer write post"),
Set,
Set,
Restart,
],
false,
))
}
#[test]
fn failpoints_bug_12() {
// postmortem 1: we were not sorting the recovery state, which
// led to divergent state across recoveries. TODO wut
assert!(prop_tree_crashes_nicely(
vec![
Set,
Del(0),
Set,
Set,
Set,
Set,
Set,
Set,
Restart,
Set,
Set,
Set,
Restart,
],
false,
))
}
#[test]
fn failpoints_bug_13() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Del(0),
Set,
Set,
Set,
Del(2),
Set,
Set,
Set,
Set,
Del(1),
Del(3),
Del(18),
Set,
Set,
Set,
Restart,
Set,
Set,
Set,
Set,
FailPoint("snap write"),
Del(4),
],
false,
))
}
#[test]
fn failpoints_bug_14() {
// postmortem 1: improper bounds on splits caused a loop to happen
assert!(prop_tree_crashes_nicely(
vec![
FailPoint("blob blob write"),
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
Set,
],
false,
))
}
#[test]
fn failpoints_bug_15() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![FailPoint("buffer write"), Id, Restart, Id],
false,
))
}
#[test]
fn failpoints_bug_16() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![FailPoint("zero garbage segment"), Id, Id],
false,
))
}
#[test]
fn failpoints_bug_17() {
// postmortem 1: during recovery we were not properly
// filtering replaced pages in segments by the source
// segment still
assert!(prop_tree_crashes_nicely(
vec![
Del(0),
Set,
Set,
Set,
Del(3),
Id,
Id,
Set,
Id,
Id,
Del(3),
Id,
Id,
Del(3),
Restart,
Id,
FailPoint("blob blob write"),
Id,
Restart,
Id,
Set,
Id,
Del(3),
Set
],
false,
))
}
#[test]
fn failpoints_bug_18() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![Id, Id, Set, Id, Id, Id, Set, Del(0), Restart, Del(0), Id, Set],
false,
))
}
#[test]
fn failpoints_bug_19() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Set,
Set,
Set,
Set,
Del(4),
Id,
Del(4),
Id,
Id,
Set,
Set,
Set,
Set,
Set,
Id,
Set,
Set,
Del(11),
Del(13),
Id,
Del(122),
Del(134),
Del(101),
Del(81),
Set,
Del(15),
Del(76),
Restart,
Set,
Id,
Id,
Set,
Restart
],
false,
))
}
#[test]
fn failpoints_bug_20() {
// postmortem 1: failed to filter out segments with
// uninitialized segment ID's when creating a segment
// iterator.
assert!(prop_tree_crashes_nicely(
vec![Restart, Set, Set, Del(0), Id, Id, Set, Del(0), Id, Set],
false,
))
}
#[test]
fn failpoints_bug_21() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Id,
Del(242),
Set,
Del(172),
Id,
Del(142),
Del(183),
Set,
Set,
Set,
Set,
Set,
Id,
Id,
Set,
Id,
Set,
Id,
Del(187),
Set,
Id,
Set,
Id,
Del(152),
Del(231),
Del(45),
Del(181),
Restart,
Id,
Id,
Id,
Id,
Id,
Set,
Del(53),
Restart,
Set,
Del(202),
Id,
Set,
Set,
Set,
Id,
Restart,
Del(99),
Set,
Set,
Id,
Restart,
Del(93),
Id,
Set,
Del(38),
Id,
Del(158),
Del(49),
Id,
Del(145),
Del(35),
Set,
Del(94),
Del(115),
Id,
Restart,
],
false,
))
}
#[test]
fn failpoints_bug_22() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![Id, FailPoint("buffer write"), Set, Id],
false,
))
}
#[test]
fn failpoints_bug_23() {
// postmortem 1: failed to handle allocation failures
assert!(prop_tree_crashes_nicely(
vec![Set, FailPoint("blob blob write"), Set, Set, Set],
false,
))
}
#[test]
fn failpoints_bug_24() {
// postmortem 1: was incorrectly setting global
// errors, and they were being used-after-free
assert!(prop_tree_crashes_nicely(
vec![FailPoint("buffer write"), Id,],
false,
))
}
#[test]
fn failpoints_bug_25() {
// postmortem 1: after removing segment trailers, we
// no longer have the invariant that a write
// must be more than one byte
assert!(prop_tree_crashes_nicely(
vec![
Del(103),
Restart,
Del(242),
Del(125),
Restart,
Set,
Restart,
Id,
Del(183),
Id,
FailPoint("snap write crc"),
Del(141),
Del(8),
Del(188),
Set,
Set,
Restart,
Id,
Id,
Id,
Set,
Id,
Id,
Set,
Del(65),
Del(6),
Del(198),
Del(57),
Id,
FailPoint("snap write mv"),
Set,
Del(164),
Del(43),
Del(161),
Id,
Restart,
Set,
Id,
Id,
Set,
Set,
Restart,
Restart,
Set,
Set,
Del(252),
Set,
Del(111),
Id,
Del(55)
],
false,
))
}
#[test]
fn failpoints_bug_26() {
// postmortem 1: after removing segment trailers, we
// no longer handled maxed segment recovery properly
assert!(prop_tree_crashes_nicely(
vec![
Id,
Set,
Set,
Del(167),
Del(251),
Del(24),
Set,
Del(111),
Id,
Del(133),
Del(187),
Restart,
Set,
Del(52),
Set,
Restart,
Set,
Set,
Id,
Set,
Set,
Id,
Id,
Set,
Set,
Del(95),
Set,
Id,
Del(59),
Del(133),
Del(209),
Id,
Del(89),
Id,
Set,
Del(46),
Set,
Del(246),
Restart,
Set,
Restart,
Restart,
Del(28),
Set,
Del(9),
Del(101),
Id,
Del(73),
Del(192),
Set,
Set,
Set,
Id,
Set,
Set,
Set,
Id,
Restart,
Del(92),
Del(212),
Del(215)
],
false,
))
}
#[test]
fn failpoints_bug_27() {
// postmortem 1: a segment is recovered as empty at recovery,
// which prevented its lsn from being known, and when the SA
// was recovered it erroneously calculated its lsn as being -1
assert!(prop_tree_crashes_nicely(
vec![
Id,
Id,
Set,
Set,
Restart,
Set,
Id,
Id,
Set,
Del(197),
Del(148),
Restart,
Id,
Set,
Del(165),
Set,
Set,
Set,
Set,
Id,
Del(29),
Set,
Set,
Del(75),
Del(170),
Restart,
Restart,
Set
],
true,
))
}
#[test]
fn failpoints_bug_28() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![
Del(61),
Id,
Del(127),
Set,
Restart,
Del(219),
Id,
Set,
Id,
Del(41),
Id,
Id,
Set,
Del(227),
Set,
Del(191),
Id,
Del(78),
Set,
Id,
Set,
Del(123),
Restart,
Restart,
Restart,
Id
],
true,
))
}
#[test]
fn failpoints_bug_29() {
// postmortem 1: the test model was turning uncertain entries
// into certain entries even when there was an intervening crash
// between the Set and the Flush
assert!(prop_tree_crashes_nicely(
vec![FailPoint("buffer write"), Set, Flush, Restart],
false,
));
assert!(prop_tree_crashes_nicely(
vec![Set, Set, Set, FailPoint("snap write mv"), Set, Flush, Restart],
false,
));
}
#[test]
fn failpoints_bug_30() {
// postmortem 1:
assert!(prop_tree_crashes_nicely(
vec![Set, FailPoint("buffer write"), Restart, Flush, Id],
false,
));
}
|
use gl;
use yaglw::gl_context::GLContext;
use yaglw::shader::Shader;
pub struct T<'a> {
#[allow(missing_docs)]
pub shader: Shader<'a>,
}
pub fn new<'a, 'b:'a>(gl: &'a GLContext) -> T<'b> {
let components = vec!(
(gl::VERTEX_SHADER, "
#version 330 core
void main() {
if (gl_VertexID == 0) {
gl_Position = vec4(1, -1, 0, 1);
} else if (gl_VertexID == 1) {
gl_Position = vec4(1, 1, 0, 1);
} else if (gl_VertexID == 2) {
gl_Position = vec4(-1, -1, 0, 1);
} else if (gl_VertexID == 3) {
gl_Position = vec4(-1, 1, 0, 1);
}
}".to_owned()),
(gl::FRAGMENT_SHADER,
format!(r#"
#version 330 core
uniform vec2 window_size;
uniform struct Sun {{
vec3 direction;
vec3 intensity;
}} sun;
const float sun_angular_radius = 3.14/32;
uniform mat4 projection_matrix;
uniform vec3 eye_position;
uniform float time_ms;
out vec4 frag_color;
// include depth fog
{}
// include cnoise
{}
vec3 pixel_direction(vec2 pixel) {{
// Scale to [0, 1]
pixel /= window_size;
// Scale to [-1, 1]
pixel = 2*pixel - 1;
vec4 p = inverse(projection_matrix) * vec4(pixel, -1, 1);
return normalize(vec3(p / p.w) - eye_position);
}}
float cloud_noise(vec3 seed) {{
float f = cnoise(seed + time_ms / 10000);
// to [0, 1]
f = f / 2 + 0.5;
return f;
}}
void main() {{
vec3 c = sun.intensity;
vec3 direction = pixel_direction(gl_FragCoord.xy);
const int HEIGHTS = 2;
float heights[HEIGHTS] = float[](150, 1000);
vec3 offsets[HEIGHTS] = vec3[](vec3(12,553,239), vec3(-10, 103, 10004));
if (dot(normalize(sun.direction), direction) > cos(sun_angular_radius)) {{
c = vec3(1);
}}
float alpha = 0;
for (int i = 0; i < HEIGHTS; ++i) {{
float cloud_height = heights[i];
float dist = (cloud_height - eye_position.y) / direction.y;
if (dist <= 0 || dist > 1000000) {{
continue;
}} else {{
vec3 seed = (eye_position + dist * direction + offsets[i]) / 1000 * vec3(1, 4, 1);
float f = cloud_noise(seed) * cloud_noise(seed + vec3(-10, -103, 1));
f = f * f;
alpha += f * (1 - fog_density(dist / 8));
}}
}}
alpha = min(alpha, 1);
c = mix(c, vec3(1, 1, 1), alpha);
frag_color = vec4(c, 1);
}}"#,
::shaders::depth_fog::to_string(),
::shaders::noise::cnoise(),
)
),
);
T {
shader: Shader::new(gl, components.into_iter()),
}
}
sun++
use gl;
use yaglw::gl_context::GLContext;
use yaglw::shader::Shader;
pub struct T<'a> {
#[allow(missing_docs)]
pub shader: Shader<'a>,
}
pub fn new<'a, 'b:'a>(gl: &'a GLContext) -> T<'b> {
let components = vec!(
(gl::VERTEX_SHADER, "
#version 330 core
void main() {
if (gl_VertexID == 0) {
gl_Position = vec4(1, -1, 0, 1);
} else if (gl_VertexID == 1) {
gl_Position = vec4(1, 1, 0, 1);
} else if (gl_VertexID == 2) {
gl_Position = vec4(-1, -1, 0, 1);
} else if (gl_VertexID == 3) {
gl_Position = vec4(-1, 1, 0, 1);
}
}".to_owned()),
(gl::FRAGMENT_SHADER,
format!(r#"
#version 330 core
uniform vec2 window_size;
uniform struct Sun {{
vec3 direction;
vec3 intensity;
}} sun;
const float sun_angular_radius = 3.14/32;
uniform mat4 projection_matrix;
uniform vec3 eye_position;
uniform float time_ms;
out vec4 frag_color;
// include depth fog
{}
// include cnoise
{}
vec3 pixel_direction(vec2 pixel) {{
// Scale to [0, 1]
pixel /= window_size;
// Scale to [-1, 1]
pixel = 2*pixel - 1;
vec4 p = inverse(projection_matrix) * vec4(pixel, -1, 1);
return normalize(vec3(p / p.w) - eye_position);
}}
float cloud_noise(vec3 seed) {{
float f = cnoise(seed + time_ms / 10000);
// to [0, 1]
f = f / 2 + 0.5;
return f;
}}
void main() {{
vec3 c = sun.intensity;
vec3 direction = pixel_direction(gl_FragCoord.xy);
const int HEIGHTS = 2;
float heights[HEIGHTS] = float[](150, 1000);
vec3 offsets[HEIGHTS] = vec3[](vec3(12,553,239), vec3(-10, 103, 10004));
float sunniness = exp(32 * (dot(sun.direction, direction) - cos(sun_angular_radius)));
c = mix(c, vec3(1), sunniness);
float alpha = 0;
for (int i = 0; i < HEIGHTS; ++i) {{
float cloud_height = heights[i];
float dist = (cloud_height - eye_position.y) / direction.y;
if (dist <= 0 || dist > 1000000) {{
continue;
}} else {{
vec3 seed = (eye_position + dist * direction + offsets[i]) / 1000 * vec3(1, 4, 1);
float f = cloud_noise(seed) * cloud_noise(seed + vec3(-10, -103, 1));
f = f * f;
alpha += f * (1 - fog_density(dist / 8));
}}
}}
alpha = min(alpha, 1);
c = mix(c, vec3(1, 1, 1), alpha);
frag_color = min(vec4(c, 1), vec4(1));
}}"#,
::shaders::depth_fog::to_string(),
::shaders::noise::cnoise(),
)
),
);
T {
shader: Shader::new(gl, components.into_iter()),
}
}
|
extern crate lp_modeler;
extern crate quote;
use quote::quote;
use lp_modeler::dsl::*;
#[test]
fn test_quotations() {
use LpExpression::*;
let a = LpInteger { name : "a" . to_string ( ) , lower_bound : None , upper_bound : None };
let quoted_a = quote!(#a);
let quoted_a_str = "LpInteger { name : \"a\" . to_string ( ) , lower_bound : None , upper_bound : None }";
assert_eq!(quoted_a.to_string(), quoted_a_str);
let exp : LpExpression = a.clone().into();
let quoted_exp = quote!(#exp);
let quoted_exp_str = "LpExpression :: ConsInt ( ".to_owned() + quoted_a_str + " )";
assert_eq!(quoted_exp.to_string(), quoted_exp_str);
let full_exp = LpExpression::MulExpr ( Box::new ( LpExpression::SubExpr ( Box::new ( LpExpression::EmptyExpr ) , Box::new ( LpExpression::LitVal ( 1f32 ) ) ) ) , Box::new ( LpExpression::AddExpr ( Box::new ( LpExpression::ConsCont ( LpContinuous { name : "x".to_string() , lower_bound : None , upper_bound : None } ) ) , Box::new ( LpExpression::ConsInt ( LpInteger { name : "y".to_string() , lower_bound : None , upper_bound : None } ) ) ) ) );
let full_exp_quoted = quote!(#full_exp);
let full_exp_str = "LpExpression :: MulExpr ( Box :: new ( LpExpression :: SubExpr ( Box :: new ( LpExpression :: EmptyExpr ) , Box :: new ( LpExpression :: LitVal ( 1f32 ) ) ) ) , Box :: new ( LpExpression :: AddExpr ( Box :: new ( LpExpression :: ConsCont ( LpContinuous { name : \"x\" . to_string ( ) , lower_bound : None , upper_bound : None } ) ) , Box :: new ( LpExpression :: ConsInt ( LpInteger { name : \"y\" . to_string ( ) , lower_bound : None , upper_bound : None } ) ) ) ) )";
assert_eq!(full_exp_quoted.to_string(), full_exp_str);
// a.equal(&b);
let a_eq_b = LpConstraint( SubExpr( Box::new ( ConsInt ( LpInteger { name : "a" . to_string ( ) , lower_bound : None , upper_bound : None } ) ) , Box::new ( ConsInt ( LpInteger { name : "b" . to_string ( ) , lower_bound : None , upper_bound : None } ) ) ) , Constraint::Equal , LitVal(0f32));
let quoted_a_eq_b = quote!(#a_eq_b);
let a_eq_b_str = "LpConstraint ( LpExpression :: SubExpr ( Box :: new ( LpExpression :: ConsInt ( LpInteger { name : \"a\" . to_string ( ) , lower_bound : None , upper_bound : None } ) ) , Box :: new ( LpExpression :: ConsInt ( LpInteger { name : \"b\" . to_string ( ) , lower_bound : None , upper_bound : None } ) ) ) , Constraint :: Equal , LpExpression :: LitVal ( 0f32 ) )";
assert_eq!(quoted_a_eq_b.to_string(), a_eq_b_str)
}
remove extra whitespace in expected &strs in test_quotations()
extern crate lp_modeler;
extern crate quote;
use quote::quote;
use lp_modeler::dsl::*;
// Round-trips lp_modeler DSL values through `quote!` and pins the exact
// token-stream rendering. Token streams stringify with spaces between
// tokens, which is why the expected strings are spaced unusually.
#[test]
fn test_quotations() {
    use LpExpression::*;
    // Quoting a plain variable renders its constructor expression.
    let a = LpInteger { name : "a" . to_string ( ) , lower_bound : None , upper_bound : None };
    let quoted_a = quote!(#a);
    let quoted_a_str = "LpInteger { name : \"a\" . to_string () , lower_bound : None , upper_bound : None }";
    assert_eq!(quoted_a.to_string(), quoted_a_str);
    // Converting the variable into an `LpExpression` wraps it in `ConsInt`.
    let exp : LpExpression = a.clone().into();
    let quoted_exp = quote!(#exp);
    let quoted_exp_str = "LpExpression :: ConsInt (".to_owned() + quoted_a_str + ")";
    assert_eq!(quoted_exp.to_string(), quoted_exp_str);
    // A nested arithmetic tree (Mul/Sub/Add over Empty/Lit/Cons*) quotes recursively.
    let full_exp = LpExpression::MulExpr ( Box::new ( LpExpression::SubExpr ( Box::new ( LpExpression::EmptyExpr ) , Box::new ( LpExpression::LitVal ( 1f32 ) ) ) ) , Box::new ( LpExpression::AddExpr ( Box::new ( LpExpression::ConsCont ( LpContinuous { name : "x".to_string() , lower_bound : None , upper_bound : None } ) ) , Box::new ( LpExpression::ConsInt ( LpInteger { name : "y".to_string() , lower_bound : None , upper_bound : None } ) ) ) ) );
    let full_exp_quoted = quote!(#full_exp);
    let full_exp_str = "LpExpression :: MulExpr (Box :: new (LpExpression :: SubExpr (Box :: new (LpExpression :: EmptyExpr) , Box :: new (LpExpression :: LitVal (1f32)))) , Box :: new (LpExpression :: AddExpr (Box :: new (LpExpression :: ConsCont (LpContinuous { name : \"x\" . to_string () , lower_bound : None , upper_bound : None })) , Box :: new (LpExpression :: ConsInt (LpInteger { name : \"y\" . to_string () , lower_bound : None , upper_bound : None })))))";
    assert_eq!(full_exp_quoted.to_string(), full_exp_str);
    // a.equal(&b);
    // A constraint quotes its left-hand expression, operator, and right-hand side.
    let a_eq_b = LpConstraint( SubExpr( Box::new ( ConsInt ( LpInteger { name : "a" . to_string ( ) , lower_bound : None , upper_bound : None } ) ) , Box::new ( ConsInt ( LpInteger { name : "b" . to_string ( ) , lower_bound : None , upper_bound : None } ) ) ) , Constraint::Equal , LitVal(0f32));
    let quoted_a_eq_b = quote!(#a_eq_b);
    let a_eq_b_str = "LpConstraint (LpExpression :: SubExpr (Box :: new (LpExpression :: ConsInt (LpInteger { name : \"a\" . to_string () , lower_bound : None , upper_bound : None })) , Box :: new (LpExpression :: ConsInt (LpInteger { name : \"b\" . to_string () , lower_bound : None , upper_bound : None }))) , Constraint :: Equal , LpExpression :: LitVal (0f32))";
    assert_eq!(quoted_a_eq_b.to_string(), a_eq_b_str)
}
// | (stray separator left over from file concatenation)
#[macro_use]
pub mod sym;
pub mod attrs;
pub mod author;
pub mod camel_case;
pub mod comparisons;
pub mod conf;
pub mod constants;
mod diagnostics;
pub mod higher;
mod hir_utils;
pub mod inspector;
pub mod internal_lints;
pub mod paths;
pub mod ptr;
pub mod sugg;
pub mod usage;
pub use self::attrs::*;
pub use self::diagnostics::*;
pub use self::hir_utils::{SpanlessEq, SpanlessHash};
use std::borrow::Cow;
use std::mem;
use if_chain::if_chain;
use matches::matches;
use rustc::hir;
use rustc::hir::def::{DefKind, Res};
use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::hir::intravisit::{NestedVisitorMap, Visitor};
use rustc::hir::Node;
use rustc::hir::*;
use rustc::lint::{LateContext, Level, Lint, LintContext};
use rustc::traits;
use rustc::ty::{
self,
layout::{self, IntegerExt},
subst::GenericArg,
Binder, Ty, TyCtxt,
};
use rustc_errors::Applicability;
use smallvec::SmallVec;
use syntax::ast::{self, LitKind};
use syntax::attr;
use syntax::source_map::{Span, DUMMY_SP};
use syntax::symbol::{kw, Symbol};
use syntax_pos::hygiene::ExpnKind;
use crate::consts::{constant, Constant};
use crate::reexport::*;
/// Returns `true` if the two spans come from differing expansions (i.e., one is
/// from a macro and one isn't).
#[must_use]
pub fn differing_macro_contexts(lhs: Span, rhs: Span) -> bool {
    // Spans expanded from the same macro invocation share a syntax context.
    lhs.ctxt() != rhs.ctxt()
}
/// Returns `true` if the given `NodeId` is inside a constant context
///
/// # Example
///
/// ```rust,ignore
/// if in_constant(cx, expr.hir_id) {
///     // Do something
/// }
/// ```
pub fn in_constant(cx: &LateContext<'_, '_>, id: HirId) -> bool {
    // Constness is a property of the enclosing item, so look at the parent
    // item rather than the node itself.
    let parent_id = cx.tcx.hir().get_parent_item(id);
    match cx.tcx.hir().get(parent_id) {
        // Bodies of consts (free, trait, impl), anonymous consts, and statics
        // are always constant contexts.
        Node::Item(&Item {
            kind: ItemKind::Const(..),
            ..
        })
        | Node::TraitItem(&TraitItem {
            kind: TraitItemKind::Const(..),
            ..
        })
        | Node::ImplItem(&ImplItem {
            kind: ImplItemKind::Const(..),
            ..
        })
        | Node::AnonConst(_)
        | Node::Item(&Item {
            kind: ItemKind::Static(..),
            ..
        }) => true,
        // Free functions and inherent/impl methods only count when they are
        // declared `const fn`.
        Node::Item(&Item {
            kind: ItemKind::Fn(ref sig, ..),
            ..
        }) => sig.header.constness == Constness::Const,
        Node::ImplItem(&ImplItem {
            kind: ImplItemKind::Method(ref sig, _),
            ..
        }) => sig.header.constness == Constness::Const,
        _ => false,
    }
}
/// Returns `true` if this `span` was expanded by any macro.
///
/// Desugarings (e.g. `?`, `for` loops) are expansions too, but are
/// deliberately *not* treated as macro expansions here.
#[must_use]
pub fn in_macro(span: Span) -> bool {
    // The original `if x { false } else { true }` ladder is just a boolean
    // expression: expanded, and not a compiler desugaring.
    span.from_expansion() && !matches!(span.ctxt().outer_expn_data().kind, ExpnKind::Desugaring(..))
}
// If the snippet is empty, it's an attribute that was inserted during macro
// expansion and we want to ignore those, because they could come from external
// sources that the user has no control over.
// For some reason these attributes don't have any expansion info on them, so
// we have to check it this way until there is a better way.
pub fn is_present_in_source<T: LintContext>(cx: &T, span: Span) -> bool {
    match snippet_opt(cx, span) {
        // An empty snippet means the span maps to no real source text.
        Some(source) => !source.is_empty(),
        // No snippet available at all: assume it is present.
        None => true,
    }
}
/// Checks if type is struct, enum or union type with the given def path.
pub fn match_type(cx: &LateContext<'_, '_>, ty: Ty<'_>, path: &[&str]) -> bool {
    // Only ADTs (structs/enums/unions) carry a `DefId` we can path-match.
    if let ty::Adt(adt, _) = ty.kind {
        match_def_path(cx, adt.did, path)
    } else {
        false
    }
}
/// Checks if the type is equal to a diagnostic item
pub fn is_type_diagnostic_item(cx: &LateContext<'_, '_>, ty: Ty<'_>, diag_item: Symbol) -> bool {
    // Diagnostic items are attached to ADT definitions, so anything else is `false`.
    if let ty::Adt(adt, _) = ty.kind {
        cx.tcx.is_diagnostic_item(diag_item, adt.did)
    } else {
        false
    }
}
/// Checks if the method call given in `expr` belongs to the given trait.
///
/// # Panics
///
/// Panics if `expr` is not a method call with a resolved callee (callers must
/// only pass method-call expressions).
pub fn match_trait_method(cx: &LateContext<'_, '_>, expr: &Expr, path: &[&str]) -> bool {
    // `expect` instead of a bare `unwrap` so the invariant is visible in the
    // panic message if a caller ever passes a non-method-call expression.
    let def_id = cx
        .tables
        .type_dependent_def_id(expr.hir_id)
        .expect("expr must be a method call with a resolved callee");
    // Free/inherent methods have no containing trait; those never match.
    cx.tcx
        .trait_of_item(def_id)
        .map_or(false, |trait_id| match_def_path(cx, trait_id, path))
}
/// Checks if an expression references a variable of the given name.
pub fn match_var(expr: &Expr, var: Name) -> bool {
    // A local variable shows up as a single-segment, non-self-qualified path.
    match expr.kind {
        ExprKind::Path(QPath::Resolved(None, ref path)) => {
            path.segments.len() == 1 && path.segments[0].ident.name == var
        },
        _ => false,
    }
}
/// Returns the final segment of a `QPath`.
///
/// # Panics
///
/// Panics on a resolved path with zero segments (which would be malformed HIR).
pub fn last_path_segment(path: &QPath) -> &PathSegment {
    match *path {
        QPath::Resolved(_, ref path) => path.segments.last().expect("A path must have at least one segment"),
        // Type-relative paths (`<T>::seg`) carry exactly one segment.
        QPath::TypeRelative(_, ref seg) => seg,
    }
}
/// Returns the only segment of a `QPath`, or `None` if the path has more than one.
pub fn single_segment_path(path: &QPath) -> Option<&PathSegment> {
    match *path {
        // Type-relative paths (`<T>::seg`) always consist of a single segment.
        QPath::TypeRelative(_, ref seg) => Some(seg),
        QPath::Resolved(_, ref resolved) if resolved.segments.len() == 1 => Some(&resolved.segments[0]),
        QPath::Resolved(..) => None,
    }
}
/// Matches a `QPath` against a slice of segment string literals.
///
/// There is also `match_path` if you are dealing with a `rustc::hir::Path` instead of a
/// `rustc::hir::QPath`.
///
/// # Examples
/// ```rust,ignore
/// match_qpath(path, &["std", "rt", "begin_unwind"])
/// ```
pub fn match_qpath(path: &QPath, segments: &[&str]) -> bool {
    match *path {
        QPath::Resolved(_, ref path) => match_path(path, segments),
        // `<Ty>::segment`: the last literal must match `segment` and the rest
        // must (recursively) match the base type's own path.
        QPath::TypeRelative(ref ty, ref segment) => match ty.kind {
            TyKind::Path(ref inner_path) => {
                !segments.is_empty()
                    && match_qpath(inner_path, &segments[..(segments.len() - 1)])
                    && segment.ident.name.as_str() == segments[segments.len() - 1]
            },
            _ => false,
        },
    }
}
/// Matches a `Path` against a slice of segment string literals.
///
/// There is also `match_qpath` if you are dealing with a `rustc::hir::QPath` instead of a
/// `rustc::hir::Path`.
///
/// Note: `zip` stops at the shorter of the two reversed sequences, so this
/// checks that the *trailing* segments agree — it does not require the two
/// paths to have the same length.
///
/// # Examples
///
/// ```rust,ignore
/// if match_path(&trait_ref.path, &paths::HASH) {
///     // This is the `std::hash::Hash` trait.
/// }
///
/// if match_path(ty_path, &["rustc", "lint", "Lint"]) {
///     // This is a `rustc::lint::Lint`.
/// }
/// ```
pub fn match_path(path: &Path, segments: &[&str]) -> bool {
    path.segments
        .iter()
        .rev()
        .zip(segments.iter().rev())
        .all(|(a, b)| a.ident.name.as_str() == *b)
}
/// Matches an AST `Path` against a slice of segment string literals; AST
/// counterpart of `match_path` (same trailing-segment semantics).
///
/// # Examples
/// ```rust,ignore
/// match_qpath(path, &["std", "rt", "begin_unwind"])
/// ```
pub fn match_path_ast(path: &ast::Path, segments: &[&str]) -> bool {
    path.segments
        .iter()
        .rev()
        .zip(segments.iter().rev())
        .all(|(a, b)| a.ident.name.as_str() == *b)
}
/// Gets the definition associated to a path.
///
/// The first element of `path` names a crate; the remaining elements are
/// walked through `item_children` one segment at a time.
pub fn path_to_res(cx: &LateContext<'_, '_>, path: &[&str]) -> Option<def::Res> {
    let crates = cx.tcx.crates();
    // Find the crate whose name matches the first path segment.
    let krate = crates
        .iter()
        .find(|&&krate| cx.tcx.crate_name(krate).as_str() == path[0]);
    if let Some(krate) = krate {
        let krate = DefId {
            krate: *krate,
            index: CRATE_DEF_INDEX,
        };
        let mut items = cx.tcx.item_children(krate);
        let mut path_it = path.iter().skip(1).peekable();
        loop {
            let segment = match path_it.next() {
                Some(segment) => segment,
                // Ran out of segments without resolving: no match.
                None => return None,
            };
            // `mem::replace` swaps in an empty arena slice so `items` can be
            // reassigned inside the loop while we iterate the old value.
            let result = SmallVec::<[_; 8]>::new();
            for item in mem::replace(&mut items, cx.tcx.arena.alloc_slice(&result)).iter() {
                if item.ident.name.as_str() == *segment {
                    if path_it.peek().is_none() {
                        // Last segment matched: this is the resolution.
                        return Some(item.res);
                    }
                    // Descend into this item's children for the next segment.
                    items = cx.tcx.item_children(item.res.def_id());
                    break;
                }
            }
        }
    } else {
        None
    }
}
/// Resolves a `QPath` to a `Res`, falling back to typeck tables for
/// type-relative paths.
pub fn qpath_res(cx: &LateContext<'_, '_>, qpath: &hir::QPath, id: hir::HirId) -> Res {
    match qpath {
        // Fully resolved paths already carry their resolution.
        hir::QPath::Resolved(_, path) => path.res,
        hir::QPath::TypeRelative(..) => {
            // Type-relative resolution needs typeck results; not every owner
            // has them (e.g. items without bodies), hence the guard.
            if cx.tcx.has_typeck_tables(id.owner_def_id()) {
                cx.tcx.typeck_tables_of(id.owner_def_id()).qpath_res(qpath, id)
            } else {
                Res::Err
            }
        },
    }
}
/// Convenience function to get the `DefId` of a trait by path.
/// It could be a trait or trait alias.
///
/// Returns `None` if the path does not resolve or resolves to something that
/// is not a trait (or trait alias).
pub fn get_trait_def_id(cx: &LateContext<'_, '_>, path: &[&str]) -> Option<DefId> {
    // `?` replaces the manual `match { Some => …, None => return None }`.
    match path_to_res(cx, path)? {
        Res::Def(DefKind::Trait, trait_id) | Res::Def(DefKind::TraitAlias, trait_id) => Some(trait_id),
        // A hard resolution error here means the hard-coded path is wrong — a bug.
        Res::Err => unreachable!("this trait resolution is impossible: {:?}", &path),
        _ => None,
    }
}
/// Checks whether a type implements a trait.
/// See also `get_trait_def_id`.
pub fn implements_trait<'a, 'tcx>(
    cx: &LateContext<'a, 'tcx>,
    ty: Ty<'tcx>,
    trait_id: DefId,
    ty_params: &[GenericArg<'tcx>],
) -> bool {
    // Regions are irrelevant to trait selection here; erase them first.
    let ty = cx.tcx.erase_regions(&ty);
    // Build the `ty: Trait<ty_params>` obligation...
    let obligation = cx.tcx.predicate_for_trait_def(
        cx.param_env,
        traits::ObligationCause::dummy(),
        trait_id,
        0,
        ty,
        ty_params,
    );
    // ...and ask the trait solver whether it holds (modulo regions).
    cx.tcx
        .infer_ctxt()
        .enter(|infcx| infcx.predicate_must_hold_modulo_regions(&obligation))
}
/// Gets the `hir::TraitRef` of the trait the given method is implemented for.
///
/// Use this if you want to find the `TraitRef` of the `Add` trait in this example:
///
/// ```rust
/// struct Point(isize, isize);
///
/// impl std::ops::Add for Point {
///     type Output = Self;
///
///     fn add(self, other: Self) -> Self {
///         Point(0, 0)
///     }
/// }
/// ```
///
/// Returns `None` for methods not inside a trait `impl` (including inherent impls).
pub fn trait_ref_of_method<'tcx>(cx: &LateContext<'_, 'tcx>, hir_id: HirId) -> Option<&'tcx TraitRef> {
    // Get the implemented trait for the current function
    let parent_impl = cx.tcx.hir().get_parent_item(hir_id);
    if_chain! {
        // A top-level fn's parent is the crate root; skip that case.
        if parent_impl != hir::CRATE_HIR_ID;
        if let hir::Node::Item(item) = cx.tcx.hir().get(parent_impl);
        if let hir::ItemKind::Impl(_, _, _, _, trait_ref, _, _) = &item.kind;
        then { return trait_ref.as_ref(); }
    }
    None
}
/// Checks whether this type implements `Drop`.
pub fn has_drop<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    // Only ADTs can have a destructor; everything else is trivially `false`.
    ty.ty_adt_def().map_or(false, |adt| adt.has_dtor(cx.tcx))
}
/// Returns the method names and argument list of nested method call expressions that make up
/// `expr`. method/span lists are sorted with the most recent call first.
pub fn method_calls(expr: &Expr, max_depth: usize) -> (Vec<Symbol>, Vec<&[Expr]>, Vec<Span>) {
    let mut method_names = Vec::with_capacity(max_depth);
    let mut arg_lists = Vec::with_capacity(max_depth);
    let mut spans = Vec::with_capacity(max_depth);
    let mut current = expr;
    for _ in 0..max_depth {
        // NOTE: the previous revision contained the mojibake token `¤t.kind`
        // (a garbled `&current.kind`), which did not compile; restored here.
        if let ExprKind::MethodCall(path, span, args) = &current.kind {
            // Stop as soon as any argument comes from a macro expansion.
            if args.iter().any(|e| e.span.from_expansion()) {
                break;
            }
            method_names.push(path.ident.name);
            arg_lists.push(&**args);
            spans.push(*span);
            // The receiver is the first "argument" in HIR method calls.
            current = &args[0];
        } else {
            break;
        }
    }
    (method_names, arg_lists, spans)
}
/// Matches an `Expr` against a chain of methods, and return the matched `Expr`s.
///
/// For example, if `expr` represents the `.baz()` in `foo.bar().baz()`,
/// `matched_method_chain(expr, &["bar", "baz"])` will return a `Vec`
/// containing the `Expr`s for
/// `.bar()` and `.baz()`
///
/// Returns `None` on any mismatch, or when any argument in the chain comes
/// from a macro expansion.
pub fn method_chain_args<'a>(expr: &'a Expr, methods: &[&str]) -> Option<Vec<&'a [Expr]>> {
    let mut current = expr;
    let mut matched = Vec::with_capacity(methods.len());
    for method_name in methods.iter().rev() {
        // method chains are stored last -> first
        if let ExprKind::MethodCall(ref path, _, ref args) = current.kind {
            if path.ident.name.as_str() == *method_name {
                if args.iter().any(|e| e.span.from_expansion()) {
                    return None;
                }
                matched.push(&**args); // build up `matched` backwards
                current = &args[0] // go to parent expression
            } else {
                return None;
            }
        } else {
            return None;
        }
    }
    // Reverse `matched` so that it is in the same order as `methods`.
    matched.reverse();
    Some(matched)
}
/// Returns `true` if the provided `def_id` is an entrypoint to a program.
pub fn is_entrypoint_fn(cx: &LateContext<'_, '_>, def_id: DefId) -> bool {
    // `entry_fn` is `None` for non-binary crates, in which case nothing is an entrypoint.
    cx.tcx
        .entry_fn(LOCAL_CRATE)
        .map_or(false, |(entry_fn_def_id, _)| def_id == entry_fn_def_id)
}
/// Gets the name of the item the expression is in, if available.
///
/// Works for items, trait items, and impl items; returns `None` for any
/// other enclosing node.
pub fn get_item_name(cx: &LateContext<'_, '_>, expr: &Expr) -> Option<Name> {
    let parent_id = cx.tcx.hir().get_parent_item(expr.hir_id);
    match cx.tcx.hir().find(parent_id) {
        Some(Node::Item(&Item { ref ident, .. })) => Some(ident.name),
        Some(Node::TraitItem(&TraitItem { ident, .. })) | Some(Node::ImplItem(&ImplItem { ident, .. })) => {
            Some(ident.name)
        },
        _ => None,
    }
}
/// Gets the name of a `Pat`, if any.
///
/// Sees through `box` and `&` patterns; single-segment path patterns also count.
pub fn get_pat_name(pat: &Pat) -> Option<Name> {
    match pat.kind {
        PatKind::Binding(.., ref spname, _) => Some(spname.name),
        PatKind::Path(ref qpath) => single_segment_path(qpath).map(|ps| ps.ident.name),
        // Recurse through box/reference patterns to the inner binding.
        PatKind::Box(ref p) | PatKind::Ref(ref p, _) => get_pat_name(&*p),
        _ => None,
    }
}
/// HIR visitor that records whether a given name occurs anywhere in the
/// visited subtree; used by `contains_name`.
struct ContainsName {
    // Name to search for.
    name: Name,
    // Set to `true` once the name has been seen.
    result: bool,
}
impl<'tcx> Visitor<'tcx> for ContainsName {
    // Flag a match whenever any visited name equals the needle.
    fn visit_name(&mut self, _: Span, name: Name) {
        if self.name == name {
            self.result = true;
        }
    }
    // Do not descend into nested bodies/items.
    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
        NestedVisitorMap::None
    }
}
/// Checks if an `Expr` contains a certain name.
pub fn contains_name(name: Name, expr: &Expr) -> bool {
    // Walk the expression and report whether the visitor saw the name.
    let mut visitor = ContainsName { name, result: false };
    visitor.visit_expr(expr);
    visitor.result
}
/// Converts a span to a code snippet if available, otherwise use default.
///
/// This is useful if you want to provide suggestions for your lint or more generally, if you want
/// to convert a given `Span` to a `str`.
///
/// # Example
/// ```rust,ignore
/// snippet(cx, expr.span, "..")
/// ```
pub fn snippet<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
    match snippet_opt(cx, span) {
        // A real snippet is an owned `String`.
        Some(source) => Cow::Owned(source),
        // Otherwise borrow the caller-supplied default.
        None => Cow::Borrowed(default),
    }
}
/// Same as `snippet`, but it adapts the applicability level by following rules:
///
/// - Applicability level `Unspecified` will never be changed.
/// - If the span is inside a macro, change the applicability level to `MaybeIncorrect`.
/// - If the default value is used and the applicability level is `MachineApplicable`, change it to
/// `HasPlaceholders`
pub fn snippet_with_applicability<'a, T: LintContext>(
    cx: &T,
    span: Span,
    default: &'a str,
    applicability: &mut Applicability,
) -> Cow<'a, str> {
    // Macro-expanded code can't be reliably machine-fixed.
    if *applicability != Applicability::Unspecified && span.from_expansion() {
        *applicability = Applicability::MaybeIncorrect;
    }
    snippet_opt(cx, span).map_or_else(
        || {
            // Falling back to the placeholder default downgrades the fix.
            if *applicability == Applicability::MachineApplicable {
                *applicability = Applicability::HasPlaceholders;
            }
            Cow::Borrowed(default)
        },
        From::from,
    )
}
/// Same as `snippet`, but should only be used when it's clear that the input span is
/// not a macro argument.
pub fn snippet_with_macro_callsite<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
    // Resolve to the macro call site before taking the snippet.
    snippet(cx, span.source_callsite(), default)
}
/// Converts a span to a code snippet. Returns `None` if not available.
pub fn snippet_opt<T: LintContext>(cx: &T, span: Span) -> Option<String> {
    // `span_to_snippet` errors (e.g. spans across files) become `None`.
    cx.sess().source_map().span_to_snippet(span).ok()
}
/// Converts a span (from a block) to a code snippet if available, otherwise use
/// default.
/// This trims the code of indentation, except for the first line. Use it for
/// blocks or block-like
/// things which need to be printed as such.
///
/// # Example
/// ```rust,ignore
/// snippet_block(cx, expr.span, "..")
/// ```
pub fn snippet_block<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
    let snip = snippet(cx, span, default);
    // `true` keeps the first line's indentation untouched.
    trim_multiline(snip, true)
}
/// Same as `snippet_block`, but adapts the applicability level by the rules of
/// `snippet_with_applicabiliy`.
pub fn snippet_block_with_applicability<'a, T: LintContext>(
    cx: &T,
    span: Span,
    default: &'a str,
    applicability: &mut Applicability,
) -> Cow<'a, str> {
    let snip = snippet_with_applicability(cx, span, default, applicability);
    // `true` keeps the first line's indentation untouched.
    trim_multiline(snip, true)
}
/// Returns a new Span that covers the full last line of the given Span
///
/// # Panics
///
/// Panics (via `unwrap`) if the span's start position cannot be mapped to a line.
pub fn last_line_of_span<T: LintContext>(cx: &T, span: Span) -> Span {
    let source_map_and_line = cx.sess().source_map().lookup_line(span.lo()).unwrap();
    let line_no = source_map_and_line.line;
    // Start of the line containing `span.lo()`, end at the span's own end.
    let line_start = &source_map_and_line.sf.lines[line_no];
    Span::new(*line_start, span.hi(), span.ctxt())
}
/// Like `snippet_block`, but add braces if the expr is not an `ExprKind::Block`.
/// Also takes an `Option<String>` which can be put inside the braces.
pub fn expr_block<'a, T: LintContext>(cx: &T, expr: &Expr, option: Option<String>, default: &'a str) -> Cow<'a, str> {
    // NOTE: `code` is unused on the macro-expansion branch below, where the
    // callsite snippet is taken instead.
    let code = snippet_block(cx, expr.span, default);
    let string = option.unwrap_or_default();
    if expr.span.from_expansion() {
        // For expanded code, show the macro call site wrapped in braces.
        Cow::Owned(format!("{{ {} }}", snippet_with_macro_callsite(cx, expr.span, default)))
    } else if let ExprKind::Block(_, _) = expr.kind {
        // Already a block: append the optional extra content verbatim.
        Cow::Owned(format!("{}{}", code, string))
    } else if string.is_empty() {
        Cow::Owned(format!("{{ {} }}", code))
    } else {
        Cow::Owned(format!("{{\n{};\n{}\n}}", code, string))
    }
}
/// Trim indentation from a multiline string with possibility of ignoring the
/// first line.
pub fn trim_multiline(s: Cow<'_, str>, ignore_first: bool) -> Cow<'_, str> {
    // Strip spaces, then tabs, then spaces again so that mixed
    // space/tab indentation is fully removed.
    let after_spaces = trim_multiline_inner(s, ignore_first, ' ');
    let after_tabs = trim_multiline_inner(after_spaces, ignore_first, '\t');
    trim_multiline_inner(after_tabs, ignore_first, ' ')
}
/// Removes the longest common leading run of `ch` from every line of `s`
/// (empty lines — and the first line when `ignore_first` is set — are left alone).
fn trim_multiline_inner(s: Cow<'_, str>, ignore_first: bool, ch: char) -> Cow<'_, str> {
    // Find the shortest leading run of `ch` over all considered, non-empty lines.
    let mut indent: Option<usize> = None;
    for line in s.lines().skip(usize::from(ignore_first)) {
        if line.is_empty() {
            // Empty lines must not drag the common indent down to zero.
            continue;
        }
        let lead = line
            .char_indices()
            .find(|&(_, c)| c != ch)
            .map_or(line.len(), |(i, _)| i);
        indent = Some(indent.map_or(lead, |cur| cur.min(lead)));
    }
    let indent = indent.unwrap_or(0);
    if indent == 0 {
        // Nothing to strip: hand the input back without reallocating.
        return s;
    }
    let stripped: Vec<&str> = s
        .lines()
        .enumerate()
        .map(|(i, line)| {
            if (ignore_first && i == 0) || line.is_empty() {
                line
            } else {
                // `indent` is a valid byte offset: the skipped prefix is a run
                // of the single char `ch` on every non-empty line.
                &line[indent..]
            }
        })
        .collect();
    Cow::Owned(stripped.join("\n"))
}
/// Gets the parent expression, if any –- this is useful to constrain a lint.
pub fn get_parent_expr<'c>(cx: &'c LateContext<'_, '_>, e: &Expr) -> Option<&'c Expr> {
    let map = &cx.tcx.hir();
    let hir_id = e.hir_id;
    let parent_id = map.get_parent_node(hir_id);
    // A node that is its own parent is the root; it has no parent expression.
    if hir_id == parent_id {
        return None;
    }
    map.find(parent_id).and_then(|node| {
        // Only expression parents count; e.g. a statement parent yields `None`.
        if let Node::Expr(parent) = node {
            Some(parent)
        } else {
            None
        }
    })
}
/// Returns the enclosing block of a node, looking through function and
/// method items to their body blocks.
pub fn get_enclosing_block<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, hir_id: HirId) -> Option<&'tcx Block> {
    let map = &cx.tcx.hir();
    let enclosing_node = map
        .get_enclosing_scope(hir_id)
        .and_then(|enclosing_id| map.find(enclosing_id));
    if let Some(node) = enclosing_node {
        match node {
            Node::Block(block) => Some(block),
            // For fn items and methods, descend into the body's top-level block.
            Node::Item(&Item {
                kind: ItemKind::Fn(_, _, eid),
                ..
            })
            | Node::ImplItem(&ImplItem {
                kind: ImplItemKind::Method(_, eid),
                ..
            }) => match cx.tcx.hir().body(eid).value.kind {
                ExprKind::Block(ref block, _) => Some(block),
                _ => None,
            },
            _ => None,
        }
    } else {
        None
    }
}
/// Returns the base type for HIR references and pointers.
pub fn walk_ptrs_hir_ty(ty: &hir::Ty) -> &hir::Ty {
    match ty.kind {
        // Peel `*const/*mut` and `&`/`&mut` layers recursively.
        TyKind::Ptr(ref mut_ty) | TyKind::Rptr(_, ref mut_ty) => walk_ptrs_hir_ty(&mut_ty.ty),
        _ => ty,
    }
}
/// Returns the base type for references and raw pointers.
///
/// Note: despite the name, only `ty::Ref` layers are peeled here.
pub fn walk_ptrs_ty(ty: Ty<'_>) -> Ty<'_> {
    match ty.kind {
        ty::Ref(_, ty, _) => walk_ptrs_ty(ty),
        _ => ty,
    }
}
/// Returns the base type for references and raw pointers, and count reference
/// depth.
pub fn walk_ptrs_ty_depth(ty: Ty<'_>) -> (Ty<'_>, usize) {
    // Tail-recursive helper carrying the number of `&` layers peeled so far.
    fn inner(ty: Ty<'_>, depth: usize) -> (Ty<'_>, usize) {
        match ty.kind {
            ty::Ref(_, ty, _) => inner(ty, depth + 1),
            _ => (ty, depth),
        }
    }
    inner(ty, 0)
}
/// Checks whether the given expression is a constant integer of the given value.
/// unlike `is_integer_literal`, this version does const folding
pub fn is_integer_const(cx: &LateContext<'_, '_>, e: &Expr, value: u128) -> bool {
    // Fast path: a plain literal needs no const evaluation.
    if is_integer_literal(e, value) {
        return true;
    }
    let map = cx.tcx.hir();
    let parent_item = map.get_parent_item(e.hir_id);
    // Const-fold the expression in the context of its owning body.
    if let Some((Constant::Int(v), _)) = map
        .maybe_body_owned_by(parent_item)
        .and_then(|body_id| constant(cx, cx.tcx.body_tables(body_id), e))
    {
        value == v
    } else {
        false
    }
}
/// Checks whether the given expression is a constant literal of the given value.
pub fn is_integer_literal(expr: &Expr, value: u128) -> bool {
    // FIXME: use constant folding
    match expr.kind {
        // Only direct integer literals match; suffixes are ignored.
        ExprKind::Lit(ref lit) => matches!(lit.node, LitKind::Int(v, _) if v == value),
        _ => false,
    }
}
/// Returns `true` if the given `Expr` has been coerced before.
///
/// Examples of coercions can be found in the Nomicon at
/// <https://doc.rust-lang.org/nomicon/coercions.html>.
///
/// See `rustc::ty::adjustment::Adjustment` and `rustc_typeck::check::coercion` for more
/// information on adjustments and coercions.
pub fn is_adjusted(cx: &LateContext<'_, '_>, e: &Expr) -> bool {
    // Any adjustment entry for this expression means typeck applied a coercion.
    cx.tables.adjustments().get(e.hir_id).is_some()
}
/// Returns the pre-expansion span if is this comes from an expansion of the
/// macro `name`.
/// See also `is_direct_expn_of`.
#[must_use]
pub fn is_expn_of(mut span: Span, name: &str) -> Option<Span> {
    // Walk outward through nested expansions until `name` is found
    // or a non-expanded span is reached.
    loop {
        if span.from_expansion() {
            let data = span.ctxt().outer_expn_data();
            let mac_name = data.kind.descr();
            let new_span = data.call_site;
            if mac_name.as_str() == name {
                return Some(new_span);
            } else {
                // Not the macro we want: keep climbing to the outer call site.
                span = new_span;
            }
        } else {
            return None;
        }
    }
}
/// Returns the pre-expansion span if the span directly comes from an expansion
/// of the macro `name`.
/// The difference with `is_expn_of` is that in
/// ```rust,ignore
/// foo!(bar!(42));
/// ```
/// `42` is considered expanded from `foo!` and `bar!` by `is_expn_of` but only
/// `bar!` by
/// `is_direct_expn_of`.
#[must_use]
pub fn is_direct_expn_of(span: Span, name: &str) -> Option<Span> {
    // Guard clause: a span that is not expanded at all can't match.
    if !span.from_expansion() {
        return None;
    }
    // Only the innermost expansion is consulted — no outward walking here.
    let data = span.ctxt().outer_expn_data();
    if data.kind.descr().as_str() == name {
        Some(data.call_site)
    } else {
        None
    }
}
/// Convenience function to get the return type of a function.
pub fn return_ty<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, fn_item: hir::HirId) -> Ty<'tcx> {
    let fn_def_id = cx.tcx.hir().local_def_id(fn_item);
    let ret_ty = cx.tcx.fn_sig(fn_def_id).output();
    // Strip the `Binder` so callers get a plain `Ty`.
    cx.tcx.erase_late_bound_regions(&ret_ty)
}
/// Checks if two types are the same.
///
/// This discards any lifetime annotations, too.
//
// FIXME: this works correctly for lifetimes bounds (`for <'a> Foo<'a>` ==
// `for <'b> Foo<'b>`, but not for type parameters).
pub fn same_tys<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
    // Wrap in a binder and erase late-bound regions so lifetimes don't
    // influence the comparison.
    let a = cx.tcx.erase_late_bound_regions(&Binder::bind(a));
    let b = cx.tcx.erase_late_bound_regions(&Binder::bind(b));
    // Ask the inference context whether the two can be unified.
    cx.tcx
        .infer_ctxt()
        .enter(|infcx| infcx.can_eq(cx.param_env, a, b).is_ok())
}
/// Returns `true` if the given type is an `unsafe` function.
pub fn type_is_unsafe_function<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.kind {
        // Both fn items and fn pointers expose their unsafety via the signature.
        ty::FnDef(..) | ty::FnPtr(_) => ty.fn_sig(cx.tcx).unsafety() == Unsafety::Unsafe,
        _ => false,
    }
}
/// Returns `true` if `ty` implements `Copy` (lifetimes disregarded).
pub fn is_copy<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    ty.is_copy_modulo_regions(cx.tcx, cx.param_env, DUMMY_SP)
}
/// Checks if an expression is constructing a tuple-like enum variant or struct
/// (or calling a function that is promotable to a constant).
pub fn is_ctor_or_promotable_const_function(cx: &LateContext<'_, '_>, expr: &Expr) -> bool {
    if let ExprKind::Call(ref fun, _) = expr.kind {
        if let ExprKind::Path(ref qp) = fun.kind {
            let res = cx.tables.qpath_res(qp, fun.hir_id);
            return match res {
                // Enum variants and tuple-struct/variant constructors.
                def::Res::Def(DefKind::Variant, ..) | Res::Def(DefKind::Ctor(..), _) => true,
                // Any other definition counts only if it is a promotable const fn.
                def::Res::Def(_, def_id) => cx.tcx.is_promotable_const_fn(def_id),
                _ => false,
            };
        }
    }
    false
}
/// Returns `true` if a pattern is refutable.
pub fn is_refutable(cx: &LateContext<'_, '_>, pat: &Pat) -> bool {
    // A path/struct/tuple-struct pattern is refutable iff it names an enum
    // variant (structs have exactly one "variant", so they can't fail to match).
    fn is_enum_variant(cx: &LateContext<'_, '_>, qpath: &QPath, id: HirId) -> bool {
        matches!(
            cx.tables.qpath_res(qpath, id),
            def::Res::Def(DefKind::Variant, ..) | Res::Def(DefKind::Ctor(def::CtorOf::Variant, _), _)
        )
    }
    // A compound pattern is refutable if any sub-pattern is.
    fn are_refutable<'a, I: Iterator<Item = &'a Pat>>(cx: &LateContext<'_, '_>, mut i: I) -> bool {
        i.any(|pat| is_refutable(cx, pat))
    }
    match pat.kind {
        // Bindings and wildcards always match.
        PatKind::Binding(..) | PatKind::Wild => false,
        PatKind::Box(ref pat) | PatKind::Ref(ref pat, _) => is_refutable(cx, pat),
        // Literals and ranges can always fail to match.
        PatKind::Lit(..) | PatKind::Range(..) => true,
        PatKind::Path(ref qpath) => is_enum_variant(cx, qpath, pat.hir_id),
        PatKind::Or(ref pats) | PatKind::Tuple(ref pats, _) => are_refutable(cx, pats.iter().map(|pat| &**pat)),
        PatKind::Struct(ref qpath, ref fields, _) => {
            if is_enum_variant(cx, qpath, pat.hir_id) {
                true
            } else {
                are_refutable(cx, fields.iter().map(|field| &*field.pat))
            }
        },
        PatKind::TupleStruct(ref qpath, ref pats, _) => {
            if is_enum_variant(cx, qpath, pat.hir_id) {
                true
            } else {
                are_refutable(cx, pats.iter().map(|pat| &**pat))
            }
        },
        // Slice patterns constrain length/shape, so any sub-pattern may refute.
        PatKind::Slice(ref head, ref middle, ref tail) => {
            are_refutable(cx, head.iter().chain(middle).chain(tail.iter()).map(|pat| &**pat))
        },
    }
}
/// Checks for the `#[automatically_derived]` attribute all `#[derive]`d
/// implementations have.
pub fn is_automatically_derived(attrs: &[ast::Attribute]) -> bool {
    attr::contains_name(attrs, sym!(automatically_derived))
}
/// Remove blocks around an expression.
///
/// Ie. `x`, `{ x }` and `{{{{ x }}}}` all give `x`. `{ x; y }` and `{}` return
/// themselves.
pub fn remove_blocks(expr: &Expr) -> &Expr {
    // Only peel a block that has no statements and a tail expression;
    // anything else (including `{}`) is returned unchanged.
    if let ExprKind::Block(ref block, _) = expr.kind {
        if block.stmts.is_empty() {
            if let Some(ref tail) = block.expr {
                return remove_blocks(tail);
            }
        }
    }
    expr
}
/// Returns `true` if the parameter pattern binds the name `self`.
pub fn is_self(slf: &Param) -> bool {
    match slf.pat.kind {
        PatKind::Binding(.., name, _) => name.name == kw::SelfLower,
        _ => false,
    }
}
/// Returns `true` if the HIR type is the `Self` type of an impl/trait.
pub fn is_self_ty(slf: &hir::Ty) -> bool {
    if_chain! {
        if let TyKind::Path(ref qp) = slf.kind;
        if let QPath::Resolved(None, ref path) = *qp;
        // Resolution marks `Self` explicitly; no name comparison needed.
        if let Res::SelfTy(..) = path.res;
        then {
            return true
        }
    }
    false
}
/// Iterates over the parameter patterns of `body`, one per input declared in `decl`.
pub fn iter_input_pats<'tcx>(decl: &FnDecl, body: &'tcx Body) -> impl Iterator<Item = &'tcx Param> {
    // Index through `decl` so exactly `decl.inputs.len()` params are yielded.
    (0..decl.inputs.len()).map(move |i| &body.params[i])
}
/// Checks if a given expression is a match expression expanded from the `?`
/// operator or the `try` macro.
pub fn is_try(expr: &Expr) -> Option<&Expr> {
    // Recognizes the arm `Ok(x) => x` (the binding must be returned unchanged).
    fn is_ok(arm: &Arm) -> bool {
        if_chain! {
            if let PatKind::TupleStruct(ref path, ref pat, None) = arm.pat.kind;
            if match_qpath(path, &paths::RESULT_OK[1..]);
            if let PatKind::Binding(_, hir_id, _, None) = pat[0].kind;
            if let ExprKind::Path(QPath::Resolved(None, ref path)) = arm.body.kind;
            if let Res::Local(lid) = path.res;
            // The body must be exactly the local bound by the pattern.
            if lid == hir_id;
            then {
                return true;
            }
        }
        false
    }
    // Recognizes any arm matching on `Err(..)`.
    fn is_err(arm: &Arm) -> bool {
        if let PatKind::TupleStruct(ref path, _, _) = arm.pat.kind {
            match_qpath(path, &paths::RESULT_ERR[1..])
        } else {
            false
        }
    }
    if let ExprKind::Match(_, ref arms, ref source) = expr.kind {
        // desugared from a `?` operator
        if let MatchSource::TryDesugar = *source {
            return Some(expr);
        }
        // Otherwise look for a hand-written (or `try!`-style) two-arm match
        // consisting of an Ok arm and an Err arm, in either order.
        if_chain! {
            if arms.len() == 2;
            if arms[0].guard.is_none();
            if arms[1].guard.is_none();
            if (is_ok(&arms[0]) && is_err(&arms[1])) ||
                (is_ok(&arms[1]) && is_err(&arms[0]));
            then {
                return Some(expr);
            }
        }
    }
    None
}
/// Returns `true` if the lint is allowed in the current context
///
/// Useful for skipping long running code when it's unnecessary
pub fn is_allowed(cx: &LateContext<'_, '_>, lint: &'static Lint, id: HirId) -> bool {
    cx.tcx.lint_level_at_node(lint, id).0 == Level::Allow
}
/// Returns the bound name of an argument pattern, looking through `&`/`&mut` patterns.
pub fn get_arg_name(pat: &Pat) -> Option<ast::Name> {
    match pat.kind {
        // Peel reference patterns and retry on the inner pattern.
        PatKind::Ref(ref inner, _) => get_arg_name(inner),
        // Only plain bindings (no sub-pattern) count.
        PatKind::Binding(.., ident, None) => Some(ident.name),
        _ => None,
    }
}
/// Returns the bit width of the signed integer type `ity` on the target.
pub fn int_bits(tcx: TyCtxt<'_>, ity: ast::IntTy) -> u64 {
    layout::Integer::from_attr(&tcx, attr::IntType::SignedInt(ity))
        .size()
        .bits()
}
#[allow(clippy::cast_possible_wrap)]
/// Turn a constant int byte representation into an i128
pub fn sext(tcx: TyCtxt<'_>, u: u128, ity: ast::IntTy) -> i128 {
    // Shift the value's sign bit up to bit 127, then arithmetic-shift back
    // down so the sign extends over the unused high bits.
    let amt = 128 - int_bits(tcx, ity);
    ((u as i128) << amt) >> amt
}
#[allow(clippy::cast_sign_loss)]
/// clip unused bytes
pub fn unsext(tcx: TyCtxt<'_>, u: i128, ity: ast::IntTy) -> u128 {
    // Shift up then logically shift back to zero out bits above the type width.
    let amt = 128 - int_bits(tcx, ity);
    ((u as u128) << amt) >> amt
}
/// clip unused bytes
pub fn clip(tcx: TyCtxt<'_>, u: u128, ity: ast::UintTy) -> u128 {
    let bits = layout::Integer::from_attr(&tcx, attr::IntType::UnsignedInt(ity))
        .size()
        .bits();
    // Shift up then back down to discard bits above the type's width.
    let amt = 128 - bits;
    (u << amt) >> amt
}
/// Removes block comments from the given `Vec` of lines.
///
/// # Examples
///
/// ```rust,ignore
/// without_block_comments(vec!["/*", "foo", "*/"]);
/// // => vec![]
///
/// without_block_comments(vec!["bar", "/*", "foo", "*/"]);
/// // => vec!["bar"]
/// ```
pub fn without_block_comments(lines: Vec<&str>) -> Vec<&str> {
let mut without = vec![];
let mut nest_level = 0;
for line in lines {
if line.contains("/*") {
nest_level += 1;
continue;
} else if line.contains("*/") {
nest_level -= 1;
continue;
}
if nest_level == 0 {
without.push(line);
}
}
without
}
/// Returns `true` if any enclosing item of `node` carries
/// `#[automatically_derived]` (i.e. the code came from a `#[derive]`).
pub fn any_parent_is_automatically_derived(tcx: TyCtxt<'_>, node: HirId) -> bool {
    let map = &tcx.hir();
    let mut prev_enclosing_node = None;
    let mut enclosing_node = node;
    // Climb parent items until the walk stops making progress (crate root).
    while Some(enclosing_node) != prev_enclosing_node {
        if is_automatically_derived(map.attrs(enclosing_node)) {
            return true;
        }
        prev_enclosing_node = Some(enclosing_node);
        enclosing_node = map.get_parent_item(enclosing_node);
    }
    false
}
/// Returns true if ty has `iter` or `iter_mut` methods
///
/// On success, returns the short name of the matched container/primitive
/// (e.g. `"Vec"`, `"array"`, `"slice"`).
pub fn has_iter_method(cx: &LateContext<'_, '_>, probably_ref_ty: Ty<'_>) -> Option<&'static str> {
    // FIXME: instead of this hard-coded list, we should check if `<adt>::iter`
    // exists and has the desired signature. Unfortunately FnCtxt is not exported
    // so we can't use its `lookup_method` method.
    let into_iter_collections: [&[&str]; 13] = [
        &paths::VEC,
        &paths::OPTION,
        &paths::RESULT,
        &paths::BTREESET,
        &paths::BTREEMAP,
        &paths::VEC_DEQUE,
        &paths::LINKED_LIST,
        &paths::BINARY_HEAP,
        &paths::HASHSET,
        &paths::HASHMAP,
        &paths::PATH_BUF,
        &paths::PATH,
        &paths::RECEIVER,
    ];
    // Look through one level of reference.
    let ty_to_check = match probably_ref_ty.kind {
        ty::Ref(_, ty_to_check, _) => ty_to_check,
        _ => probably_ref_ty,
    };
    let def_id = match ty_to_check.kind {
        // Arrays and slices have inherent `iter`/`iter_mut`.
        ty::Array(..) => return Some("array"),
        ty::Slice(..) => return Some("slice"),
        ty::Adt(adt, _) => adt.did,
        _ => return None,
    };
    for path in &into_iter_collections {
        if match_def_path(cx, def_id, path) {
            // Report only the type name, not the full path.
            return Some(*path.last().unwrap());
        }
    }
    None
}
/// Matches a function call with the given path and returns the arguments.
///
/// Usage:
///
/// ```rust,ignore
/// if let Some(args) = match_function_call(cx, begin_panic_call, &paths::BEGIN_PANIC);
/// ```
pub fn match_function_call<'a, 'tcx>(
    cx: &LateContext<'a, 'tcx>,
    expr: &'tcx Expr,
    path: &[&str],
) -> Option<&'tcx [Expr]> {
    if_chain! {
        // Must be a call whose callee is a path expression...
        if let ExprKind::Call(ref fun, ref args) = expr.kind;
        if let ExprKind::Path(ref qpath) = fun.kind;
        // ...that resolves to a definition with the expected path.
        if let Some(fun_def_id) = cx.tables.qpath_res(qpath, fun.hir_id).opt_def_id();
        if match_def_path(cx, fun_def_id, path);
        then {
            return Some(&args)
        }
    };
    None
}
// Unit tests for the pure string helpers in this module. The multi-line string
// literals below are deliberately laid out flat (under `#[rustfmt::skip]`),
// because their leading whitespace is part of the expected/input data.
#[cfg(test)]
mod test {
use super::{trim_multiline, without_block_comments};
// Single-line inputs: leading spaces/tabs are stripped entirely.
#[test]
fn test_trim_multiline_single_line() {
assert_eq!("", trim_multiline("".into(), false));
assert_eq!("...", trim_multiline("...".into(), false));
assert_eq!("...", trim_multiline("    ...".into(), false));
assert_eq!("...", trim_multiline("\t...".into(), false));
assert_eq!("...", trim_multiline("\t\t...".into(), false));
}
// Multi-line blocks: common indentation is removed, relative indent kept.
#[test]
#[rustfmt::skip]
fn test_trim_multiline_block() {
assert_eq!("\
if x {
y
} else {
z
}", trim_multiline("    if x {
y
} else {
z
}".into(), false));
assert_eq!("\
if x {
\ty
} else {
\tz
}", trim_multiline("    if x {
\ty
} else {
\tz
}".into(), false));
}
// Empty lines inside the block must not affect the computed indent.
#[test]
#[rustfmt::skip]
fn test_trim_multiline_empty_line() {
assert_eq!("\
if x {
y
} else {
z
}", trim_multiline("    if x {
y
} else {
z
}".into(), false));
}
// Block-comment stripping, including nested and one-line comments.
#[test]
fn test_without_block_comments_lines_without_block_comments() {
let result = without_block_comments(vec!["/*", "", "*/"]);
println!("result: {:?}", result);
assert!(result.is_empty());
let result = without_block_comments(vec!["", "/*", "", "*/", "#[crate_type = \"lib\"]", "/*", "", "*/", ""]);
assert_eq!(result, vec!["", "#[crate_type = \"lib\"]", ""]);
let result = without_block_comments(vec!["/* rust", "", "*/"]);
assert!(result.is_empty());
let result = without_block_comments(vec!["/* one-line comment */"]);
assert!(result.is_empty());
let result = without_block_comments(vec!["/* nested", "/* multi-line", "comment", "*/", "test", "*/"]);
assert!(result.is_empty());
let result = without_block_comments(vec!["/* nested /* inline /* comment */ test */ */"]);
assert!(result.is_empty());
let result = without_block_comments(vec!["foo", "bar", "baz"]);
assert_eq!(result, vec!["foo", "bar", "baz"]);
}
}
/// Checks whether the absolute def-path of `did` is exactly the segment
/// sequence `syms`.
pub fn match_def_path<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, did: DefId, syms: &[&str]) -> bool {
    let segments = cx.get_def_path(did);
    segments.len() == syms.len()
        && segments
            .into_iter()
            .zip(syms.iter())
            .all(|(seg, &want)| seg.as_str() == want)
}
/// Returns the list of condition expressions and the list of blocks in a
/// sequence of `if/else`.
/// E.g., this returns `([a, b], [c, d, e])` for the expression
/// `if a { c } else if b { d } else { e }`.
pub fn if_sequence(mut expr: &Expr) -> (SmallVec<[&Expr; 1]>, SmallVec<[&Block; 1]>) {
    let mut conds = SmallVec::new();
    let mut blocks: SmallVec<[&Block; 1]> = SmallVec::new();
    // Walk down the `else if` chain, collecting each condition and `then` block.
    while let Some((ref cond, ref then_expr, ref else_expr)) = higher::if_block(&expr) {
        conds.push(&**cond);
        if let ExprKind::Block(ref block, _) = then_expr.kind {
            blocks.push(block);
        } else {
            panic!("ExprKind::If node is not an ExprKind::Block");
        }
        if let Some(ref else_expr) = *else_expr {
            expr = else_expr;
        } else {
            break;
        }
    }
    // final `else {..}`
    if !blocks.is_empty() {
        if let ExprKind::Block(ref block, _) = expr.kind {
            blocks.push(&**block);
        }
    }
    (conds, blocks)
}
/// Returns `true` if the HIR parent of `expr` is itself an `if` expression
/// (directly, or as the body of a match arm produced by `if` desugaring).
pub fn parent_node_is_if_expr<'a, 'b>(expr: &Expr, cx: &LateContext<'a, 'b>) -> bool {
    let parent = cx.tcx.hir().get(cx.tcx.hir().get_parent_node(expr.hir_id));
    match parent {
        rustc::hir::Node::Expr(e) => higher::if_block(&e).is_some(),
        rustc::hir::Node::Arm(arm) => higher::if_block(&arm.body).is_some(),
        _ => false,
    }
}
dogfood
#[macro_use]
pub mod sym;
pub mod attrs;
pub mod author;
pub mod camel_case;
pub mod comparisons;
pub mod conf;
pub mod constants;
mod diagnostics;
pub mod higher;
mod hir_utils;
pub mod inspector;
pub mod internal_lints;
pub mod paths;
pub mod ptr;
pub mod sugg;
pub mod usage;
pub use self::attrs::*;
pub use self::diagnostics::*;
pub use self::hir_utils::{SpanlessEq, SpanlessHash};
use std::borrow::Cow;
use std::mem;
use if_chain::if_chain;
use matches::matches;
use rustc::hir;
use rustc::hir::def::{DefKind, Res};
use rustc::hir::def_id::{DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::hir::intravisit::{NestedVisitorMap, Visitor};
use rustc::hir::Node;
use rustc::hir::*;
use rustc::lint::{LateContext, Level, Lint, LintContext};
use rustc::traits;
use rustc::ty::{
self,
layout::{self, IntegerExt},
subst::GenericArg,
Binder, Ty, TyCtxt,
};
use rustc_errors::Applicability;
use smallvec::SmallVec;
use syntax::ast::{self, LitKind};
use syntax::attr;
use syntax::source_map::{Span, DUMMY_SP};
use syntax::symbol::{kw, Symbol};
use syntax_pos::hygiene::ExpnKind;
use crate::consts::{constant, Constant};
use crate::reexport::*;
/// Returns `true` if the two spans come from differing expansions (i.e., one is
/// from a macro and one isn't).
#[must_use]
pub fn differing_macro_contexts(lhs: Span, rhs: Span) -> bool {
    lhs.ctxt() != rhs.ctxt()
}
/// Returns `true` if the given `NodeId` is inside a constant context
///
/// # Example
///
/// ```rust,ignore
/// if in_constant(cx, expr.hir_id) {
///     // Do something
/// }
/// ```
pub fn in_constant(cx: &LateContext<'_, '_>, id: HirId) -> bool {
    // Classify the enclosing item of `id`.
    let parent_id = cx.tcx.hir().get_parent_item(id);
    match cx.tcx.hir().get(parent_id) {
        // Always-const contexts: consts, statics and anonymous constants.
        Node::Item(&Item {
            kind: ItemKind::Const(..),
            ..
        })
        | Node::TraitItem(&TraitItem {
            kind: TraitItemKind::Const(..),
            ..
        })
        | Node::ImplItem(&ImplItem {
            kind: ImplItemKind::Const(..),
            ..
        })
        | Node::AnonConst(_)
        | Node::Item(&Item {
            kind: ItemKind::Static(..),
            ..
        }) => true,
        // Functions and methods count only when declared `const fn`.
        Node::Item(&Item {
            kind: ItemKind::Fn(ref sig, ..),
            ..
        })
        | Node::ImplItem(&ImplItem {
            kind: ImplItemKind::Method(ref sig, _),
            ..
        }) => sig.header.constness == Constness::Const,
        _ => false,
    }
}
/// Returns `true` if this `span` was expanded by any macro, not counting
/// compiler-generated desugarings.
#[must_use]
pub fn in_macro(span: Span) -> bool {
    span.from_expansion() && !matches!(span.ctxt().outer_expn_data().kind, ExpnKind::Desugaring(..))
}
// If the snippet is empty, it's an attribute that was inserted during macro
// expansion and we want to ignore those, because they could come from external
// sources that the user has no control over.
// For some reason these attributes don't have any expansion info on them, so
// we have to check it this way until there is a better way.
pub fn is_present_in_source<T: LintContext>(cx: &T, span: Span) -> bool {
    // Unknown snippets are conservatively treated as present.
    snippet_opt(cx, span).map_or(true, |code| !code.is_empty())
}
/// Checks if type is struct, enum or union type with the given def path.
pub fn match_type(cx: &LateContext<'_, '_>, ty: Ty<'_>, path: &[&str]) -> bool {
    if let ty::Adt(adt, _) = ty.kind {
        match_def_path(cx, adt.did, path)
    } else {
        false
    }
}
/// Checks if the type is equal to a diagnostic item
pub fn is_type_diagnostic_item(cx: &LateContext<'_, '_>, ty: Ty<'_>, diag_item: Symbol) -> bool {
    if let ty::Adt(adt, _) = ty.kind {
        cx.tcx.is_diagnostic_item(diag_item, adt.did)
    } else {
        false
    }
}
/// Checks if the method call given in `expr` belongs to the given trait.
///
/// Returns `false` (instead of panicking, as the previous `unwrap()` did)
/// when `expr` is not a method call at all, or when the called item is not a
/// trait item.
pub fn match_trait_method(cx: &LateContext<'_, '_>, expr: &Expr, path: &[&str]) -> bool {
    cx.tables
        .type_dependent_def_id(expr.hir_id)
        .and_then(|def_id| cx.tcx.trait_of_item(def_id))
        .map_or(false, |trt_id| match_def_path(cx, trt_id, path))
}
/// Checks if an expression references a variable of the given name.
pub fn match_var(expr: &Expr, var: Name) -> bool {
    match expr.kind {
        ExprKind::Path(QPath::Resolved(None, ref path)) => {
            // A local variable reference is a single unqualified segment.
            path.segments.len() == 1 && path.segments[0].ident.name == var
        },
        _ => false,
    }
}
/// Returns the final segment of a `QPath`.
pub fn last_path_segment(path: &QPath) -> &PathSegment {
    match *path {
        // A resolved path always carries at least one segment.
        QPath::Resolved(_, ref path) => path.segments.last().expect("A path must have at least one segment"),
        QPath::TypeRelative(_, ref seg) => seg,
    }
}
/// Returns the sole segment of a `QPath`, or `None` for multi-segment
/// resolved paths.
pub fn single_segment_path(path: &QPath) -> Option<&PathSegment> {
    match *path {
        QPath::Resolved(_, ref path) if path.segments.len() == 1 => path.segments.first(),
        QPath::Resolved(..) => None,
        QPath::TypeRelative(_, ref segment) => Some(segment),
    }
}
/// Matches a `QPath` against a slice of segment string literals.
///
/// There is also `match_path` if you are dealing with a `rustc::hir::Path` instead of a
/// `rustc::hir::QPath`.
///
/// # Examples
/// ```rust,ignore
/// match_qpath(path, &["std", "rt", "begin_unwind"])
/// ```
pub fn match_qpath(path: &QPath, segments: &[&str]) -> bool {
    match *path {
        QPath::Resolved(_, ref path) => match_path(path, segments),
        QPath::TypeRelative(ref ty, ref segment) => match ty.kind {
            TyKind::Path(ref inner_path) => {
                // `<A::B>::c`: the inner path must match everything but the
                // last expected segment, and the trailing segment the last.
                !segments.is_empty()
                    && match_qpath(inner_path, &segments[..(segments.len() - 1)])
                    && segment.ident.name.as_str() == segments[segments.len() - 1]
            },
            _ => false,
        },
    }
}
/// Matches a `Path` against a slice of segment string literals.
///
/// Only the trailing segments are compared, so a shorter `segments` slice
/// matches any path that ends with it.
///
/// There is also `match_qpath` if you are dealing with a `rustc::hir::QPath` instead of a
/// `rustc::hir::Path`.
///
/// # Examples
///
/// ```rust,ignore
/// if match_path(&trait_ref.path, &paths::HASH) {
///     // This is the `std::hash::Hash` trait.
/// }
///
/// if match_path(ty_path, &["rustc", "lint", "Lint"]) {
///     // This is a `rustc::lint::Lint`.
/// }
/// ```
pub fn match_path(path: &Path, segments: &[&str]) -> bool {
    segments
        .iter()
        .rev()
        .zip(path.segments.iter().rev())
        .all(|(expected, actual)| actual.ident.name.as_str() == **expected)
}
/// Matches an `ast::Path` against a slice of segment string literals, e.g.
///
/// # Examples
/// ```rust,ignore
/// match_path_ast(path, &["std", "rt", "begin_unwind"])
/// ```
pub fn match_path_ast(path: &ast::Path, segments: &[&str]) -> bool {
    // Compare trailing segments only, mirroring `match_path` for HIR paths.
    path.segments
        .iter()
        .rev()
        .zip(segments.iter().rev())
        .all(|(a, b)| a.ident.name.as_str() == *b)
}
/// Gets the definition associated to a path.
pub fn path_to_res(cx: &LateContext<'_, '_>, path: &[&str]) -> Option<def::Res> {
    let crates = cx.tcx.crates();
    // `path[0]` names the crate; the remaining segments are walked through
    // `item_children` one module level at a time.
    let krate = crates
        .iter()
        .find(|&&krate| cx.tcx.crate_name(krate).as_str() == path[0]);
    if let Some(krate) = krate {
        let krate = DefId {
            krate: *krate,
            index: CRATE_DEF_INDEX,
        };
        let mut items = cx.tcx.item_children(krate);
        let mut path_it = path.iter().skip(1).peekable();
        loop {
            let segment = match path_it.next() {
                Some(segment) => segment,
                None => return None,
            };
            // Empty placeholder slice: `mem::replace` takes ownership of the
            // current `items` for iteration while leaving a valid (empty,
            // arena-allocated) slice behind.
            let result = SmallVec::<[_; 8]>::new();
            for item in mem::replace(&mut items, cx.tcx.arena.alloc_slice(&result)).iter() {
                if item.ident.name.as_str() == *segment {
                    if path_it.peek().is_none() {
                        return Some(item.res);
                    }
                    // Descend into the matched child and continue with the
                    // next segment.
                    items = cx.tcx.item_children(item.res.def_id());
                    break;
                }
            }
        }
    } else {
        None
    }
}
/// Resolves a `QPath`, falling back to the typeck tables for type-relative
/// paths (and to `Res::Err` when no tables exist for the owner).
pub fn qpath_res(cx: &LateContext<'_, '_>, qpath: &hir::QPath, id: hir::HirId) -> Res {
    if let hir::QPath::Resolved(_, path) = qpath {
        return path.res;
    }
    // Type-relative path: resolution lives in the owner's typeck tables.
    if cx.tcx.has_typeck_tables(id.owner_def_id()) {
        cx.tcx.typeck_tables_of(id.owner_def_id()).qpath_res(qpath, id)
    } else {
        Res::Err
    }
}
/// Convenience function to get the `DefId` of a trait by path.
/// It could be a trait or trait alias.
pub fn get_trait_def_id(cx: &LateContext<'_, '_>, path: &[&str]) -> Option<DefId> {
    match path_to_res(cx, path)? {
        Res::Def(DefKind::Trait, trait_id) | Res::Def(DefKind::TraitAlias, trait_id) => Some(trait_id),
        Res::Err => unreachable!("this trait resolution is impossible: {:?}", &path),
        _ => None,
    }
}
/// Checks whether a type implements a trait.
/// See also `get_trait_def_id`.
pub fn implements_trait<'a, 'tcx>(
    cx: &LateContext<'a, 'tcx>,
    ty: Ty<'tcx>,
    trait_id: DefId,
    ty_params: &[GenericArg<'tcx>],
) -> bool {
    // Regions don't affect the answer; erase them so the query isn't
    // needlessly restricted.
    let ty = cx.tcx.erase_regions(&ty);
    let obligation = cx.tcx.predicate_for_trait_def(
        cx.param_env,
        traits::ObligationCause::dummy(),
        trait_id,
        0,
        ty,
        ty_params,
    );
    cx.tcx
        .infer_ctxt()
        .enter(|infcx| infcx.predicate_must_hold_modulo_regions(&obligation))
}
/// Gets the `hir::TraitRef` of the trait the given method is implemented for.
///
/// Use this if you want to find the `TraitRef` of the `Add` trait in this example:
///
/// ```rust
/// struct Point(isize, isize);
///
/// impl std::ops::Add for Point {
///     type Output = Self;
///
///     fn add(self, other: Self) -> Self {
///         Point(0, 0)
///     }
/// }
/// ```
pub fn trait_ref_of_method<'tcx>(cx: &LateContext<'_, 'tcx>, hir_id: HirId) -> Option<&'tcx TraitRef> {
    // Get the implemented trait for the current function
    let parent_impl = cx.tcx.hir().get_parent_item(hir_id);
    if_chain! {
        // Only proceed when the method sits in a real `impl … for …` item.
        if parent_impl != hir::CRATE_HIR_ID;
        if let hir::Node::Item(item) = cx.tcx.hir().get(parent_impl);
        if let hir::ItemKind::Impl(_, _, _, _, trait_ref, _, _) = &item.kind;
        then { return trait_ref.as_ref(); }
    }
    None
}
/// Checks whether this type implements `Drop`.
pub fn has_drop<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    ty.ty_adt_def().map_or(false, |def| def.has_dtor(cx.tcx))
}
/// Returns the method names and argument list of nested method call expressions that make up
/// `expr`. method/span lists are sorted with the most recent call first.
pub fn method_calls(expr: &Expr, max_depth: usize) -> (Vec<Symbol>, Vec<&[Expr]>, Vec<Span>) {
    let mut method_names = Vec::with_capacity(max_depth);
    let mut arg_lists = Vec::with_capacity(max_depth);
    let mut spans = Vec::with_capacity(max_depth);
    let mut current = expr;
    for _ in 0..max_depth {
        if let ExprKind::MethodCall(path, span, args) = &current.kind {
            // Stop if any receiver/argument comes from a macro expansion.
            if args.iter().any(|e| e.span.from_expansion()) {
                break;
            }
            method_names.push(path.ident.name);
            arg_lists.push(&**args);
            spans.push(*span);
            // `args[0]` is the receiver; continue walking down the chain.
            current = &args[0];
        } else {
            break;
        }
    }
    (method_names, arg_lists, spans)
}
/// Matches an `Expr` against a chain of methods, and return the matched `Expr`s.
///
/// For example, if `expr` represents the `.baz()` in `foo.bar().baz()`,
/// `matched_method_chain(expr, &["bar", "baz"])` will return a `Vec`
/// containing the `Expr`s for
/// `.bar()` and `.baz()`
pub fn method_chain_args<'a>(expr: &'a Expr, methods: &[&str]) -> Option<Vec<&'a [Expr]>> {
    let mut current = expr;
    let mut matched = Vec::with_capacity(methods.len());
    for method_name in methods.iter().rev() {
        // method chains are stored last -> first
        if let ExprKind::MethodCall(ref path, _, ref args) = current.kind {
            if path.ident.name.as_str() == *method_name {
                // Refuse chains whose arguments come from macro expansions.
                if args.iter().any(|e| e.span.from_expansion()) {
                    return None;
                }
                matched.push(&**args); // build up `matched` backwards
                current = &args[0] // go to parent expression
            } else {
                return None;
            }
        } else {
            return None;
        }
    }
    // Reverse `matched` so that it is in the same order as `methods`.
    matched.reverse();
    Some(matched)
}
/// Returns `true` if the provided `def_id` is an entrypoint to a program.
pub fn is_entrypoint_fn(cx: &LateContext<'_, '_>, def_id: DefId) -> bool {
    match cx.tcx.entry_fn(LOCAL_CRATE) {
        Some((entry_fn_def_id, _)) => def_id == entry_fn_def_id,
        None => false,
    }
}
/// Gets the name of the item the expression is in, if available.
pub fn get_item_name(cx: &LateContext<'_, '_>, expr: &Expr) -> Option<Name> {
    // Look at the enclosing item / trait item / impl item.
    let parent_id = cx.tcx.hir().get_parent_item(expr.hir_id);
    match cx.tcx.hir().find(parent_id) {
        Some(Node::Item(&Item { ref ident, .. })) => Some(ident.name),
        Some(Node::TraitItem(&TraitItem { ident, .. })) | Some(Node::ImplItem(&ImplItem { ident, .. })) => {
            Some(ident.name)
        },
        _ => None,
    }
}
/// Gets the name of a `Pat`, if any.
pub fn get_pat_name(pat: &Pat) -> Option<Name> {
    match pat.kind {
        PatKind::Binding(.., ref spname, _) => Some(spname.name),
        PatKind::Path(ref qpath) => single_segment_path(qpath).map(|ps| ps.ident.name),
        // Look through one level of `box`/`&` patterns.
        PatKind::Box(ref p) | PatKind::Ref(ref p, _) => get_pat_name(&*p),
        _ => None,
    }
}
/// HIR visitor that records whether a given `Name` occurs anywhere in the
/// visited tree.
struct ContainsName {
    // The name being searched for.
    name: Name,
    // Set to `true` once a matching name has been visited.
    result: bool,
}
impl<'tcx> Visitor<'tcx> for ContainsName {
    // Record a hit as soon as any visited name matches.
    fn visit_name(&mut self, _: Span, name: Name) {
        if self.name == name {
            self.result = true;
        }
    }
    // Do not descend into nested bodies.
    fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
        NestedVisitorMap::None
    }
}
/// Checks if an `Expr` contains a certain name.
pub fn contains_name(name: Name, expr: &Expr) -> bool {
    let mut visitor = ContainsName { name, result: false };
    visitor.visit_expr(expr);
    visitor.result
}
/// Converts a span to a code snippet if available, otherwise use default.
///
/// This is useful if you want to provide suggestions for your lint or more generally, if you want
/// to convert a given `Span` to a `str`.
///
/// # Example
/// ```rust,ignore
/// snippet(cx, expr.span, "..")
/// ```
pub fn snippet<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
    match snippet_opt(cx, span) {
        Some(code) => Cow::Owned(code),
        None => Cow::Borrowed(default),
    }
}
/// Same as `snippet`, but it adapts the applicability level by following rules:
///
/// - Applicability level `Unspecified` will never be changed.
/// - If the span is inside a macro, change the applicability level to `MaybeIncorrect`.
/// - If the default value is used and the applicability level is `MachineApplicable`, change it to
/// `HasPlaceholders`
pub fn snippet_with_applicability<'a, T: LintContext>(
    cx: &T,
    span: Span,
    default: &'a str,
    applicability: &mut Applicability,
) -> Cow<'a, str> {
    // A snippet from inside a macro expansion can't be machine-applied safely.
    if *applicability != Applicability::Unspecified && span.from_expansion() {
        *applicability = Applicability::MaybeIncorrect;
    }
    snippet_opt(cx, span).map_or_else(
        || {
            // Falling back to the placeholder text demotes the suggestion.
            if *applicability == Applicability::MachineApplicable {
                *applicability = Applicability::HasPlaceholders;
            }
            Cow::Borrowed(default)
        },
        From::from,
    )
}
/// Same as `snippet`, but should only be used when it's clear that the input span is
/// not a macro argument.
pub fn snippet_with_macro_callsite<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
    let callsite = span.source_callsite();
    snippet(cx, callsite, default)
}
/// Converts a span to a code snippet. Returns `None` if not available.
pub fn snippet_opt<T: LintContext>(cx: &T, span: Span) -> Option<String> {
    match cx.sess().source_map().span_to_snippet(span) {
        Ok(code) => Some(code),
        Err(_) => None,
    }
}
/// Converts a span (from a block) to a code snippet if available, otherwise use
/// default.
/// This trims the code of indentation, except for the first line. Use it for
/// blocks or block-like
/// things which need to be printed as such.
///
/// # Example
/// ```rust,ignore
/// snippet_block(cx, expr.span, "..")
/// ```
pub fn snippet_block<'a, T: LintContext>(cx: &T, span: Span, default: &'a str) -> Cow<'a, str> {
    trim_multiline(snippet(cx, span, default), true)
}
/// Same as `snippet_block`, but adapts the applicability level by the rules of
/// `snippet_with_applicabiliy`.
pub fn snippet_block_with_applicability<'a, T: LintContext>(
    cx: &T,
    span: Span,
    default: &'a str,
    applicability: &mut Applicability,
) -> Cow<'a, str> {
    trim_multiline(snippet_with_applicability(cx, span, default, applicability), true)
}
/// Returns a new Span that covers the full last line of the given Span
pub fn last_line_of_span<T: LintContext>(cx: &T, span: Span) -> Span {
    // NOTE(review): `unwrap()` assumes `span.lo()` always maps to a real
    // source line — confirm this holds for synthetic spans.
    let source_map_and_line = cx.sess().source_map().lookup_line(span.lo()).unwrap();
    let line_no = source_map_and_line.line;
    let line_start = &source_map_and_line.sf.lines[line_no];
    Span::new(*line_start, span.hi(), span.ctxt())
}
/// Like `snippet_block`, but add braces if the expr is not an `ExprKind::Block`.
/// Also takes an `Option<String>` which can be put inside the braces.
pub fn expr_block<'a, T: LintContext>(cx: &T, expr: &Expr, option: Option<String>, default: &'a str) -> Cow<'a, str> {
    let code = snippet_block(cx, expr.span, default);
    let string = option.unwrap_or_default();
    if expr.span.from_expansion() {
        // Use the callsite snippet so the suggestion shows the macro
        // invocation rather than its expansion.
        Cow::Owned(format!("{{ {} }}", snippet_with_macro_callsite(cx, expr.span, default)))
    } else if let ExprKind::Block(_, _) = expr.kind {
        Cow::Owned(format!("{}{}", code, string))
    } else if string.is_empty() {
        Cow::Owned(format!("{{ {} }}", code))
    } else {
        Cow::Owned(format!("{{\n{};\n{}\n}}", code, string))
    }
}
/// Trim indentation from a multiline string with possibility of ignoring the
/// first line.
///
/// Runs three passes (spaces, then tabs, then spaces again) so mixed
/// indentation is handled.
pub fn trim_multiline(s: Cow<'_, str>, ignore_first: bool) -> Cow<'_, str> {
    let pass1 = trim_multiline_inner(s, ignore_first, ' ');
    let pass2 = trim_multiline_inner(pass1, ignore_first, '\t');
    trim_multiline_inner(pass2, ignore_first, ' ')
}
/// Strips the smallest common leading run of `ch` from every line of `s`
/// (skipping the first line when `ignore_first` is set, and leaving empty
/// lines untouched).
fn trim_multiline_inner(s: Cow<'_, str>, ignore_first: bool, ch: char) -> Cow<'_, str> {
    // Smallest indent (byte offset of the first non-`ch` char) over all
    // considered, non-empty lines; a line of only `ch` counts as fully
    // indented.
    let min_indent = s
        .lines()
        .skip(ignore_first as usize)
        .filter(|line| !line.is_empty())
        .map(|line| {
            line.char_indices()
                .find(|&(_, c)| c != ch)
                .map_or(line.len(), |(i, _)| i)
        })
        .min()
        .unwrap_or(0);
    if min_indent == 0 {
        return s;
    }
    let trimmed = s
        .lines()
        .enumerate()
        .map(|(i, line)| {
            if (ignore_first && i == 0) || line.is_empty() {
                line
            } else {
                line.split_at(min_indent).1
            }
        })
        .collect::<Vec<_>>()
        .join("\n");
    Cow::Owned(trimmed)
}
/// Gets the parent expression, if any –- this is useful to constrain a lint.
pub fn get_parent_expr<'c>(cx: &'c LateContext<'_, '_>, e: &Expr) -> Option<&'c Expr> {
    let map = &cx.tcx.hir();
    let parent_id = map.get_parent_node(e.hir_id);
    // A node that is its own parent is the crate root: no parent expression.
    if parent_id == e.hir_id {
        return None;
    }
    match map.find(parent_id) {
        Some(Node::Expr(parent)) => Some(parent),
        _ => None,
    }
}
/// Returns the innermost enclosing `Block` of `hir_id`, if any.
pub fn get_enclosing_block<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, hir_id: HirId) -> Option<&'tcx Block> {
    let map = &cx.tcx.hir();
    let enclosing_node = map
        .get_enclosing_scope(hir_id)
        .and_then(|enclosing_id| map.find(enclosing_id));
    if let Some(node) = enclosing_node {
        match node {
            Node::Block(block) => Some(block),
            // For fns/methods the enclosing scope is the item itself;
            // descend into its body block.
            Node::Item(&Item {
                kind: ItemKind::Fn(_, _, eid),
                ..
            })
            | Node::ImplItem(&ImplItem {
                kind: ImplItemKind::Method(_, eid),
                ..
            }) => match cx.tcx.hir().body(eid).value.kind {
                ExprKind::Block(ref block, _) => Some(block),
                _ => None,
            },
            _ => None,
        }
    } else {
        None
    }
}
/// Returns the base type for HIR references and pointers.
pub fn walk_ptrs_hir_ty(ty: &hir::Ty) -> &hir::Ty {
    match ty.kind {
        // Recurse through any number of `*`/`&` layers.
        TyKind::Ptr(ref mut_ty) | TyKind::Rptr(_, ref mut_ty) => walk_ptrs_hir_ty(&mut_ty.ty),
        _ => ty,
    }
}
/// Returns the base type for references and raw pointers.
pub fn walk_ptrs_ty(ty: Ty<'_>) -> Ty<'_> {
    match ty.kind {
        // Peel off any number of reference layers.
        ty::Ref(_, ty, _) => walk_ptrs_ty(ty),
        _ => ty,
    }
}
/// Returns the base type for references and raw pointers, and count reference
/// depth.
pub fn walk_ptrs_ty_depth(ty: Ty<'_>) -> (Ty<'_>, usize) {
    // Tail-recursive helper carrying the number of layers peeled so far.
    fn inner(ty: Ty<'_>, depth: usize) -> (Ty<'_>, usize) {
        match ty.kind {
            ty::Ref(_, ty, _) => inner(ty, depth + 1),
            _ => (ty, depth),
        }
    }
    inner(ty, 0)
}
/// Checks whether the given expression is a constant integer of the given value.
/// unlike `is_integer_literal`, this version does const folding
pub fn is_integer_const(cx: &LateContext<'_, '_>, e: &Expr, value: u128) -> bool {
    // Fast path: a plain literal needs no const evaluation.
    if is_integer_literal(e, value) {
        return true;
    }
    let map = cx.tcx.hir();
    let parent_item = map.get_parent_item(e.hir_id);
    // Otherwise, try to const-fold the expression in its owning body.
    if let Some((Constant::Int(v), _)) = map
        .maybe_body_owned_by(parent_item)
        .and_then(|body_id| constant(cx, cx.tcx.body_tables(body_id), e))
    {
        value == v
    } else {
        false
    }
}
/// Checks whether the given expression is a constant literal of the given value.
pub fn is_integer_literal(expr: &Expr, value: u128) -> bool {
// FIXME: use constant folding
if let ExprKind::Lit(ref spanned) = expr.kind {
if let LitKind::Int(v, _) = spanned.node {
return v == value;
}
}
false
}
/// Returns `true` if the given `Expr` has been coerced before.
///
/// Examples of coercions can be found in the Nomicon at
/// <https://doc.rust-lang.org/nomicon/coercions.html>.
///
/// See `rustc::ty::adjustment::Adjustment` and `rustc_typeck::check::coercion` for more
/// information on adjustments and coercions.
pub fn is_adjusted(cx: &LateContext<'_, '_>, e: &Expr) -> bool {
    // An entry in the adjustments table means typeck inserted a coercion.
    cx.tables.adjustments().get(e.hir_id).is_some()
}
/// Returns the pre-expansion span if is this comes from an expansion of the
/// macro `name`.
/// See also `is_direct_expn_of`.
#[must_use]
pub fn is_expn_of(mut span: Span, name: &str) -> Option<Span> {
    // Walk outwards through the whole expansion chain until the requested
    // macro is found or we reach code written directly in the source.
    while span.from_expansion() {
        let data = span.ctxt().outer_expn_data();
        if data.kind.descr().as_str() == name {
            return Some(data.call_site);
        }
        span = data.call_site;
    }
    None
}
/// Returns the pre-expansion span if the span directly comes from an expansion
/// of the macro `name`.
/// The difference with `is_expn_of` is that in
/// ```rust,ignore
/// foo!(bar!(42));
/// ```
/// `42` is considered expanded from `foo!` and `bar!` by `is_expn_of` but only
/// `bar!` by
/// `is_direct_expn_of`.
#[must_use]
pub fn is_direct_expn_of(span: Span, name: &str) -> Option<Span> {
    if !span.from_expansion() {
        return None;
    }
    // Only the outermost expansion is inspected — no walking of the chain.
    let data = span.ctxt().outer_expn_data();
    if data.kind.descr().as_str() == name {
        Some(data.call_site)
    } else {
        None
    }
}
/// Convenience function to get the return type of a function.
pub fn return_ty<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, fn_item: hir::HirId) -> Ty<'tcx> {
    let fn_def_id = cx.tcx.hir().local_def_id(fn_item);
    cx.tcx.erase_late_bound_regions(&cx.tcx.fn_sig(fn_def_id).output())
}
/// Checks if two types are the same.
///
/// This discards any lifetime annotations, too.
//
// FIXME: this works correctly for lifetimes bounds (`for <'a> Foo<'a>` ==
// `for <'b> Foo<'b>`, but not for type parameters).
pub fn same_tys<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
    // Erase late-bound regions so differing lifetime names don't matter.
    let a = cx.tcx.erase_late_bound_regions(&Binder::bind(a));
    let b = cx.tcx.erase_late_bound_regions(&Binder::bind(b));
    cx.tcx
        .infer_ctxt()
        .enter(|infcx| infcx.can_eq(cx.param_env, a, b).is_ok())
}
/// Returns `true` if the given type is an `unsafe` function.
pub fn type_is_unsafe_function<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    match ty.kind {
        // Only fn items and fn pointers have a signature to inspect.
        ty::FnDef(..) | ty::FnPtr(_) => ty.fn_sig(cx.tcx).unsafety() == Unsafety::Unsafe,
        _ => false,
    }
}
/// Checks whether `ty` is `Copy`, ignoring region constraints.
pub fn is_copy<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
    ty.is_copy_modulo_regions(cx.tcx, cx.param_env, DUMMY_SP)
}
/// Checks if an expression is constructing a tuple-like enum variant or struct
pub fn is_ctor_or_promotable_const_function(cx: &LateContext<'_, '_>, expr: &Expr) -> bool {
    if let ExprKind::Call(ref fun, _) = expr.kind {
        if let ExprKind::Path(ref qp) = fun.kind {
            let res = cx.tables.qpath_res(qp, fun.hir_id);
            return match res {
                // Variant or tuple-struct/variant constructor.
                def::Res::Def(DefKind::Variant, ..) | Res::Def(DefKind::Ctor(..), _) => true,
                // Otherwise: any promotable const fn also qualifies.
                def::Res::Def(_, def_id) => cx.tcx.is_promotable_const_fn(def_id),
                _ => false,
            };
        }
    }
    false
}
/// Returns `true` if a pattern is refutable.
pub fn is_refutable(cx: &LateContext<'_, '_>, pat: &Pat) -> bool {
    // A path/struct pattern is refutable iff it names an enum variant.
    fn is_enum_variant(cx: &LateContext<'_, '_>, qpath: &QPath, id: HirId) -> bool {
        matches!(
            cx.tables.qpath_res(qpath, id),
            def::Res::Def(DefKind::Variant, ..) | Res::Def(DefKind::Ctor(def::CtorOf::Variant, _), _)
        )
    }
    // A compound pattern is refutable if any sub-pattern is.
    fn are_refutable<'a, I: Iterator<Item = &'a Pat>>(cx: &LateContext<'_, '_>, mut i: I) -> bool {
        i.any(|pat| is_refutable(cx, pat))
    }
    match pat.kind {
        PatKind::Binding(..) | PatKind::Wild => false,
        PatKind::Box(ref pat) | PatKind::Ref(ref pat, _) => is_refutable(cx, pat),
        PatKind::Lit(..) | PatKind::Range(..) => true,
        PatKind::Path(ref qpath) => is_enum_variant(cx, qpath, pat.hir_id),
        PatKind::Or(ref pats) | PatKind::Tuple(ref pats, _) => are_refutable(cx, pats.iter().map(|pat| &**pat)),
        PatKind::Struct(ref qpath, ref fields, _) => {
            if is_enum_variant(cx, qpath, pat.hir_id) {
                true
            } else {
                are_refutable(cx, fields.iter().map(|field| &*field.pat))
            }
        },
        PatKind::TupleStruct(ref qpath, ref pats, _) => {
            if is_enum_variant(cx, qpath, pat.hir_id) {
                true
            } else {
                are_refutable(cx, pats.iter().map(|pat| &**pat))
            }
        },
        PatKind::Slice(ref head, ref middle, ref tail) => {
            are_refutable(cx, head.iter().chain(middle).chain(tail.iter()).map(|pat| &**pat))
        },
    }
}
/// Checks for the `#[automatically_derived]` attribute all `#[derive]`d
/// implementations have.
pub fn is_automatically_derived(attrs: &[ast::Attribute]) -> bool {
    attr::contains_name(attrs, sym!(automatically_derived))
}
/// Remove blocks around an expression.
///
/// Ie. `x`, `{ x }` and `{{{{ x }}}}` all give `x`. `{ x; y }` and `{}` return
/// themselves.
pub fn remove_blocks(expr: &Expr) -> &Expr {
    match expr.kind {
        // Only a statement-free block with a tail expression can be peeled.
        ExprKind::Block(ref block, _) if block.stmts.is_empty() => match block.expr {
            Some(ref inner) => remove_blocks(inner),
            None => expr,
        },
        _ => expr,
    }
}
/// Returns `true` if the parameter is the `self` binding.
pub fn is_self(slf: &Param) -> bool {
    match slf.pat.kind {
        PatKind::Binding(.., name, _) => name.name == kw::SelfLower,
        _ => false,
    }
}
/// Returns `true` if the type is the literal `Self` type.
pub fn is_self_ty(slf: &hir::Ty) -> bool {
    if_chain! {
        if let TyKind::Path(ref qp) = slf.kind;
        if let QPath::Resolved(None, ref path) = *qp;
        if let Res::SelfTy(..) = path.res;
        then {
            return true
        }
    }
    false
}
/// Iterates over the `Param`s of `body` corresponding to the inputs of `decl`.
///
/// NOTE(review): indexes `body.params` by the declared arity, so it assumes
/// the body has at least as many params as the declaration has inputs —
/// confirm at the call sites.
pub fn iter_input_pats<'tcx>(decl: &FnDecl, body: &'tcx Body) -> impl Iterator<Item = &'tcx Param> {
    (0..decl.inputs.len()).map(move |i| &body.params[i])
}
/// Checks if a given expression is a match expression expanded from the `?`
/// operator or the `try` macro.
pub fn is_try(expr: &Expr) -> Option<&Expr> {
    // Recognizes the arm `Ok(x) => x` of the `try!` expansion.
    fn is_ok(arm: &Arm) -> bool {
        if_chain! {
            if let PatKind::TupleStruct(ref path, ref pat, None) = arm.pat.kind;
            if match_qpath(path, &paths::RESULT_OK[1..]);
            if let PatKind::Binding(_, hir_id, _, None) = pat[0].kind;
            if let ExprKind::Path(QPath::Resolved(None, ref path)) = arm.body.kind;
            if let Res::Local(lid) = path.res;
            // The arm body must be exactly the variable bound by the pattern.
            if lid == hir_id;
            then {
                return true;
            }
        }
        false
    }
    // Recognizes an `Err(..) => …` arm.
    fn is_err(arm: &Arm) -> bool {
        if let PatKind::TupleStruct(ref path, _, _) = arm.pat.kind {
            match_qpath(path, &paths::RESULT_ERR[1..])
        } else {
            false
        }
    }
    if let ExprKind::Match(_, ref arms, ref source) = expr.kind {
        // desugared from a `?` operator
        if let MatchSource::TryDesugar = *source {
            return Some(expr);
        }
        if_chain! {
            if arms.len() == 2;
            if arms[0].guard.is_none();
            if arms[1].guard.is_none();
            if (is_ok(&arms[0]) && is_err(&arms[1])) ||
                (is_ok(&arms[1]) && is_err(&arms[0]));
            then {
                return Some(expr);
            }
        }
    }
    None
}
/// Returns `true` if the lint is allowed in the current context
///
/// Useful for skipping long running code when it's unnecessary
pub fn is_allowed(cx: &LateContext<'_, '_>, lint: &'static Lint, id: HirId) -> bool {
    cx.tcx.lint_level_at_node(lint, id).0 == Level::Allow
}
/// Returns the name bound by a by-value or by-reference binding pattern.
pub fn get_arg_name(pat: &Pat) -> Option<ast::Name> {
    match pat.kind {
        PatKind::Binding(.., ident, None) => Some(ident.name),
        // Look through `&`/`&mut` patterns.
        PatKind::Ref(ref inner, _) => get_arg_name(inner),
        _ => None,
    }
}
/// Returns the bit width of the signed integer type `ity` on the target.
pub fn int_bits(tcx: TyCtxt<'_>, ity: ast::IntTy) -> u64 {
    layout::Integer::from_attr(&tcx, attr::IntType::SignedInt(ity))
        .size()
        .bits()
}
#[allow(clippy::cast_possible_wrap)]
/// Turn a constant int byte representation into an i128
pub fn sext(tcx: TyCtxt<'_>, u: u128, ity: ast::IntTy) -> i128 {
    // Shift left then arithmetic-shift right to sign-extend the value.
    let amt = 128 - int_bits(tcx, ity);
    ((u as i128) << amt) >> amt
}
#[allow(clippy::cast_sign_loss)]
/// clip unused bytes
pub fn unsext(tcx: TyCtxt<'_>, u: i128, ity: ast::IntTy) -> u128 {
    // Shift left then logical-shift right to zero out the unused high bits.
    let amt = 128 - int_bits(tcx, ity);
    ((u as u128) << amt) >> amt
}
/// clip unused bytes
pub fn clip(tcx: TyCtxt<'_>, u: u128, ity: ast::UintTy) -> u128 {
    let bits = layout::Integer::from_attr(&tcx, attr::IntType::UnsignedInt(ity))
        .size()
        .bits();
    // Shift left then right to zero the bits above the type's width.
    let amt = 128 - bits;
    (u << amt) >> amt
}
/// Removes block comments from the given `Vec` of lines.
///
/// Lines that open or close a `/* ... */` comment (including one-line
/// comments) are always dropped; other lines are kept only while no block
/// comment is open.
///
/// # Examples
///
/// ```rust,ignore
/// without_block_comments(vec!["/*", "foo", "*/"]);
/// // => vec![]
///
/// without_block_comments(vec!["bar", "/*", "foo", "*/"]);
/// // => vec!["bar"]
/// ```
pub fn without_block_comments(lines: Vec<&str>) -> Vec<&str> {
    let mut without = vec![];
    // Current block-comment nesting depth.
    let mut nest_level = 0_usize;
    for line in lines {
        let opens = line.matches("/*").count();
        let closes = line.matches("*/").count();
        if opens > 0 || closes > 0 {
            // Count every delimiter on the line so that e.g. `/* one-line */`
            // leaves the depth unchanged. The old code only checked for the
            // first `/*`, so such a line opened a phantom comment and every
            // following line was silently dropped.
            nest_level += opens;
            nest_level = nest_level.saturating_sub(closes);
            continue;
        }
        if nest_level == 0 {
            without.push(line);
        }
    }
    without
}
/// Returns `true` if `node` or any of its enclosing items carries the
/// `#[automatically_derived]` attribute.
pub fn any_parent_is_automatically_derived(tcx: TyCtxt<'_>, node: HirId) -> bool {
    let map = &tcx.hir();
    let mut prev_enclosing_node = None;
    let mut enclosing_node = node;
    // Walk up the item tree; the loop terminates when `get_parent_item`
    // returns the same node (the crate root is its own parent).
    while Some(enclosing_node) != prev_enclosing_node {
        if is_automatically_derived(map.attrs(enclosing_node)) {
            return true;
        }
        prev_enclosing_node = Some(enclosing_node);
        enclosing_node = map.get_parent_item(enclosing_node);
    }
    false
}
/// Returns true if ty has `iter` or `iter_mut` methods
pub fn has_iter_method(cx: &LateContext<'_, '_>, probably_ref_ty: Ty<'_>) -> Option<&'static str> {
    // FIXME: instead of this hard-coded list, we should check if `<adt>::iter`
    // exists and has the desired signature. Unfortunately FnCtxt is not exported
    // so we can't use its `lookup_method` method.
    let into_iter_collections: [&[&str]; 13] = [
        &paths::VEC,
        &paths::OPTION,
        &paths::RESULT,
        &paths::BTREESET,
        &paths::BTREEMAP,
        &paths::VEC_DEQUE,
        &paths::LINKED_LIST,
        &paths::BINARY_HEAP,
        &paths::HASHSET,
        &paths::HASHMAP,
        &paths::PATH_BUF,
        &paths::PATH,
        &paths::RECEIVER,
    ];
    // Look through a single reference layer.
    let ty_to_check = match probably_ref_ty.kind {
        ty::Ref(_, ty_to_check, _) => ty_to_check,
        _ => probably_ref_ty,
    };
    let def_id = match ty_to_check.kind {
        // Built-in sequence types have `iter` without being ADTs.
        ty::Array(..) => return Some("array"),
        ty::Slice(..) => return Some("slice"),
        ty::Adt(adt, _) => adt.did,
        _ => return None,
    };
    // Return the final path segment (the type name) of the first match.
    for path in &into_iter_collections {
        if match_def_path(cx, def_id, path) {
            return Some(*path.last().unwrap());
        }
    }
    None
}
/// Matches a function call with the given path and returns the arguments.
///
/// Usage:
///
/// ```rust,ignore
/// if let Some(args) = match_function_call(cx, begin_panic_call, &paths::BEGIN_PANIC);
/// ```
pub fn match_function_call<'a, 'tcx>(
    cx: &LateContext<'a, 'tcx>,
    expr: &'tcx Expr,
    path: &[&str],
) -> Option<&'tcx [Expr]> {
    if_chain! {
        // The callee must be a plain path expression …
        if let ExprKind::Call(ref fun, ref args) = expr.kind;
        if let ExprKind::Path(ref qpath) = fun.kind;
        // … that resolves to a definition with the requested def-path.
        if let Some(fun_def_id) = cx.tables.qpath_res(qpath, fun.hir_id).opt_def_id();
        if match_def_path(cx, fun_def_id, path);
        then {
            return Some(&args)
        }
    };
    None
}
// Unit tests for the pure string helpers (`trim_multiline`,
// `without_block_comments`) defined in this module.
// NOTE(review): the multi-line string literals below appear to have lost
// their interior indentation in this copy of the file — verify the expected
// and input strings against the upstream source before relying on them.
#[cfg(test)]
mod test {
    use super::{trim_multiline, without_block_comments};

    #[test]
    fn test_trim_multiline_single_line() {
        assert_eq!("", trim_multiline("".into(), false));
        assert_eq!("...", trim_multiline("...".into(), false));
        assert_eq!("...", trim_multiline(" ...".into(), false));
        assert_eq!("...", trim_multiline("\t...".into(), false));
        assert_eq!("...", trim_multiline("\t\t...".into(), false));
    }

    #[test]
    #[rustfmt::skip]
    fn test_trim_multiline_block() {
        assert_eq!("\
if x {
y
} else {
z
}", trim_multiline(" if x {
y
} else {
z
}".into(), false));
        assert_eq!("\
if x {
\ty
} else {
\tz
}", trim_multiline(" if x {
\ty
} else {
\tz
}".into(), false));
    }

    #[test]
    #[rustfmt::skip]
    fn test_trim_multiline_empty_line() {
        assert_eq!("\
if x {
y
} else {
z
}", trim_multiline(" if x {
y
} else {
z
}".into(), false));
    }

    #[test]
    fn test_without_block_comments_lines_without_block_comments() {
        let result = without_block_comments(vec!["/*", "", "*/"]);
        println!("result: {:?}", result);
        assert!(result.is_empty());
        let result = without_block_comments(vec!["", "/*", "", "*/", "#[crate_type = \"lib\"]", "/*", "", "*/", ""]);
        assert_eq!(result, vec!["", "#[crate_type = \"lib\"]", ""]);
        let result = without_block_comments(vec!["/* rust", "", "*/"]);
        assert!(result.is_empty());
        let result = without_block_comments(vec!["/* one-line comment */"]);
        assert!(result.is_empty());
        let result = without_block_comments(vec!["/* nested", "/* multi-line", "comment", "*/", "test", "*/"]);
        assert!(result.is_empty());
        let result = without_block_comments(vec!["/* nested /* inline /* comment */ test */ */"]);
        assert!(result.is_empty());
        let result = without_block_comments(vec!["foo", "bar", "baz"]);
        assert_eq!(result, vec!["foo", "bar", "baz"]);
    }
}
/// Checks whether the definition path of `did` matches the segments in
/// `syms` exactly (same length, same segment names, in order).
pub fn match_def_path<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, did: DefId, syms: &[&str]) -> bool {
    let segments = cx.get_def_path(did);
    if segments.len() != syms.len() {
        return false;
    }
    segments
        .into_iter()
        .zip(syms.iter())
        .all(|(segment, &expected)| segment.as_str() == expected)
}
/// Returns the list of condition expressions and the list of blocks in a
/// sequence of `if/else`.
/// E.g., this returns `([a, b], [c, d, e])` for the expression
/// `if a { c } else if b { d } else { e }`.
pub fn if_sequence(mut expr: &Expr) -> (SmallVec<[&Expr; 1]>, SmallVec<[&Block; 1]>) {
    let mut conds = SmallVec::new();
    let mut blocks: SmallVec<[&Block; 1]> = SmallVec::new();
    // Walk down the `else` chain, collecting one condition and one `then`
    // block per `if` along the way.
    while let Some((ref cond, ref then_expr, ref else_expr)) = higher::if_block(&expr) {
        conds.push(&**cond);
        if let ExprKind::Block(ref block, _) = then_expr.kind {
            blocks.push(block);
        } else {
            // The HIR guarantees an `if` body is a block, so this is a bug.
            panic!("ExprKind::If node is not an ExprKind::Block");
        }
        // Descend into the `else` branch: either another `if` (picked up by
        // the next loop iteration) or the trailing `else` block.
        if let Some(ref else_expr) = *else_expr {
            expr = else_expr;
        } else {
            break;
        }
    }
    // final `else {..}`
    if !blocks.is_empty() {
        if let ExprKind::Block(ref block, _) = expr.kind {
            blocks.push(&**block);
        }
    }
    (conds, blocks)
}
/// Checks whether the HIR node directly enclosing `expr` is an `if`
/// expression, or a match arm whose body is an `if`.
pub fn parent_node_is_if_expr<'a, 'b>(expr: &Expr, cx: &LateContext<'a, 'b>) -> bool {
    let parent_id = cx.tcx.hir().get_parent_node(expr.hir_id);
    let parent_node = cx.tcx.hir().get(parent_id);
    match parent_node {
        rustc::hir::Node::Expr(e) => higher::if_block(&e).is_some(),
        // Arms are checked via their body since `if` may appear there directly.
        rustc::hir::Node::Arm(e) => higher::if_block(&e.body).is_some(),
        _ => false,
    }
}
|
extern crate arrayvec;
#[macro_use] extern crate matches;
use arrayvec::ArrayVec;
use arrayvec::ArrayString;
use std::mem;
use arrayvec::CapacityError;
use std::collections::HashMap;
// Push heap-allocating elements; both borrowed and consuming iteration must
// visit every element.
#[test]
fn test_simple() {
    use std::ops::Add;
    let mut vec: ArrayVec<[Vec<i32>; 3]> = ArrayVec::new();
    vec.push(vec![1, 2, 3, 4]);
    vec.push(vec![10]);
    vec.push(vec![-1, 13, -2]);
    for elt in &vec {
        assert_eq!(elt.iter().fold(0, Add::add), 10);
    }
    let sum_len = vec.into_iter().map(|x| x.len()).fold(0, Add::add);
    assert_eq!(sum_len, 8);
}
// `remaining_capacity` decreases by one per push until the array is full.
#[test]
fn test_capacity_left() {
    let mut vec: ArrayVec<[usize; 4]> = ArrayVec::new();
    assert_eq!(vec.remaining_capacity(), 4);
    vec.push(1);
    assert_eq!(vec.remaining_capacity(), 3);
    vec.push(2);
    assert_eq!(vec.remaining_capacity(), 2);
    vec.push(3);
    assert_eq!(vec.remaining_capacity(), 1);
    vec.push(4);
    assert_eq!(vec.remaining_capacity(), 0);
}
// `try_extend_from_slice` copies elements in and composes with `pop`.
#[test]
fn test_extend_from_slice() {
    let mut vec: ArrayVec<[usize; 10]> = ArrayVec::new();
    vec.try_extend_from_slice(&[1, 2, 3]).unwrap();
    assert_eq!(vec.len(), 3);
    assert_eq!(&vec[..], &[1, 2, 3]);
    assert_eq!(vec.pop(), Some(3));
    assert_eq!(&vec[..], &[1, 2]);
}
// Extending past capacity fails, including for a zero-capacity vector.
#[test]
fn test_extend_from_slice_error() {
    let mut vec: ArrayVec<[usize; 10]> = ArrayVec::new();
    vec.try_extend_from_slice(&[1, 2, 3]).unwrap();
    let res = vec.try_extend_from_slice(&[0; 8]);
    assert_matches!(res, Err(_));
    let mut vec: ArrayVec<[usize; 0]> = ArrayVec::new();
    let res = vec.try_extend_from_slice(&[0; 1]);
    assert_matches!(res, Err(_));
}
// Capacities larger than u8::MAX (4096 here) index correctly; push past
// capacity fails without corrupting the length.
#[test]
fn test_u16_index() {
    const N: usize = 4096;
    let mut vec: ArrayVec<[_; N]> = ArrayVec::new();
    for _ in 0..N {
        assert!(vec.try_push(1u8).is_ok());
    }
    assert!(vec.try_push(0).is_err());
    assert_eq!(vec.len(), N);
}
// The consuming iterator is double-ended and reports exact size hints.
#[test]
fn test_iter() {
    let mut iter = ArrayVec::from([1, 2, 3]).into_iter();
    assert_eq!(iter.size_hint(), (3, Some(3)));
    assert_eq!(iter.next_back(), Some(3));
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next_back(), Some(2));
    assert_eq!(iter.size_hint(), (0, Some(0)));
    assert_eq!(iter.next_back(), None);
}
// Drop bookkeeping: a `Bump` increments the shared counter each time one is
// dropped, so the counter value pins down exactly when drops happen.
#[test]
fn test_drop() {
    use std::cell::Cell;
    let flag = &Cell::new(0);
    #[derive(Clone)]
    struct Bump<'a>(&'a Cell<i32>);
    impl<'a> Drop for Bump<'a> {
        fn drop(&mut self) {
            let n = self.0.get();
            self.0.set(n + 1);
        }
    }
    // Dropping the ArrayVec drops its elements.
    {
        let mut array = ArrayVec::<[Bump; 128]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
    }
    assert_eq!(flag.get(), 2);
    // test something with the nullable pointer optimization
    flag.set(0);
    {
        let mut array = ArrayVec::<[_; 3]>::new();
        array.push(vec![Bump(flag)]);
        array.push(vec![Bump(flag), Bump(flag)]);
        array.push(vec![]);
        // try_push on a full vec hands the element back; dropping the
        // returned error drops its single Bump.
        let push4 = array.try_push(vec![Bump(flag)]);
        assert_eq!(flag.get(), 0);
        drop(push4);
        assert_eq!(flag.get(), 1);
        drop(array.pop());
        assert_eq!(flag.get(), 1);
        drop(array.pop());
        assert_eq!(flag.get(), 3);
    }
    assert_eq!(flag.get(), 4);
    // test into_inner
    flag.set(0);
    {
        let mut array = ArrayVec::<[_; 3]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        let inner = array.into_inner();
        assert!(inner.is_ok());
        assert_eq!(flag.get(), 0);
        drop(inner);
        assert_eq!(flag.get(), 3);
    }
    // test cloning into_iter
    flag.set(0);
    {
        let mut array = ArrayVec::<[_; 3]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        let mut iter = array.into_iter();
        assert_eq!(flag.get(), 0);
        iter.next();
        assert_eq!(flag.get(), 1);
        let clone = iter.clone();
        assert_eq!(flag.get(), 1);
        drop(clone);
        assert_eq!(flag.get(), 3);
        drop(iter);
        assert_eq!(flag.get(), 5);
    }
}
// Elements after one whose Drop panics must still be dropped.
#[test]
fn test_drop_panics() {
    use std::cell::Cell;
    use std::panic::catch_unwind;
    use std::panic::AssertUnwindSafe;
    let flag = &Cell::new(0);
    struct Bump<'a>(&'a Cell<i32>);
    // Panic in the first drop
    impl<'a> Drop for Bump<'a> {
        fn drop(&mut self) {
            let n = self.0.get();
            self.0.set(n + 1);
            if n == 0 {
                panic!("Panic in Bump's drop");
            }
        }
    }
    // check if rust is new enough
    flag.set(0);
    {
        let array = vec![Bump(flag), Bump(flag)];
        let res = catch_unwind(AssertUnwindSafe(|| {
            drop(array);
        }));
        assert!(res.is_err());
    }
    if flag.get() != 2 {
        println!("test_drop_panics: skip, this version of Rust doesn't continue in drop_in_place");
        return;
    }
    flag.set(0);
    {
        let mut array = ArrayVec::<[Bump; 128]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        let res = catch_unwind(AssertUnwindSafe(|| {
            drop(array);
        }));
        assert!(res.is_err());
    }
    // Check that all the elements drop, even if the first drop panics.
    assert_eq!(flag.get(), 3);
    flag.set(0);
    {
        let mut array = ArrayVec::<[Bump; 16]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        let i = 2;
        let tail_len = array.len() - i;
        let res = catch_unwind(AssertUnwindSafe(|| {
            array.truncate(i);
        }));
        assert!(res.is_err());
        // Check that all the tail elements drop, even if the first drop panics.
        assert_eq!(flag.get(), tail_len as i32);
    }
}
// Extend stops silently once the capacity is reached, leaving the source
// iterator positioned at the first element that did not fit.
#[test]
fn test_extend() {
    let mut range = 0..10;
    let mut array: ArrayVec<[_; 5]> = range.by_ref().collect();
    assert_eq!(&array[..], &[0, 1, 2, 3, 4]);
    assert_eq!(range.next(), Some(5));
    array.extend(range.by_ref());
    assert_eq!(range.next(), Some(6));
    let mut array: ArrayVec<[_; 10]> = (0..3).collect();
    assert_eq!(&array[..], &[0, 1, 2]);
    array.extend(3..5);
    assert_eq!(&array[..], &[0, 1, 2, 3, 4]);
}
// Compile-time check: ArrayVec of Send+Sync elements is itself Send + Sync.
#[test]
fn test_is_send_sync() {
    let data = ArrayVec::<[Vec<i32>; 5]>::new();
    &data as &dyn Send;
    &data as &dyn Sync;
}
// Upper bounds on the memory layout overhead of ArrayVec.
#[test]
fn test_compact_size() {
    // Future rust will kill these drop flags!
    // 4 elements size + 1 len + 1 enum tag + [1 drop flag]
    type ByteArray = ArrayVec<[u8; 4]>;
    println!("{}", mem::size_of::<ByteArray>());
    assert!(mem::size_of::<ByteArray>() <= 8);
    // 1 enum tag + 1 drop flag
    type EmptyArray = ArrayVec<[u8; 0]>;
    println!("{}", mem::size_of::<EmptyArray>());
    assert!(mem::size_of::<EmptyArray>() <= 2);
    // 12 element size + 1 enum tag + 3 padding + 1 len + 1 drop flag + 2 padding
    type QuadArray = ArrayVec<[u32; 3]>;
    println!("{}", mem::size_of::<QuadArray>());
    assert!(mem::size_of::<QuadArray>() <= 24);
}
// ArrayVec of references still works when wrapped in Option.
#[test]
fn test_still_works_with_option_arrayvec() {
    type RefArray = ArrayVec<[&'static i32; 2]>;
    let array = Some(RefArray::new());
    assert!(array.is_some());
    println!("{:?}", array);
}
// `drain` removes a range in place; a partially consumed (reversed) drain
// still removes the whole range on drop.
#[test]
fn test_drain() {
    let mut v = ArrayVec::from([0; 8]);
    v.pop();
    v.drain(0..7);
    assert_eq!(&v[..], &[]);
    v.extend(0..);
    v.drain(1..4);
    assert_eq!(&v[..], &[0, 4, 5, 6, 7]);
    let u: ArrayVec<[_; 3]> = v.drain(1..4).rev().collect();
    assert_eq!(&u[..], &[6, 5, 4]);
    assert_eq!(&v[..], &[0, 7]);
    v.drain(..);
    assert_eq!(&v[..], &[]);
}
// Same as test_drain but with inclusive (`..=`) range syntax.
#[test]
fn test_drain_range_inclusive() {
    let mut v = ArrayVec::from([0; 8]);
    v.drain(0..=7);
    assert_eq!(&v[..], &[]);
    v.extend(0..);
    v.drain(1..=4);
    assert_eq!(&v[..], &[0, 5, 6, 7]);
    let u: ArrayVec<[_; 3]> = v.drain(1..=2).rev().collect();
    assert_eq!(&u[..], &[6, 5]);
    assert_eq!(&v[..], &[0, 7]);
    v.drain(..);
    assert_eq!(&v[..], &[]);
}
// Draining an inclusive range past the end must panic.
#[test]
#[should_panic]
fn test_drain_range_inclusive_oob() {
    let mut v = ArrayVec::from([0; 0]);
    v.drain(0..=0);
}
// `retain` passes &mut to the predicate, so it may mutate while filtering.
#[test]
fn test_retain() {
    let mut v = ArrayVec::from([0; 8]);
    for (i, elt) in v.iter_mut().enumerate() {
        *elt = i;
    }
    v.retain(|_| true);
    assert_eq!(&v[..], &[0, 1, 2, 3, 4, 5, 6, 7]);
    v.retain(|elt| {
        *elt /= 2;
        *elt % 2 == 0
    });
    assert_eq!(&v[..], &[0, 0, 2, 2]);
    v.retain(|_| false);
    assert_eq!(&v[..], &[]);
}
// Draining past the current length must panic.
#[test]
#[should_panic]
fn test_drain_oob() {
    let mut v = ArrayVec::from([0; 8]);
    v.pop();
    v.drain(0..8);
}
// A panicking Drop fires when the owning ArrayVec goes out of scope...
#[test]
#[should_panic]
fn test_drop_panic() {
    struct DropPanic;
    impl Drop for DropPanic {
        fn drop(&mut self) {
            panic!("drop");
        }
    }
    let mut array = ArrayVec::<[DropPanic; 1]>::new();
    array.push(DropPanic);
}
// ...and also when ownership has been transferred to the into_iter adapter.
#[test]
#[should_panic]
fn test_drop_panic_into_iter() {
    struct DropPanic;
    impl Drop for DropPanic {
        fn drop(&mut self) {
            panic!("drop");
        }
    }
    let mut array = ArrayVec::<[DropPanic; 1]>::new();
    array.push(DropPanic);
    array.into_iter();
}
// insert / try_insert respect capacity; inserting into a full vec fails and
// hands the element back in the error.
#[test]
fn test_insert() {
    let mut v = ArrayVec::from([]);
    assert_matches!(v.try_push(1), Err(_));
    let mut v = ArrayVec::<[_; 3]>::new();
    v.insert(0, 0);
    v.insert(1, 1);
    //let ret1 = v.try_insert(3, 3);
    //assert_matches!(ret1, Err(InsertError::OutOfBounds(_)));
    assert_eq!(&v[..], &[0, 1]);
    v.insert(2, 2);
    assert_eq!(&v[..], &[0, 1, 2]);
    let ret2 = v.try_insert(1, 9);
    assert_eq!(&v[..], &[0, 1, 2]);
    assert_matches!(ret2, Err(_));
    let mut v = ArrayVec::from([2]);
    assert_matches!(v.try_insert(0, 1), Err(CapacityError { .. }));
    assert_matches!(v.try_insert(1, 1), Err(CapacityError { .. }));
    //assert_matches!(v.try_insert(2, 1), Err(CapacityError { .. }));
}
// into_inner fails (returning self) when the vec is not exactly full.
#[test]
fn test_into_inner_1() {
    let mut v = ArrayVec::from([1, 2]);
    v.pop();
    let u = v.clone();
    assert_eq!(v.into_inner(), Err(u));
}
// into_inner succeeds on a completely filled vec of owned elements.
#[test]
fn test_into_inner_2() {
    let mut v = ArrayVec::<[String; 4]>::new();
    v.push("a".into());
    v.push("b".into());
    v.push("c".into());
    v.push("d".into());
    assert_eq!(v.into_inner().unwrap(), ["a", "b", "c", "d"]);
}
// into_inner succeeds after extend fills the vec from an unbounded range.
#[test]
fn test_into_inner_3_() {
    let mut v = ArrayVec::<[i32; 4]>::new();
    v.extend(1..);
    assert_eq!(v.into_inner().unwrap(), [1, 2, 3, 4]);
}
// io::Write impl: a short write (5 of 16 bytes) fills remaining capacity.
#[test]
fn test_write() {
    use std::io::Write;
    let mut v = ArrayVec::<[_; 8]>::new();
    write!(&mut v, "\x01\x02\x03").unwrap();
    assert_eq!(&v[..], &[1, 2, 3]);
    let r = v.write(&[9; 16]).unwrap();
    assert_eq!(r, 5);
    assert_eq!(&v[..], &[1, 2, 3, 9, 9, 9, 9, 9]);
}
// clone_from must produce the same contents whether the target is longer,
// shorter, or empty.
#[test]
fn array_clone_from() {
    let mut v = ArrayVec::<[_; 4]>::new();
    v.push(vec![1, 2]);
    v.push(vec![3, 4, 5]);
    v.push(vec![6]);
    let reference = v.to_vec();
    let mut u = ArrayVec::<[_; 4]>::new();
    u.clone_from(&v);
    assert_eq!(&u, &reference[..]);
    let mut t = ArrayVec::<[_; 4]>::new();
    t.push(vec![97]);
    t.push(vec![]);
    t.push(vec![5, 6, 2]);
    t.push(vec![2]);
    t.clone_from(&v);
    assert_eq!(&t, &reference[..]);
    t.clear();
    t.clone_from(&v);
    assert_eq!(&t, &reference[..]);
}
// ArrayString basics: push_str, equality with &str, HashMap key use,
// DerefMut to &mut str, and `?` via the Error trait.
#[test]
fn test_string() {
    use std::error::Error;
    let text = "hello world";
    let mut s = ArrayString::<[_; 16]>::new();
    s.try_push_str(text).unwrap();
    assert_eq!(&s, text);
    assert_eq!(text, &s);
    // Make sure Hash / Eq / Borrow match up so we can use HashMap
    let mut map = HashMap::new();
    map.insert(s, 1);
    assert_eq!(map[text], 1);
    let mut t = ArrayString::<[_; 2]>::new();
    assert!(t.try_push_str(text).is_err());
    assert_eq!(&t, "");
    t.push_str("ab");
    // DerefMut
    let tmut: &mut str = &mut t;
    assert_eq!(tmut, "ab");
    // Test Error trait / try
    let t = || -> Result<(), Box<dyn Error>> {
        let mut t = ArrayString::<[_; 2]>::new();
        t.try_push_str(text)?;
        Ok(())
    }();
    assert!(t.is_err());
}
// ArrayString construction from &str via `from`.
#[test]
fn test_string_from() {
    let text = "hello world";
    // Test `from` constructor
    let u = ArrayString::<[_; 11]>::from(text).unwrap();
    assert_eq!(&u, text);
    assert_eq!(u.len(), text.len());
}
// ArrayString construction via FromStr (`str::parse`).
#[test]
fn test_string_parse_from_str() {
    let text = "hello world";
    let u: ArrayString<[_; 11]> = text.parse().unwrap();
    assert_eq!(&u, text);
    assert_eq!(u.len(), text.len());
}
// ArrayString construction from a fixed-size byte string.
#[test]
fn test_string_from_bytes() {
    let text = "hello world";
    let u = ArrayString::from_byte_string(b"hello world").unwrap();
    assert_eq!(&u, text);
    assert_eq!(u.len(), text.len());
}
// clone_from replaces previous (longer) contents entirely.
#[test]
fn test_string_clone() {
    let text = "hi";
    let mut s = ArrayString::<[_; 4]>::new();
    s.push_str("abcd");
    let t = ArrayString::<[_; 4]>::from(text).unwrap();
    s.clone_from(&t);
    assert_eq!(&t, &s);
}
// try_push refuses a char that does not fit in the remaining *bytes*
// (multi-byte UTF-8 chars count per encoded byte).
#[test]
fn test_string_push() {
    let text = "abcαβγ";
    let mut s = ArrayString::<[_; 8]>::new();
    for c in text.chars() {
        if let Err(_) = s.try_push(c) {
            break;
        }
    }
    assert_eq!("abcαβ", &s[..]);
    s.push('x');
    assert_eq!("abcαβx", &s[..]);
    assert!(s.try_push('x').is_err());
}
// Inserting exactly at the current length (i.e. appending) is allowed.
#[test]
fn test_insert_at_length() {
    let mut v = ArrayVec::<[_; 8]>::new();
    let result1 = v.try_insert(0, "a");
    let result2 = v.try_insert(1, "b");
    assert!(result1.is_ok() && result2.is_ok());
    assert_eq!(&v[..], &["a", "b"]);
}
// Inserting past the length must panic even when capacity remains.
#[should_panic]
#[test]
fn test_insert_out_of_bounds() {
    let mut v = ArrayVec::<[_; 8]>::new();
    let _ = v.try_insert(1, "test");
}
/*
* insert that pushes out the last
let mut u = ArrayVec::from([1, 2, 3, 4]);
let ret = u.try_insert(3, 99);
assert_eq!(&u[..], &[1, 2, 3, 99]);
assert_matches!(ret, Err(_));
let ret = u.try_insert(4, 77);
assert_eq!(&u[..], &[1, 2, 3, 99]);
assert_matches!(ret, Err(_));
*/
// A failed insert must return (not drop) the rejected element; only when the
// caller drops the error does its Bump counter tick.
#[test]
fn test_drop_in_insert() {
    use std::cell::Cell;
    let flag = &Cell::new(0);
    struct Bump<'a>(&'a Cell<i32>);
    impl<'a> Drop for Bump<'a> {
        fn drop(&mut self) {
            let n = self.0.get();
            self.0.set(n + 1);
        }
    }
    flag.set(0);
    {
        let mut array = ArrayVec::<[_; 2]>::new();
        array.push(Bump(flag));
        array.insert(0, Bump(flag));
        assert_eq!(flag.get(), 0);
        let ret = array.try_insert(1, Bump(flag));
        assert_eq!(flag.get(), 0);
        assert_matches!(ret, Err(_));
        drop(ret);
        assert_eq!(flag.get(), 1);
    }
    assert_eq!(flag.get(), 3);
}
// pop_at removes by index, returning None for out-of-range indices.
#[test]
fn test_pop_at() {
    let mut v = ArrayVec::<[String; 4]>::new();
    let s = String::from;
    v.push(s("a"));
    v.push(s("b"));
    v.push(s("c"));
    v.push(s("d"));
    assert_eq!(v.pop_at(4), None);
    assert_eq!(v.pop_at(1), Some(s("b")));
    assert_eq!(v.pop_at(1), Some(s("c")));
    assert_eq!(v.pop_at(2), None);
    assert_eq!(&v[..], &["a", "d"]);
}
// Very large backing arrays (64 KiB) work.
#[test]
fn test_sizes() {
    let v = ArrayVec::from([0u8; 1 << 16]);
    assert_eq!(vec![0u8; v.len()], &v[..]);
}
// Default is implemented even when the element type has no Default.
#[test]
fn test_default() {
    use std::net;
    let s: ArrayString<[u8; 4]> = Default::default();
    // Something without `Default` implementation.
    let v: ArrayVec<[net::TcpStream; 4]> = Default::default();
    assert_eq!(s.len(), 0);
    assert_eq!(v.len(), 0);
}
// Feature-gated array sizes outside the default 0..=32 range.
#[cfg(feature="array-sizes-33-128")]
#[test]
fn test_sizes_33_128() {
    ArrayVec::from([0u8; 52]);
    ArrayVec::from([0u8; 127]);
}
#[cfg(feature="array-sizes-129-255")]
#[test]
fn test_sizes_129_255() {
    ArrayVec::from([0u8; 237]);
    ArrayVec::from([0u8; 255]);
}
// Zero-sized elements: same capacity-limited extend semantics as sized ones.
#[test]
fn test_extend_zst() {
    let mut range = 0..10;
    #[derive(Copy, Clone, PartialEq, Debug)]
    struct Z; // Zero sized type
    let mut array: ArrayVec<[_; 5]> = range.by_ref().map(|_| Z).collect();
    assert_eq!(&array[..], &[Z; 5]);
    assert_eq!(range.next(), Some(5));
    array.extend(range.by_ref().map(|_| Z));
    assert_eq!(range.next(), Some(6));
    let mut array: ArrayVec<[_; 10]> = (0..3).map(|_| Z).collect();
    assert_eq!(&array[..], &[Z; 3]);
    array.extend((3..5).map(|_| Z));
    assert_eq!(&array[..], &[Z; 5]);
    assert_eq!(array.len(), 5);
}
TEST: Update tests to pass without the std feature
extern crate arrayvec;
#[macro_use] extern crate matches;
use arrayvec::ArrayVec;
use arrayvec::ArrayString;
use std::mem;
use arrayvec::CapacityError;
use std::collections::HashMap;
// --- no-std-compatible variant of the arrayvec test suite (std-only tests
// below are gated with #[cfg(feature="std")]) ---
// Push heap-allocating elements; borrowed and consuming iteration both
// visit every element.
#[test]
fn test_simple() {
    use std::ops::Add;
    let mut vec: ArrayVec<[Vec<i32>; 3]> = ArrayVec::new();
    vec.push(vec![1, 2, 3, 4]);
    vec.push(vec![10]);
    vec.push(vec![-1, 13, -2]);
    for elt in &vec {
        assert_eq!(elt.iter().fold(0, Add::add), 10);
    }
    let sum_len = vec.into_iter().map(|x| x.len()).fold(0, Add::add);
    assert_eq!(sum_len, 8);
}
// `remaining_capacity` decreases by one per push until full.
#[test]
fn test_capacity_left() {
    let mut vec: ArrayVec<[usize; 4]> = ArrayVec::new();
    assert_eq!(vec.remaining_capacity(), 4);
    vec.push(1);
    assert_eq!(vec.remaining_capacity(), 3);
    vec.push(2);
    assert_eq!(vec.remaining_capacity(), 2);
    vec.push(3);
    assert_eq!(vec.remaining_capacity(), 1);
    vec.push(4);
    assert_eq!(vec.remaining_capacity(), 0);
}
// `try_extend_from_slice` copies elements in and composes with `pop`.
#[test]
fn test_extend_from_slice() {
    let mut vec: ArrayVec<[usize; 10]> = ArrayVec::new();
    vec.try_extend_from_slice(&[1, 2, 3]).unwrap();
    assert_eq!(vec.len(), 3);
    assert_eq!(&vec[..], &[1, 2, 3]);
    assert_eq!(vec.pop(), Some(3));
    assert_eq!(&vec[..], &[1, 2]);
}
// Extending past capacity fails, including for zero capacity.
#[test]
fn test_extend_from_slice_error() {
    let mut vec: ArrayVec<[usize; 10]> = ArrayVec::new();
    vec.try_extend_from_slice(&[1, 2, 3]).unwrap();
    let res = vec.try_extend_from_slice(&[0; 8]);
    assert_matches!(res, Err(_));
    let mut vec: ArrayVec<[usize; 0]> = ArrayVec::new();
    let res = vec.try_extend_from_slice(&[0; 1]);
    assert_matches!(res, Err(_));
}
// Capacities above u8::MAX index correctly.
#[test]
fn test_u16_index() {
    const N: usize = 4096;
    let mut vec: ArrayVec<[_; N]> = ArrayVec::new();
    for _ in 0..N {
        assert!(vec.try_push(1u8).is_ok());
    }
    assert!(vec.try_push(0).is_err());
    assert_eq!(vec.len(), N);
}
// Consuming iterator is double-ended with exact size hints.
#[test]
fn test_iter() {
    let mut iter = ArrayVec::from([1, 2, 3]).into_iter();
    assert_eq!(iter.size_hint(), (3, Some(3)));
    assert_eq!(iter.next_back(), Some(3));
    assert_eq!(iter.next(), Some(1));
    assert_eq!(iter.next_back(), Some(2));
    assert_eq!(iter.size_hint(), (0, Some(0)));
    assert_eq!(iter.next_back(), None);
}
// Drop bookkeeping via a counter-bumping Drop impl; the counter value pins
// down exactly when elements are dropped.
#[test]
fn test_drop() {
    use std::cell::Cell;
    let flag = &Cell::new(0);
    #[derive(Clone)]
    struct Bump<'a>(&'a Cell<i32>);
    impl<'a> Drop for Bump<'a> {
        fn drop(&mut self) {
            let n = self.0.get();
            self.0.set(n + 1);
        }
    }
    {
        let mut array = ArrayVec::<[Bump; 128]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
    }
    assert_eq!(flag.get(), 2);
    // test something with the nullable pointer optimization
    flag.set(0);
    {
        let mut array = ArrayVec::<[_; 3]>::new();
        array.push(vec![Bump(flag)]);
        array.push(vec![Bump(flag), Bump(flag)]);
        array.push(vec![]);
        let push4 = array.try_push(vec![Bump(flag)]);
        assert_eq!(flag.get(), 0);
        drop(push4);
        assert_eq!(flag.get(), 1);
        drop(array.pop());
        assert_eq!(flag.get(), 1);
        drop(array.pop());
        assert_eq!(flag.get(), 3);
    }
    assert_eq!(flag.get(), 4);
    // test into_inner
    flag.set(0);
    {
        let mut array = ArrayVec::<[_; 3]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        let inner = array.into_inner();
        assert!(inner.is_ok());
        assert_eq!(flag.get(), 0);
        drop(inner);
        assert_eq!(flag.get(), 3);
    }
    // test cloning into_iter
    flag.set(0);
    {
        let mut array = ArrayVec::<[_; 3]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        let mut iter = array.into_iter();
        assert_eq!(flag.get(), 0);
        iter.next();
        assert_eq!(flag.get(), 1);
        let clone = iter.clone();
        assert_eq!(flag.get(), 1);
        drop(clone);
        assert_eq!(flag.get(), 3);
        drop(iter);
        assert_eq!(flag.get(), 5);
    }
}
// Elements after one whose Drop panics must still be dropped.
#[test]
fn test_drop_panics() {
    use std::cell::Cell;
    use std::panic::catch_unwind;
    use std::panic::AssertUnwindSafe;
    let flag = &Cell::new(0);
    struct Bump<'a>(&'a Cell<i32>);
    // Panic in the first drop
    impl<'a> Drop for Bump<'a> {
        fn drop(&mut self) {
            let n = self.0.get();
            self.0.set(n + 1);
            if n == 0 {
                panic!("Panic in Bump's drop");
            }
        }
    }
    // check if rust is new enough
    flag.set(0);
    {
        let array = vec![Bump(flag), Bump(flag)];
        let res = catch_unwind(AssertUnwindSafe(|| {
            drop(array);
        }));
        assert!(res.is_err());
    }
    if flag.get() != 2 {
        println!("test_drop_panics: skip, this version of Rust doesn't continue in drop_in_place");
        return;
    }
    flag.set(0);
    {
        let mut array = ArrayVec::<[Bump; 128]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        let res = catch_unwind(AssertUnwindSafe(|| {
            drop(array);
        }));
        assert!(res.is_err());
    }
    // Check that all the elements drop, even if the first drop panics.
    assert_eq!(flag.get(), 3);
    flag.set(0);
    {
        let mut array = ArrayVec::<[Bump; 16]>::new();
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        array.push(Bump(flag));
        let i = 2;
        let tail_len = array.len() - i;
        let res = catch_unwind(AssertUnwindSafe(|| {
            array.truncate(i);
        }));
        assert!(res.is_err());
        // Check that all the tail elements drop, even if the first drop panics.
        assert_eq!(flag.get(), tail_len as i32);
    }
}
// Extend stops at capacity, leaving the iterator at the first unconsumed
// element.
#[test]
fn test_extend() {
    let mut range = 0..10;
    let mut array: ArrayVec<[_; 5]> = range.by_ref().collect();
    assert_eq!(&array[..], &[0, 1, 2, 3, 4]);
    assert_eq!(range.next(), Some(5));
    array.extend(range.by_ref());
    assert_eq!(range.next(), Some(6));
    let mut array: ArrayVec<[_; 10]> = (0..3).collect();
    assert_eq!(&array[..], &[0, 1, 2]);
    array.extend(3..5);
    assert_eq!(&array[..], &[0, 1, 2, 3, 4]);
}
// Compile-time Send/Sync check.
#[test]
fn test_is_send_sync() {
    let data = ArrayVec::<[Vec<i32>; 5]>::new();
    &data as &dyn Send;
    &data as &dyn Sync;
}
// Upper bounds on memory layout overhead.
#[test]
fn test_compact_size() {
    // Future rust will kill these drop flags!
    // 4 elements size + 1 len + 1 enum tag + [1 drop flag]
    type ByteArray = ArrayVec<[u8; 4]>;
    println!("{}", mem::size_of::<ByteArray>());
    assert!(mem::size_of::<ByteArray>() <= 8);
    // 1 enum tag + 1 drop flag
    type EmptyArray = ArrayVec<[u8; 0]>;
    println!("{}", mem::size_of::<EmptyArray>());
    assert!(mem::size_of::<EmptyArray>() <= 2);
    // 12 element size + 1 enum tag + 3 padding + 1 len + 1 drop flag + 2 padding
    type QuadArray = ArrayVec<[u32; 3]>;
    println!("{}", mem::size_of::<QuadArray>());
    assert!(mem::size_of::<QuadArray>() <= 24);
}
// ArrayVec of references wrapped in Option still works.
#[test]
fn test_still_works_with_option_arrayvec() {
    type RefArray = ArrayVec<[&'static i32; 2]>;
    let array = Some(RefArray::new());
    assert!(array.is_some());
    println!("{:?}", array);
}
// In-place range removal; a partially consumed drain removes the whole
// range on drop.
#[test]
fn test_drain() {
    let mut v = ArrayVec::from([0; 8]);
    v.pop();
    v.drain(0..7);
    assert_eq!(&v[..], &[]);
    v.extend(0..);
    v.drain(1..4);
    assert_eq!(&v[..], &[0, 4, 5, 6, 7]);
    let u: ArrayVec<[_; 3]> = v.drain(1..4).rev().collect();
    assert_eq!(&u[..], &[6, 5, 4]);
    assert_eq!(&v[..], &[0, 7]);
    v.drain(..);
    assert_eq!(&v[..], &[]);
}
// Same with inclusive range syntax.
#[test]
fn test_drain_range_inclusive() {
    let mut v = ArrayVec::from([0; 8]);
    v.drain(0..=7);
    assert_eq!(&v[..], &[]);
    v.extend(0..);
    v.drain(1..=4);
    assert_eq!(&v[..], &[0, 5, 6, 7]);
    let u: ArrayVec<[_; 3]> = v.drain(1..=2).rev().collect();
    assert_eq!(&u[..], &[6, 5]);
    assert_eq!(&v[..], &[0, 7]);
    v.drain(..);
    assert_eq!(&v[..], &[]);
}
// Inclusive drain past the end panics.
#[test]
#[should_panic]
fn test_drain_range_inclusive_oob() {
    let mut v = ArrayVec::from([0; 0]);
    v.drain(0..=0);
}
// retain's predicate receives &mut and may mutate while filtering.
#[test]
fn test_retain() {
    let mut v = ArrayVec::from([0; 8]);
    for (i, elt) in v.iter_mut().enumerate() {
        *elt = i;
    }
    v.retain(|_| true);
    assert_eq!(&v[..], &[0, 1, 2, 3, 4, 5, 6, 7]);
    v.retain(|elt| {
        *elt /= 2;
        *elt % 2 == 0
    });
    assert_eq!(&v[..], &[0, 0, 2, 2]);
    v.retain(|_| false);
    assert_eq!(&v[..], &[]);
}
// Drain past the length panics.
#[test]
#[should_panic]
fn test_drain_oob() {
    let mut v = ArrayVec::from([0; 8]);
    v.pop();
    v.drain(0..8);
}
// Panicking Drop propagates when the vec is dropped...
#[test]
#[should_panic]
fn test_drop_panic() {
    struct DropPanic;
    impl Drop for DropPanic {
        fn drop(&mut self) {
            panic!("drop");
        }
    }
    let mut array = ArrayVec::<[DropPanic; 1]>::new();
    array.push(DropPanic);
}
// ...and when the into_iter adapter is dropped.
#[test]
#[should_panic]
fn test_drop_panic_into_iter() {
    struct DropPanic;
    impl Drop for DropPanic {
        fn drop(&mut self) {
            panic!("drop");
        }
    }
    let mut array = ArrayVec::<[DropPanic; 1]>::new();
    array.push(DropPanic);
    array.into_iter();
}
// insert / try_insert respect capacity; a full vec hands the element back.
#[test]
fn test_insert() {
    let mut v = ArrayVec::from([]);
    assert_matches!(v.try_push(1), Err(_));
    let mut v = ArrayVec::<[_; 3]>::new();
    v.insert(0, 0);
    v.insert(1, 1);
    //let ret1 = v.try_insert(3, 3);
    //assert_matches!(ret1, Err(InsertError::OutOfBounds(_)));
    assert_eq!(&v[..], &[0, 1]);
    v.insert(2, 2);
    assert_eq!(&v[..], &[0, 1, 2]);
    let ret2 = v.try_insert(1, 9);
    assert_eq!(&v[..], &[0, 1, 2]);
    assert_matches!(ret2, Err(_));
    let mut v = ArrayVec::from([2]);
    assert_matches!(v.try_insert(0, 1), Err(CapacityError { .. }));
    assert_matches!(v.try_insert(1, 1), Err(CapacityError { .. }));
    //assert_matches!(v.try_insert(2, 1), Err(CapacityError { .. }));
}
// into_inner fails (returning self) unless the vec is exactly full.
#[test]
fn test_into_inner_1() {
    let mut v = ArrayVec::from([1, 2]);
    v.pop();
    let u = v.clone();
    assert_eq!(v.into_inner(), Err(u));
}
// into_inner succeeds on a full vec of owned elements.
#[test]
fn test_into_inner_2() {
    let mut v = ArrayVec::<[String; 4]>::new();
    v.push("a".into());
    v.push("b".into());
    v.push("c".into());
    v.push("d".into());
    assert_eq!(v.into_inner().unwrap(), ["a", "b", "c", "d"]);
}
// into_inner succeeds after extend fills the vec.
#[test]
fn test_into_inner_3_() {
    let mut v = ArrayVec::<[i32; 4]>::new();
    v.extend(1..);
    assert_eq!(v.into_inner().unwrap(), [1, 2, 3, 4]);
}
// io::Write is only available with the std feature.
#[cfg(feature="std")]
#[test]
fn test_write() {
    use std::io::Write;
    let mut v = ArrayVec::<[_; 8]>::new();
    write!(&mut v, "\x01\x02\x03").unwrap();
    assert_eq!(&v[..], &[1, 2, 3]);
    let r = v.write(&[9; 16]).unwrap();
    assert_eq!(r, 5);
    assert_eq!(&v[..], &[1, 2, 3, 9, 9, 9, 9, 9]);
}
// clone_from handles longer, shorter, and empty targets identically.
#[test]
fn array_clone_from() {
    let mut v = ArrayVec::<[_; 4]>::new();
    v.push(vec![1, 2]);
    v.push(vec![3, 4, 5]);
    v.push(vec![6]);
    let reference = v.to_vec();
    let mut u = ArrayVec::<[_; 4]>::new();
    u.clone_from(&v);
    assert_eq!(&u, &reference[..]);
    let mut t = ArrayVec::<[_; 4]>::new();
    t.push(vec![97]);
    t.push(vec![]);
    t.push(vec![5, 6, 2]);
    t.push(vec![2]);
    t.clone_from(&v);
    assert_eq!(&t, &reference[..]);
    t.clear();
    t.clone_from(&v);
    assert_eq!(&t, &reference[..]);
}
// Uses the Error trait (std-only), hence the feature gate.
#[cfg(feature="std")]
#[test]
fn test_string() {
    use std::error::Error;
    let text = "hello world";
    let mut s = ArrayString::<[_; 16]>::new();
    s.try_push_str(text).unwrap();
    assert_eq!(&s, text);
    assert_eq!(text, &s);
    // Make sure Hash / Eq / Borrow match up so we can use HashMap
    let mut map = HashMap::new();
    map.insert(s, 1);
    assert_eq!(map[text], 1);
    let mut t = ArrayString::<[_; 2]>::new();
    assert!(t.try_push_str(text).is_err());
    assert_eq!(&t, "");
    t.push_str("ab");
    // DerefMut
    let tmut: &mut str = &mut t;
    assert_eq!(tmut, "ab");
    // Test Error trait / try
    let t = || -> Result<(), Box<dyn Error>> {
        let mut t = ArrayString::<[_; 2]>::new();
        t.try_push_str(text)?;
        Ok(())
    }();
    assert!(t.is_err());
}
// ArrayString construction from &str via `from`.
#[test]
fn test_string_from() {
    let text = "hello world";
    // Test `from` constructor
    let u = ArrayString::<[_; 11]>::from(text).unwrap();
    assert_eq!(&u, text);
    assert_eq!(u.len(), text.len());
}
// Construction via FromStr (`str::parse`).
#[test]
fn test_string_parse_from_str() {
    let text = "hello world";
    let u: ArrayString<[_; 11]> = text.parse().unwrap();
    assert_eq!(&u, text);
    assert_eq!(u.len(), text.len());
}
// Construction from a fixed-size byte string.
#[test]
fn test_string_from_bytes() {
    let text = "hello world";
    let u = ArrayString::from_byte_string(b"hello world").unwrap();
    assert_eq!(&u, text);
    assert_eq!(u.len(), text.len());
}
// clone_from replaces previous (longer) contents entirely.
#[test]
fn test_string_clone() {
    let text = "hi";
    let mut s = ArrayString::<[_; 4]>::new();
    s.push_str("abcd");
    let t = ArrayString::<[_; 4]>::from(text).unwrap();
    s.clone_from(&t);
    assert_eq!(&t, &s);
}
// try_push counts the char's UTF-8 byte length against capacity.
#[test]
fn test_string_push() {
    let text = "abcαβγ";
    let mut s = ArrayString::<[_; 8]>::new();
    for c in text.chars() {
        if let Err(_) = s.try_push(c) {
            break;
        }
    }
    assert_eq!("abcαβ", &s[..]);
    s.push('x');
    assert_eq!("abcαβx", &s[..]);
    assert!(s.try_push('x').is_err());
}
// Inserting exactly at the length (appending) is allowed.
#[test]
fn test_insert_at_length() {
    let mut v = ArrayVec::<[_; 8]>::new();
    let result1 = v.try_insert(0, "a");
    let result2 = v.try_insert(1, "b");
    assert!(result1.is_ok() && result2.is_ok());
    assert_eq!(&v[..], &["a", "b"]);
}
// Inserting past the length panics even with spare capacity.
#[should_panic]
#[test]
fn test_insert_out_of_bounds() {
    let mut v = ArrayVec::<[_; 8]>::new();
    let _ = v.try_insert(1, "test");
}
/*
* insert that pushes out the last
let mut u = ArrayVec::from([1, 2, 3, 4]);
let ret = u.try_insert(3, 99);
assert_eq!(&u[..], &[1, 2, 3, 99]);
assert_matches!(ret, Err(_));
let ret = u.try_insert(4, 77);
assert_eq!(&u[..], &[1, 2, 3, 99]);
assert_matches!(ret, Err(_));
*/
// A failed insert returns (does not drop) the rejected element.
#[test]
fn test_drop_in_insert() {
    use std::cell::Cell;
    let flag = &Cell::new(0);
    struct Bump<'a>(&'a Cell<i32>);
    impl<'a> Drop for Bump<'a> {
        fn drop(&mut self) {
            let n = self.0.get();
            self.0.set(n + 1);
        }
    }
    flag.set(0);
    {
        let mut array = ArrayVec::<[_; 2]>::new();
        array.push(Bump(flag));
        array.insert(0, Bump(flag));
        assert_eq!(flag.get(), 0);
        let ret = array.try_insert(1, Bump(flag));
        assert_eq!(flag.get(), 0);
        assert_matches!(ret, Err(_));
        drop(ret);
        assert_eq!(flag.get(), 1);
    }
    assert_eq!(flag.get(), 3);
}
// pop_at removes by index; out-of-range indices give None.
#[test]
fn test_pop_at() {
    let mut v = ArrayVec::<[String; 4]>::new();
    let s = String::from;
    v.push(s("a"));
    v.push(s("b"));
    v.push(s("c"));
    v.push(s("d"));
    assert_eq!(v.pop_at(4), None);
    assert_eq!(v.pop_at(1), Some(s("b")));
    assert_eq!(v.pop_at(1), Some(s("c")));
    assert_eq!(v.pop_at(2), None);
    assert_eq!(&v[..], &["a", "d"]);
}
// Very large backing arrays (64 KiB) work.
#[test]
fn test_sizes() {
    let v = ArrayVec::from([0u8; 1 << 16]);
    assert_eq!(vec![0u8; v.len()], &v[..]);
}
// Default works even when the element type has no Default.
#[test]
fn test_default() {
    use std::net;
    let s: ArrayString<[u8; 4]> = Default::default();
    // Something without `Default` implementation.
    let v: ArrayVec<[net::TcpStream; 4]> = Default::default();
    assert_eq!(s.len(), 0);
    assert_eq!(v.len(), 0);
}
// Feature-gated array sizes outside the default 0..=32 range.
#[cfg(feature="array-sizes-33-128")]
#[test]
fn test_sizes_33_128() {
    ArrayVec::from([0u8; 52]);
    ArrayVec::from([0u8; 127]);
}
#[cfg(feature="array-sizes-129-255")]
#[test]
fn test_sizes_129_255() {
    ArrayVec::from([0u8; 237]);
    ArrayVec::from([0u8; 255]);
}
// Zero-sized elements obey the same capacity-limited extend semantics.
#[test]
fn test_extend_zst() {
    let mut range = 0..10;
    #[derive(Copy, Clone, PartialEq, Debug)]
    struct Z; // Zero sized type
    let mut array: ArrayVec<[_; 5]> = range.by_ref().map(|_| Z).collect();
    assert_eq!(&array[..], &[Z; 5]);
    assert_eq!(range.next(), Some(5));
    array.extend(range.by_ref().map(|_| Z));
    assert_eq!(range.next(), Some(6));
    let mut array: ArrayVec<[_; 10]> = (0..3).map(|_| Z).collect();
    assert_eq!(&array[..], &[Z; 3]);
    array.extend((3..5).map(|_| Z));
    assert_eq!(&array[..], &[Z; 5]);
    assert_eq!(array.len(), 5);
}
|
extern crate edit_distance;
// Known Levenshtein distances for simple ASCII inputs.
#[test]
fn simple() {
    assert_eq!(edit_distance::edit_distance("kitten", "sitting"), 3);
    assert_eq!(edit_distance::edit_distance("Tier", "Tor"), 2);
}
// Identical strings have distance zero.
#[test]
fn same() {
    assert_eq!(edit_distance::edit_distance("kitten", "kitten"), 0);
}
// Distance from the empty string is the other string's length.
#[test]
fn empty_a() {
    assert_eq!(edit_distance::edit_distance("", "kitten"), 6);
}
#[test]
fn empty_b() {
    assert_eq!(edit_distance::edit_distance("sitting", ""), 7);
}
#[test]
fn empty_both() {
    assert_eq!(edit_distance::edit_distance("", ""), 0);
}
// Distance is computed per Unicode scalar value, not per byte.
#[test]
fn unicode_misc() {
    assert_eq!(edit_distance::edit_distance("üö", "uo"), 2);
}
#[test]
fn unicode_thai() {
    assert_eq!(edit_distance::edit_distance("ฎ ฏ ฐ", "a b c"), 3);
}
#[test]
fn unicode_misc_equal() {
    assert_eq!(edit_distance::edit_distance("☀☂☃☄", "☀☂☃☄"), 0);
}
extern crate quickcheck;
use quickcheck::quickcheck;
// Property: distance is bounded below by the difference in lengths
// (counted in chars, matching the distance's unit).
#[test]
fn at_least_size_difference_property() {
    fn at_least_size_difference(a: String, b: String) -> bool {
        let size_a = a.chars().count() as i32;
        let size_b = b.chars().count() as i32;
        let diff = (size_a - size_b).abs();
        edit_distance::edit_distance(&a, &b) >= diff
    }
    quickcheck(at_least_size_difference as fn(a: String, b: String) -> bool);
}
// Property: distance is bounded above by the longer string's length.
#[test]
fn at_most_length_of_longer_property() {
    fn at_most_size_of_longer(a: String, b: String) -> bool {
        let upper_bound = *[a.chars().count(),
            b.chars().count()]
            .iter()
            .max()
            .unwrap() as i32;
        edit_distance::edit_distance(&a, &b) <= upper_bound
    }
    quickcheck(at_most_size_of_longer as fn(a: String, b: String) -> bool);
}
Only zero iff both strings are equal
Property test to verify that the implementation adheres to the property of
being zero iff the strings are equal.
// Tests for the edit_distance crate (later revision: adds the
// zero-iff-equal property test at the end).
extern crate edit_distance;
#[test]
fn simple() {
assert_eq!(edit_distance::edit_distance("kitten", "sitting"), 3);
assert_eq!(edit_distance::edit_distance("Tier", "Tor"), 2);
}
#[test]
fn same() {
assert_eq!(edit_distance::edit_distance("kitten", "kitten"), 0);
}
// distance from the empty string is the length of the other string
#[test]
fn empty_a() {
assert_eq!(edit_distance::edit_distance("", "kitten"), 6);
}
#[test]
fn empty_b() {
assert_eq!(edit_distance::edit_distance("sitting", ""), 7);
}
#[test]
fn empty_both() {
assert_eq!(edit_distance::edit_distance("", ""), 0);
}
// multi-byte characters must count as single edits
#[test]
fn unicode_misc() {
assert_eq!(edit_distance::edit_distance("üö", "uo"), 2);
}
#[test]
fn unicode_thai() {
assert_eq!(edit_distance::edit_distance("ฎ ฏ ฐ", "a b c"), 3);
}
#[test]
fn unicode_misc_equal() {
assert_eq!(edit_distance::edit_distance("☀☂☃☄", "☀☂☃☄"), 0);
}
extern crate quickcheck;
use quickcheck::quickcheck;
// Property: d(a, b) >= | |a| - |b| | (lengths in chars).
#[test]
fn at_least_size_difference_property() {
fn at_least_size_difference(a: String, b: String) -> bool {
let size_a = a.chars().count() as i32;
let size_b = b.chars().count() as i32;
let diff = (size_a - size_b).abs();
edit_distance::edit_distance(&a, &b) >= diff
}
quickcheck(at_least_size_difference as fn(a: String, b: String) -> bool);
}
// Property: d(a, b) <= max(|a|, |b|) (replace everything, then insert).
#[test]
fn at_most_length_of_longer_property() {
fn at_most_size_of_longer(a: String, b: String) -> bool {
let upper_bound = *[a.chars().count(),
b.chars().count()]
.iter()
.max()
.unwrap() as i32;
edit_distance::edit_distance(&a, &b) <= upper_bound
}
quickcheck(at_most_size_of_longer as fn(a: String, b: String) -> bool);
}
// Property: d(a, b) == 0 if and only if a == b.
#[test]
fn zero_iff_a_equals_b_property() {
fn zero_iff_a_equals_b(a: String, b: String) -> bool {
let d = edit_distance::edit_distance(&a, &b);
if a == b {
d == 0
} else {
d > 0
}
}
quickcheck(zero_iff_a_equals_b as fn(a: String, b: String) -> bool);
}
|
// Integration tests for the word2vec crate.  They need a pre-trained
// model file "vectors.bin" in the working directory (not checked in here).
extern crate word2vec;
use word2vec::wordvectors::WordVector;
const PATH: &'static str = "vectors.bin";
// cosine() should return the requested number of neighbours and never
// include the query word itself
#[test]
fn test_word_cosine()
{
let model = WordVector::load_from_binary(PATH).unwrap();
let result = model.cosine("winter", 10);
match result {
Some(res) => {
assert_eq!(res.len(), 10);
let only_words: Vec<&str> = res.iter().map(|x| x.0.as_ref()).collect();
assert!(!only_words.contains(&"winter"))
},
None => assert!(false),
}
}
// out-of-vocabulary words yield None
#[test]
fn test_unexisting_word_cosine() {
let model = WordVector::load_from_binary(PATH).unwrap();
let result = model.cosine("somenotexistingword", 10);
match result {
Some(_) => assert!(false),
None => assert!(true),
}
}
// the classic king - man + woman analogy: result list must not echo any
// of the input words
#[test]
fn test_word_analogy() {
let model = WordVector::load_from_binary(PATH).unwrap();
let mut pos = Vec::new();
pos.push("woman");
pos.push("king");
let mut neg = Vec::new();
neg.push("man");
let result = model.analogy(pos, neg, 10);
match result {
Some(res) => {
assert_eq!(res.len(), 10);
let only_words: Vec<&str> = res.iter().map(|x| x.0.as_ref()).collect();
assert!(!only_words.contains(&"woman"));
assert!(!only_words.contains(&"king"));
assert!(!only_words.contains(&"man"));
},
None => assert!(false),
}
}
// analogy() with no positive/negative words has no answer
#[test]
fn test_word_analogy_with_empty_params() {
let model = WordVector::load_from_binary(PATH).unwrap();
let result = model.analogy(Vec::new(), Vec::new(), 10);
match result {
Some(_) => assert!(false),
None => assert!(true),
}
}
Add test
- add a test
- improve two others
// Integration tests for the word2vec crate (later revision: cosine/analogy
// tests use expect() instead of match, and a word_count() test is added).
// They need a pre-trained model file "vectors.bin" in the working directory.
extern crate word2vec;
use word2vec::wordvectors::WordVector;
const PATH: &'static str = "vectors.bin";
// cosine() should return the requested number of neighbours and never
// include the query word itself
#[test]
fn test_word_cosine()
{
let model = WordVector::load_from_binary(PATH).unwrap();
let res = model.cosine("winter", 10).expect("word not found in vocabulary");
assert_eq!(res.len(), 10);
let only_words: Vec<&str> = res.iter().map(|x| x.0.as_ref()).collect();
assert!(!only_words.contains(&"winter"))
}
// out-of-vocabulary words yield None
#[test]
fn test_unexisting_word_cosine() {
let model = WordVector::load_from_binary(PATH).unwrap();
let result = model.cosine("somenotexistingword", 10);
match result {
Some(_) => assert!(false),
None => assert!(true),
}
}
// the classic king - man + woman analogy: result list must not echo any
// of the input words
#[test]
fn test_word_analogy() {
let model = WordVector::load_from_binary(PATH).unwrap();
let mut pos = Vec::new();
pos.push("woman");
pos.push("king");
let mut neg = Vec::new();
neg.push("man");
let res = model.analogy(pos, neg, 10).expect("couldn't find all of the given words");
assert_eq!(res.len(), 10);
let only_words: Vec<&str> = res.iter().map(|x| x.0.as_ref()).collect();
assert!(!only_words.contains(&"woman"));
assert!(!only_words.contains(&"king"));
assert!(!only_words.contains(&"man"));
}
// analogy() with no positive/negative words has no answer
#[test]
fn test_word_analogy_with_empty_params() {
let model = WordVector::load_from_binary(PATH).unwrap();
let result = model.analogy(Vec::new(), Vec::new(), 10);
match result {
Some(_) => assert!(false),
None => assert!(true),
}
}
// 71291 is the vocabulary size of the reference model file -- this test is
// tied to that specific "vectors.bin"
#[test]
fn test_word_count_is_correctly_returned() {
let v = WordVector::load_from_binary(PATH).unwrap();
assert_eq!(v.word_count(), 71291);
}
|
// Run a pre-built example binary, feed `s` to its stdin and assert that it
// exits successfully.  The examples must have been built first
// (paths are relative to the crate root: target/debug/examples/...).
fn test_str(exe: &str, s: &str) {
use std::process::{Command, Stdio};
use std::io::Write;
use std::str::from_utf8;
let mut cmd = Command::new(exe)
.stdin(Stdio::piped())
.stderr(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
.unwrap();
println!("started cmd");
write!(cmd.stdin.as_mut().unwrap(), "{}", s).unwrap();
println!("wrote to stdin");
// wait_with_output() closes the child's stdin before waiting, so the
// child sees EOF and can finish reading
let res = cmd.wait_with_output().unwrap();
println!("stdout: {}", from_utf8(&res.stdout).unwrap());
println!("stderr: {}", from_utf8(&res.stderr).unwrap());
println!("status: {:?}", res.status);
assert!(res.status.success());
}
#[test]
fn run_read() {
test_str("target/debug/examples/test_read", "the answer™: 42");
}
#[test]
fn run_read_simple() {
test_str("target/debug/examples/test_read_simple", "99\n");
}
#[test]
fn run_scan_simple() {
test_str("target/debug/examples/test_scan_simple", "42");
}
#[test]
fn run_scan() {
test_str("target/debug/examples/test_scan", "99, 42\n66");
}
Rustfmt
// Run a pre-built example binary, feed `s` to its stdin and assert that it
// exits successfully (rustfmt'ed revision -- only the import order differs
// from the earlier copy).
fn test_str(exe: &str, s: &str) {
use std::io::Write;
use std::process::{Command, Stdio};
use std::str::from_utf8;
let mut cmd = Command::new(exe)
.stdin(Stdio::piped())
.stderr(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
.unwrap();
println!("started cmd");
write!(cmd.stdin.as_mut().unwrap(), "{}", s).unwrap();
println!("wrote to stdin");
// wait_with_output() closes the child's stdin before waiting, so the
// child sees EOF and can finish reading
let res = cmd.wait_with_output().unwrap();
println!("stdout: {}", from_utf8(&res.stdout).unwrap());
println!("stderr: {}", from_utf8(&res.stderr).unwrap());
println!("status: {:?}", res.status);
assert!(res.status.success());
}
#[test]
fn run_read() {
test_str("target/debug/examples/test_read", "the answer™: 42");
}
#[test]
fn run_read_simple() {
test_str("target/debug/examples/test_read_simple", "99\n");
}
#[test]
fn run_scan_simple() {
test_str("target/debug/examples/test_scan_simple", "42");
}
#[test]
fn run_scan() {
test_str("target/debug/examples/test_scan", "99, 42\n66");
}
|
use std::rc::Rc;
use std::collections::{BTreeMap, HashMap};
use std::collections::HashSet;
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::u32;
use regex::Regex;
use db::*;
use types::*;
use web::data::*;
use web::config::*;
use web::cv_summary::make_cv_summaries;
use web::util::cmp_str_dates;
use interpro::UniprotResult;
/// Build a ConfigOrganism from a Chado organism, reading the taxon ID from
/// the "taxon_id" organismprop.  If several taxon_id props exist the last
/// one wins (same as the original implementation).
///
/// Panics with a diagnostic message if the organism has no taxon_id prop or
/// the prop value isn't a valid u32 -- previously these were bare unwrap()s
/// that gave no hint about which organism was broken.
fn make_organism(rc_organism: &Rc<Organism>) -> ConfigOrganism {
    let mut maybe_taxonid: Option<u32> = None;
    for prop in rc_organism.organismprops.borrow().iter() {
        if prop.prop_type.name == "taxon_id" {
            let taxonid = prop.value.parse()
                .unwrap_or_else(|_| panic!("failed to parse taxon_id \"{}\" of organism {} {}",
                                           prop.value, rc_organism.genus, rc_organism.species));
            maybe_taxonid = Some(taxonid);
        }
    }
    ConfigOrganism {
        taxonid: maybe_taxonid
            .unwrap_or_else(|| panic!("no taxon_id prop found for organism {} {}",
                                      rc_organism.genus, rc_organism.species)),
        genus: rc_organism.genus.clone(),
        species: rc_organism.species.clone(),
    }
}
// Term summaries keyed by term ID; None marks an ID that has been referenced
// but whose TermShort hasn't been filled in yet -- presumably populated
// lazily, verify against the code that inserts into these maps.
type TermShortOptionMap = HashMap<TermId, Option<TermShort>>;
// A UniProt accession string (used as the key of the domain data map).
type UniprotIdentifier = String;
// Accumulates everything needed to build the web site data.  The borrowed
// fields (raw Chado data, InterPro domain data, site config) are inputs;
// the owned maps are filled in as the build runs.
pub struct WebDataBuild<'a> {
raw: &'a Raw,
domain_data: &'a HashMap<UniprotIdentifier, UniprotResult>,
config: &'a Config,
genes: UniquenameGeneMap,
genotypes: UniquenameGenotypeMap,
genotype_backgrounds: HashMap<GenotypeUniquename, String>,
alleles: UniquenameAlleleMap,
other_features: UniquenameFeatureShortMap,
terms: TermIdDetailsMap,
chromosomes: ChrNameDetailsMap,
references: UniquenameReferenceMap,
// annotation IDs per term; the second map presumably holds NOT-qualified
// annotations (judging by the name -- confirm where they're inserted)
all_ont_annotations: HashMap<TermId, Vec<OntAnnotationId>>,
all_not_ont_annotations: HashMap<TermId, Vec<OntAnnotationId>>,
// lookup tables derived from feature relationships; keys look like child
// feature uniquenames -- verify against the loading code
genes_of_transcripts: HashMap<String, String>,
transcripts_of_polypeptides: HashMap<String, String>,
parts_of_transcripts: HashMap<String, Vec<FeatureShort>>,
genes_of_alleles: HashMap<String, String>,
alleles_of_genotypes: HashMap<String, Vec<ExpressedAllele>>,
// a map from IDs of terms from the "PomBase annotation extension terms" cv
// to a Vec of the details of each of the extension
parts_of_extensions: HashMap<TermId, Vec<ExtPart>>,
base_term_of_extensions: HashMap<TermId, TermId>,
children_by_termid: HashMap<TermId, HashSet<TermId>>,
dbxrefs_of_features: HashMap<String, HashSet<String>>,
// precomputed from the config, see get_possible_interesting_parents()
possible_interesting_parents: HashSet<InterestingParent>,
recent_references: RecentReferences,
all_community_curated: Vec<ReferenceShort>,
term_subsets: IdTermSubsetMap,
gene_subsets: IdGeneSubsetMap,
annotation_details: IdOntAnnotationDetailMap,
}
/// Create the five empty caches used while building detail pages.
fn get_maps() ->
    (HashMap<String, ReferenceShortOptionMap>,
     HashMap<String, GeneShortOptionMap>,
     HashMap<String, GenotypeShortMap>,
     HashMap<String, AlleleShortMap>,
     HashMap<GeneUniquename, TermShortOptionMap>)
{
    // Default for a tuple of HashMaps is just five HashMap::new() calls.
    Default::default()
}
// Return the expression level of an allele in a genotype: "Null" for
// deletion alleles, otherwise the value of the feature_relationship's
// "expression" prop, or None if there is no such prop.
fn get_feat_rel_expression(feature: &Feature,
feature_relationship: &FeatureRelationship) -> Option<String> {
// a deleted allele can't be expressed, whatever the relationship says
for feature_prop in feature.featureprops.borrow().iter() {
if feature_prop.prop_type.name == "allele_type" {
if let Some(ref value) = feature_prop.value {
if value == "deletion" {
return Some("Null".into());
}
}
}
}
for rel_prop in feature_relationship.feature_relationshipprops.borrow().iter() {
if rel_prop.prop_type.name == "expression" {
return rel_prop.value.clone();
}
}
None
}
// True if the reference carries anything worth displaying: ontology
// annotation, physical/genetic interactions, or ortholog/paralog
// annotations.
fn reference_has_annotation(reference_details: &ReferenceDetails) -> bool {
!reference_details.cv_annotations.is_empty() ||
!reference_details.physical_interactions.is_empty() ||
!reference_details.genetic_interactions.is_empty() ||
!reference_details.ortholog_annotations.is_empty() ||
!reference_details.paralog_annotations.is_empty()
}
/// True for the Chado feature types that this module treats as genes.
fn is_gene_type(feature_type_name: &str) -> bool {
    matches!(feature_type_name, "gene" | "pseudogene")
}
// Order two extension parts using the relation ordering from the config.
// Precedence: relations listed in `relation_order` first (in list order),
// then relations in neither list (alphabetical, genes before non-genes on
// ties), then relations listed in `always_last` (in list order).
pub fn compare_ext_part_with_config(config: &Config, ep1: &ExtPart, ep2: &ExtPart) -> Ordering {
let rel_order_conf = &config.extension_relation_order;
let order_conf = &rel_order_conf.relation_order;
let always_last_conf = &rel_order_conf.always_last;
let maybe_ep1_index = order_conf.iter().position(|r| *r == ep1.rel_type_name);
let maybe_ep2_index = order_conf.iter().position(|r| *r == ep2.rel_type_name);
if let Some(ep1_index) = maybe_ep1_index {
if let Some(ep2_index) = maybe_ep2_index {
// both relations configured: compare configured positions
ep1_index.cmp(&ep2_index)
} else {
Ordering::Less
}
} else {
if maybe_ep2_index.is_some() {
Ordering::Greater
} else {
// neither relation is in relation_order -- consult always_last
let maybe_ep1_last_index = always_last_conf.iter().position(|r| *r == ep1.rel_type_name);
let maybe_ep2_last_index = always_last_conf.iter().position(|r| *r == ep2.rel_type_name);
if let Some(ep1_last_index) = maybe_ep1_last_index {
if let Some(ep2_last_index) = maybe_ep2_last_index {
ep1_last_index.cmp(&ep2_last_index)
} else {
// only ep1 is "always last", so it sorts after ep2
Ordering::Greater
}
} else {
if maybe_ep2_last_index.is_some() {
Ordering::Less
} else {
// completely unconfigured: alphabetical by relation name,
// with gene ranges before non-gene ranges on a name tie
let name_cmp = ep1.rel_type_name.cmp(&ep2.rel_type_name);
if name_cmp == Ordering::Equal {
if ep1.ext_range.is_gene() && !ep2.ext_range.is_gene() {
Ordering::Less
} else {
if !ep1.ext_range.is_gene() && ep2.ext_range.is_gene() {
Ordering::Greater
} else {
Ordering::Equal
}
}
} else {
name_cmp
}
}
}
}
}
}
// Render an extension range as a display string for sorting/printing.
// Panics on unknown gene/term IDs and on the Summary* variants (which are
// presumably only produced after summarisation, hence unreachable here --
// confirm if that invariant changes).
fn string_from_ext_range(ext_range: &ExtRange,
genes: &UniquenameGeneMap, terms: &TermIdDetailsMap) -> String {
match *ext_range {
ExtRange::Gene(ref gene_uniquename) => {
let gene = genes.get(gene_uniquename)
.unwrap_or_else(|| panic!("can't find gene: {}", gene_uniquename));
gene_display_name(gene)
},
ExtRange::Promoter(ref promoter_uniquename) => promoter_uniquename.clone(),
ExtRange::SummaryGenes(_) => panic!("can't handle SummaryGenes\n"),
ExtRange::Term(ref termid) => terms.get(termid).unwrap().name.clone(),
// note: this message says "SummaryGenes" but the variant is SummaryTerms
ExtRange::SummaryTerms(_) => panic!("can't handle SummaryGenes\n"),
ExtRange::Misc(ref misc) => misc.clone(),
ExtRange::Domain(ref domain) => domain.clone(),
ExtRange::GeneProduct(ref gene_product) => gene_product.clone(),
}
}
/// Order two extension parts: by relation display name first, then by a
/// case-insensitive comparison of the rendered range strings.
fn cmp_ext_part(ext_part1: &ExtPart, ext_part2: &ExtPart,
                genes: &UniquenameGeneMap,
                terms: &TermIdDetailsMap) -> Ordering {
    ext_part1.rel_type_display_name
        .cmp(&ext_part2.rel_type_display_name)
        .then_with(|| {
            // only render the (potentially expensive) range strings on a tie
            let range1 = string_from_ext_range(&ext_part1.ext_range, genes, terms);
            let range2 = string_from_ext_range(&ext_part2.ext_range, genes, terms);
            range1.to_lowercase().cmp(&range2.to_lowercase())
        })
}
// compare the extension up to the last common index
// Parts whose relation will be grouped in the summary (i.e. not in
// summary_relation_ranges_to_collect) are moved to the front of each
// extension before the element-wise comparison; ties on the shared prefix
// leave the result to the caller (cmp_extension compares lengths).
fn cmp_extension_prefix(cv_config: &CvConfig, ext1: &[ExtPart], ext2: &[ExtPart],
genes: &UniquenameGeneMap,
terms: &TermIdDetailsMap) -> Ordering {
let conf_rel_ranges = &cv_config.summary_relation_ranges_to_collect;
let is_grouping_rel_name =
|ext: &ExtPart| !conf_rel_ranges.contains(&ext.rel_type_name);
// put the extension that will be grouped in the summary at the end
// See: https://github.com/pombase/pombase-chado/issues/636
let (mut ext1_for_cmp, ext1_rest): (Vec<ExtPart>, Vec<ExtPart>) =
ext1.to_vec().into_iter().partition(&is_grouping_rel_name);
ext1_for_cmp.extend(ext1_rest.into_iter());
let (mut ext2_for_cmp, ext2_rest): (Vec<ExtPart>, Vec<ExtPart>) =
ext2.to_vec().into_iter().partition(&is_grouping_rel_name);
ext2_for_cmp.extend(ext2_rest.into_iter());
// zip() stops at the shorter extension, so only the common prefix is compared
let iter = ext1_for_cmp.iter().zip(&ext2_for_cmp).enumerate();
for (_, (ext1_part, ext2_part)) in iter {
let ord = cmp_ext_part(ext1_part, ext2_part, genes, terms);
if ord != Ordering::Equal {
return ord
}
}
Ordering::Equal
}
/// Full extension ordering: compare the (reordered) common prefix first and
/// break ties by extension length, so shorter extensions sort first.
fn cmp_extension(cv_config: &CvConfig, ext1: &[ExtPart], ext2: &[ExtPart],
                 genes: &UniquenameGeneMap,
                 terms: &TermIdDetailsMap) -> Ordering {
    cmp_extension_prefix(cv_config, ext1, ext2, genes, terms)
        .then_with(|| ext1.len().cmp(&ext2.len()))
}
// Order genotypes case-insensitively by their display uniquename.
fn cmp_genotypes(genotype1: &GenotypeDetails, genotype2: &GenotypeDetails) -> Ordering {
genotype1.display_uniquename.to_lowercase().cmp(&genotype2.display_uniquename.to_lowercase())
}
/// Build a display name for an allele, e.g. "name(description)" for
/// deletions/wild-type alleles or "name-description[-type]" otherwise.
fn allele_display_name(allele: &AlleleShort) -> String {
    // Strip whitespace and underscores so that e.g. "wild_type" and
    // "wild type" compare equal.  The previous code used
    // description.replace("[\\s_]+", ""), but str::replace() matches its
    // pattern literally (it is not a regex), so nothing was ever removed
    // and "wild type" descriptions were shown redundantly.
    fn normalise(s: &str) -> String {
        s.chars().filter(|c| !c.is_whitespace() && *c != '_').collect()
    }
    let name = allele.name.clone().unwrap_or_else(|| "unnamed".into());
    let allele_type = allele.allele_type.clone();
    // missing description falls back to the allele type itself
    let description = allele.description.clone().unwrap_or_else(|| allele_type.clone());
    if allele_type == "deletion" && name.ends_with("delta") ||
        allele_type.starts_with("wild_type") && name.ends_with('+') {
        // only show the description when it adds information beyond the
        // allele type
        if normalise(&description) != normalise(&allele_type) {
            return name + "(" + description.as_str() + ")";
        } else {
            return name;
        }
    }
    if allele_type == "deletion" {
        name + "-" + description.as_str()
    } else {
        name + "-" + description.as_str() + "-" + &allele.allele_type
    }
}
/// Display name for a gene: the gene name when there is one, otherwise the
/// systematic uniquename.
fn gene_display_name(gene: &GeneDetails) -> String {
    gene.name.clone().unwrap_or_else(|| gene.uniquename.clone())
}
// Build a genotype display name by joining the (sorted) display names of
// its expressed alleles with "_".
pub fn make_genotype_display_name(genotype_expressed_alleles: &[ExpressedAllele],
allele_map: &UniquenameAlleleMap) -> String {
let mut allele_display_names: Vec<String> =
genotype_expressed_alleles.iter().map(|expressed_allele| {
let allele_short = allele_map.get(&expressed_allele.allele_uniquename).unwrap();
let mut display_name = allele_display_name(allele_short);
if allele_short.allele_type != "deletion" {
// completely unrecorded alleles get the gene uniquename as a prefix
// so they can be told apart
if display_name == "unnamed-unrecorded-unrecorded" {
display_name = format!("{}-{}", allele_short.gene_uniquename,
display_name);
}
if let Some(ref expression) = expressed_allele.expression {
display_name += &format!("-expression-{}", expression.to_lowercase());
}
}
display_name
}).collect();
allele_display_names.sort();
let joined_alleles = allele_display_names.join(" ");
// note: this also converts any spaces *inside* an allele display name to "_"
str::replace(&joined_alleles, " ", "_")
}
/// Convert a featureloc's numeric phase (0/1/2) to the Phase enum; None when
/// the location has no phase.  Panics on any other value, which would mean
/// corrupt input data.
fn make_phase(feature_loc: &Featureloc) -> Option<Phase> {
    feature_loc.phase.map(|phase| match phase {
        0 => Phase::Zero,
        1 => Phase::One,
        2 => Phase::Two,
        _ => panic!(),
    })
}
// Convert the first featureloc of `feat` to a ChromosomeLocation with
// 1-based inclusive coordinates, or None if the feature has no location.
fn make_location(chromosome_map: &ChrNameDetailsMap,
feat: &Feature) -> Option<ChromosomeLocation> {
let feature_locs = feat.featurelocs.borrow();
match feature_locs.get(0) {
Some(feature_loc) => {
// fmin gets +1 but fmax is used as-is: the stored coordinates are
// presumably 0-based half-open (Chado interbase) -- confirm against
// the schema
let start_pos =
if feature_loc.fmin + 1 >= 1 {
(feature_loc.fmin + 1) as u32
} else {
panic!("start_pos less than 1");
};
let end_pos =
if feature_loc.fmax >= 1 {
feature_loc.fmax as u32
} else {
panic!("start_end less than 1");
};
let feature_uniquename = &feature_loc.srcfeature.uniquename;
let chr_short = make_chromosome_short(chromosome_map, feature_uniquename);
Some(ChromosomeLocation {
chromosome_name: chr_short.name,
start_pos,
end_pos,
// only +1/-1 strands occur in valid data; anything else panics
strand: match feature_loc.strand {
1 => Strand::Forward,
-1 => Strand::Reverse,
_ => panic!(),
},
phase: make_phase(&feature_loc),
})
},
None => None,
}
}
/// Watson-Crick complement of a single base, preserving case; any
/// unrecognised character maps to 'n'.
fn complement_char(base: char) -> char {
    const PAIRS: [(char, char); 8] = [
        ('a', 't'), ('A', 'T'),
        ('t', 'a'), ('T', 'A'),
        ('g', 'c'), ('G', 'C'),
        ('c', 'g'), ('C', 'G'),
    ];
    for &(b, complement) in PAIRS.iter() {
        if b == base {
            return complement;
        }
    }
    'n'
}
// Reverse-complement a nucleotide sequence; characters other than
// acgt/ACGT become 'n' (see complement_char()).
fn rev_comp(residues: &str) -> Residues {
residues.chars()
.rev().map(complement_char)
.collect()
}
// Extract the residues covered by `loc` from the chromosome sequence,
// reverse-complementing when the location is on the reverse strand.
// start_pos is 1-based inclusive, hence the -1.
// NOTE(review): the byte-range slice assumes the residues are ASCII --
// a multi-byte character would make this panic; confirm upstream data.
fn get_loc_residues(chr: &ChromosomeDetails,
loc: &ChromosomeLocation) -> Residues {
let start = (loc.start_pos - 1) as usize;
let end = loc.end_pos as usize;
let residues: Residues = chr.residues[start..end].into();
if loc.strand == Strand::Forward {
residues
} else {
rev_comp(&residues)
}
}
// Build a FeatureShort (type + location + residues) for a non-gene feature.
// Panics if the feature has no featureloc, its chromosome isn't in the map,
// or its Chado type name isn't handled below.
fn make_feature_short(chromosome_map: &ChrNameDetailsMap, feat: &Feature) -> FeatureShort {
let maybe_loc = make_location(chromosome_map, feat);
if let Some(loc) = maybe_loc {
if let Some(chr) = chromosome_map.get(&loc.chromosome_name) {
let residues = get_loc_residues(chr, &loc);
// map the Chado/SO feature type name to the FeatureType enum
let feature_type = match &feat.feat_type.name as &str {
"five_prime_UTR" => FeatureType::FivePrimeUtr,
"pseudogenic_exon" | "exon" => FeatureType::Exon,
"three_prime_UTR" => FeatureType::ThreePrimeUtr,
"dg_repeat" => FeatureType::DGRepeat,
"dh_repeat" => FeatureType::DHRepeat,
"gap" => FeatureType::Gap,
"gene_group" => FeatureType::GeneGroup,
"long_terminal_repeat" => FeatureType::LongTerminalRepeat,
"low_complexity_region" => FeatureType::LowComplexityRegion,
"LTR_retrotransposon" => FeatureType::LTRRetrotransposon,
"mating_type_region" => FeatureType::MatingTypeRegion,
"nuclear_mt_pseudogene" => FeatureType::NuclearMtPseudogene,
"origin_of_replication" => FeatureType::OriginOfReplication,
"polyA_signal_sequence" => FeatureType::PolyASignalSequence,
"polyA_site" => FeatureType::PolyASite,
"promoter" => FeatureType::Promoter,
"region" => FeatureType::Region,
"regional_centromere" => FeatureType::RegionalCentromere,
"regional_centromere_central_core" => FeatureType::RegionalCentromereCentralCore,
"regional_centromere_inner_repeat_region" => FeatureType::RegionalCentromereInnerRepeatRegion,
"repeat_region" => FeatureType::RepeatRegion,
"TR_box" => FeatureType::TRBox,
"SNP" => FeatureType::SNP,
_ => panic!("can't handle feature type: {}", feat.feat_type.name),
};
FeatureShort {
feature_type,
uniquename: feat.uniquename.clone(),
location: loc,
residues,
}
} else {
panic!("can't find chromosome {}", loc.chromosome_name);
}
} else {
panic!("{} has no featureloc", feat.uniquename);
}
}
/// Look up a chromosome by name and return its summary form.  A missing
/// chromosome means the input data is broken, so die with a message.
pub fn make_chromosome_short<'a>(chromosome_map: &'a ChrNameDetailsMap,
                                 chromosome_name: &'a str) -> ChromosomeShort {
    chromosome_map.get(chromosome_name)
        .unwrap_or_else(|| panic!("can't find chromosome: {}", chromosome_name))
        .make_chromosome_short()
}
/// Build the GeneShort summary (uniquename, name, product) for a gene.
/// Panics when the uniquename is unknown -- callers pass IDs that must
/// already be in the map.
fn make_gene_short<'b>(gene_map: &'b UniquenameGeneMap,
                       gene_uniquename: &'b str) -> GeneShort {
    let gene_details = gene_map.get(gene_uniquename)
        .unwrap_or_else(|| panic!("can't find GeneDetails for gene uniquename {}",
                                  gene_uniquename));
    GeneShort {
        uniquename: gene_details.uniquename.clone(),
        name: gene_details.name.clone(),
        product: gene_details.product.clone(),
    }
}
/// Build the ReferenceShort summary for a reference uniquename.  The
/// placeholder uniquename "null" yields None; any other unknown uniquename
/// is a data error and panics.
fn make_reference_short<'a>(reference_map: &'a UniquenameReferenceMap,
                            reference_uniquename: &str) -> Option<ReferenceShort> {
    if reference_uniquename == "null" {
        return None;
    }
    let reference_details = reference_map.get(reference_uniquename)
        .unwrap_or_else(|| panic!("missing reference in make_reference_short(): {}",
                                  reference_uniquename));
    Some(ReferenceShort::from_reference_details(reference_details))
}
// Compare two gene vectors, which must already be ordered.  The uniquenames
// are expanded to GeneShort values so the comparison uses GeneShort's Ord.
fn cmp_gene_vec(genes: &UniquenameGeneMap,
                gene_vec1: &[GeneUniquename],
                gene_vec2: &[GeneUniquename]) -> Ordering {
    let to_gene_shorts = |gene_vec: &[GeneUniquename]| -> Vec<GeneShort> {
        gene_vec.iter()
            .map(|gene_uniquename: &String| make_gene_short(genes, gene_uniquename))
            .collect()
    };
    to_gene_shorts(gene_vec1).cmp(&to_gene_shorts(gene_vec2))
}
lazy_static
! {
// Matches a modification residue spec such as "K9" or "S1234": one upper
// case amino-acid letter followed by a position number.
static ref MODIFICATION_RE: Regex = Regex::new(r"^(?P<aa>[A-Z])(?P<pos>\d+)$").unwrap();
}
// Order two optional modification residues.  When both match the "K9"-style
// pattern they compare by amino-acid letter then numeric position; otherwise
// by plain string order.  Some sorts before None, so annotations with a
// residue come first.
fn cmp_residues(residue1: &Option<Residue>, residue2: &Option<Residue>) -> Ordering {
if let Some(ref res1) = *residue1 {
if let Some(ref res2) = *residue2 {
if let (Some(res1_captures), Some(res2_captures)) =
(MODIFICATION_RE.captures(res1), MODIFICATION_RE.captures(res2))
{
let res1_aa = res1_captures.name("aa").unwrap().as_str();
let res2_aa = res2_captures.name("aa").unwrap().as_str();
let aa_order = res1_aa.cmp(&res2_aa);
if aa_order == Ordering::Equal {
// numeric comparison so that e.g. "K9" < "K10"
let res1_pos =
res1_captures.name("pos").unwrap().as_str().parse::<i32>().unwrap();
let res2_pos =
res2_captures.name("pos").unwrap().as_str().parse::<i32>().unwrap();
res1_pos.cmp(&res2_pos)
} else {
aa_order
}
} else {
res1.cmp(&res2)
}
} else {
Ordering::Less
}
} else {
if residue2.is_some() {
Ordering::Greater
} else {
Ordering::Equal
}
}
}
// Order two annotation details for display.  Genotype annotations compare
// by genotype then extension; gene annotations by gene list, then by the
// cv_config sort_details_by keys ("modification" -> residue, anything else
// -> extension).  Comparing a genotype annotation with a gene annotation
// is an error.
pub fn cmp_ont_annotation_detail(cv_config: &CvConfig,
detail1: &OntAnnotationDetail,
detail2: &OntAnnotationDetail,
genes: &UniquenameGeneMap,
genotypes: &UniquenameGenotypeMap,
terms: &TermIdDetailsMap) -> Result<Ordering, String> {
if let Some(ref detail1_genotype_uniquename) = detail1.genotype {
if let Some(ref detail2_genotype_uniquename) = detail2.genotype {
let genotype1 = genotypes.get(detail1_genotype_uniquename).unwrap();
let genotype2 = genotypes.get(detail2_genotype_uniquename).unwrap();
let ord = cmp_genotypes(genotype1, genotype2);
if ord == Ordering::Equal {
Ok(cmp_extension(cv_config, &detail1.extension, &detail2.extension,
genes, terms))
} else {
Ok(ord)
}
} else {
Err(format!("comparing two OntAnnotationDetail but one has a genotype and
one a gene:\n{:?}\n{:?}\n", detail1, detail2))
}
} else {
if detail2.genotype.is_some() {
Err(format!("comparing two OntAnnotationDetail but one has a genotype and
one a gene:\n{:?}\n{:?}\n", detail1, detail2))
} else {
let ord = cmp_gene_vec(genes, &detail1.genes, &detail2.genes);
if ord == Ordering::Equal {
// same genes: break ties with the configured sort keys, in order
if let Some(ref sort_details_by) = cv_config.sort_details_by {
for sort_type in sort_details_by {
if sort_type == "modification" {
let res = cmp_residues(&detail1.residue, &detail2.residue);
if res != Ordering::Equal {
return Ok(res);
}
} else {
let res = cmp_extension(cv_config, &detail1.extension,
&detail2.extension, genes, terms);
if res != Ordering::Equal {
return Ok(res);
}
}
}
Ok(Ordering::Equal)
} else {
// no sort config: fall back to extension order
Ok(cmp_extension(cv_config, &detail1.extension, &detail2.extension,
genes, terms))
}
} else {
Ok(ord)
}
}
}
}
// Some ancestor terms are useful in the web code. This function uses the Config and returns
// the terms that might be useful.
fn get_possible_interesting_parents(config: &Config) -> HashSet<InterestingParent> {
let mut ret = HashSet::new();
// explicitly configured interesting parents
for parent_conf in &config.interesting_parents {
ret.insert(parent_conf.clone());
}
// terms used in if_descendent_of conditions of extension display names
for ext_conf in &config.extension_display_names {
if let Some(ref conf_termid) = ext_conf.if_descendent_of {
ret.insert(InterestingParent {
termid: conf_termid.clone(),
rel_name: "is_a".into(),
});
}
}
// GO slim terms, for every descendant relation
for go_slim_conf in &config.go_slim_terms {
for rel_name in &DESCENDANT_REL_NAMES {
ret.insert(InterestingParent {
termid: go_slim_conf.termid.clone(),
rel_name: (*rel_name).to_owned(),
});
}
}
// GO component terms used by the query/advanced-search data
for query_data_go_conf in &config.query_data_config.go_components {
for rel_name in &DESCENDANT_REL_NAMES {
ret.insert(InterestingParent {
termid: query_data_go_conf.clone(),
rel_name: (*rel_name).to_owned(),
});
}
}
// viability terms are always needed for the phenotype displays
ret.insert(InterestingParent {
termid: config.viability_terms.viable.clone(),
rel_name: "is_a".into(),
});
ret.insert(InterestingParent {
termid: config.viability_terms.inviable.clone(),
rel_name: "is_a".into(),
});
// ancestors used by per-CV filter categories
for (cv_name, conf) in &config.cv_config {
for filter in &conf.filters {
for category in &filter.term_categories {
for ancestor in &category.ancestors {
for config_rel_name in &DESCENDANT_REL_NAMES {
// has_part is only meaningful for a whitelist of CVs
if *config_rel_name == "has_part" &&
!HAS_PART_CV_NAMES.contains(&cv_name.as_str()) {
continue;
}
ret.insert(InterestingParent {
termid: ancestor.clone(),
rel_name: String::from(*config_rel_name),
});
}
}
}
}
// split_by_parents termids may carry a "NOT " prefix that negates the
// match; strip it before storing
for split_by_parent_config in &conf.split_by_parents {
for ancestor in &split_by_parent_config.termids {
let ancestor_termid =
if ancestor.starts_with("NOT ") {
ancestor[4..].to_owned()
} else {
ancestor.clone()
};
ret.insert(InterestingParent {
termid: ancestor_termid,
rel_name: "is_a".into(),
});
}
}
}
ret
}
// Maximum number of publications in each "recent" list on the front page.
const MAX_RECENT_REFS: usize = 20;
// Return up to MAX_RECENT_REFS references sorted newest-first by their
// canto_added_date.  References without an added date sort last, so they
// only appear when there are few dated references.
fn make_recently_added(references_map: &UniquenameReferenceMap,
all_ref_uniquenames: &[String]) -> Vec<ReferenceShort> {
let mut date_sorted_pub_uniquenames = all_ref_uniquenames.to_owned();
{
let ref_added_date_cmp =
|ref_uniquename1: &ReferenceUniquename, ref_uniquename2: &ReferenceUniquename| {
let ref1 = references_map.get(ref_uniquename1).unwrap();
let ref2 = references_map.get(ref_uniquename2).unwrap();
if let Some(ref ref1_added_date) = ref1.canto_added_date {
if let Some(ref ref2_added_date) = ref2.canto_added_date {
// reverse() puts the most recently added first
cmp_str_dates(ref1_added_date, ref2_added_date).reverse()
} else {
Ordering::Less
}
} else {
if ref2.canto_added_date.is_some() {
Ordering::Greater
} else {
Ordering::Equal
}
}
};
date_sorted_pub_uniquenames.sort_by(ref_added_date_cmp);
}
let recently_added_iter =
date_sorted_pub_uniquenames.iter().take(MAX_RECENT_REFS);
let mut recently_added: Vec<ReferenceShort> = vec![];
for ref_uniquename in recently_added_iter {
// make_reference_short() returns None for the "null" placeholder
let ref_short_maybe = make_reference_short(references_map, ref_uniquename);
if let Some(ref_short) = ref_short_maybe {
recently_added.push(ref_short);
}
}
recently_added
}
/// Split the Canto-curated references into three lists, each sorted
/// newest-first: (recent admin curated, recent community curated, all
/// community curated).  The two "recent" lists are capped at
/// MAX_RECENT_REFS entries.
fn make_canto_curated(references_map: &UniquenameReferenceMap,
                      all_ref_uniquenames: &[String])
                      -> (Vec<ReferenceShort>, Vec<ReferenceShort>, Vec<ReferenceShort>) {
    // Keep only references curated in Canto: they need a curator role plus
    // either an approval date or a session submission date.
    let mut sorted_pub_uniquenames: Vec<ReferenceUniquename> =
        all_ref_uniquenames.iter()
        .filter(|ref_uniquename| {
            let reference = references_map.get(*ref_uniquename).unwrap();
            (reference.canto_first_approved_date.is_some() ||
             reference.canto_session_submitted_date.is_some()) &&
                reference.canto_curator_role.is_some()
        })
        .cloned()
        .collect();
    {
        let pub_date_cmp =
            |ref_uniquename1: &ReferenceUniquename, ref_uniquename2: &ReferenceUniquename| {
                let ref1 = references_map.get(ref_uniquename1).unwrap();
                let ref2 = references_map.get(ref_uniquename2).unwrap();
                // use first approval date, but fall back to the most recent
                // approval date, then the session submission date
                let ref1_date =
                    ref1.canto_first_approved_date.as_ref()
                    .unwrap_or_else(|| ref1.canto_approved_date.as_ref()
                                    .unwrap_or_else(|| ref1.canto_session_submitted_date.
                                                    as_ref().unwrap()));
                let ref2_date =
                    ref2.canto_first_approved_date.as_ref()
                    .unwrap_or_else(|| ref2.canto_approved_date.as_ref()
                                    .unwrap_or_else(|| ref2.canto_session_submitted_date.
                                                    as_ref().unwrap()));
                // arguments swapped: newest date first
                cmp_str_dates(ref2_date, ref1_date)
            };
        sorted_pub_uniquenames.sort_by(pub_date_cmp);
    }
    let mut recent_admin_curated = vec![];
    let mut recent_community_curated = vec![];
    let mut all_community_curated = vec![];
    for ref_uniquename in sorted_pub_uniquenames.iter() {
        let reference = references_map.get(ref_uniquename).unwrap();
        if reference.canto_curator_role == Some("community".into()) {
            let ref_short = make_reference_short(references_map, ref_uniquename).unwrap();
            all_community_curated.push(ref_short.clone());
            // was `len() <= MAX_RECENT_REFS`, which let the list grow to
            // MAX_RECENT_REFS + 1 entries; `<` caps it at MAX_RECENT_REFS,
            // consistent with make_recently_added()'s take(MAX_RECENT_REFS)
            if recent_community_curated.len() < MAX_RECENT_REFS {
                recent_community_curated.push(ref_short);
            }
        } else if recent_admin_curated.len() < MAX_RECENT_REFS {
            let ref_short = make_reference_short(references_map, ref_uniquename).unwrap();
            recent_admin_curated.push(ref_short);
        }
    }
    (recent_admin_curated, recent_community_curated, all_community_curated)
}
// Insert intron FeatureShorts between consecutive parts (exons/UTRs) of a
// transcript, rebuilding `parts` in place.  The parts are assumed to be in
// chromosome order -- TODO confirm at the call site.  The intron type
// depends on its neighbours: exon/exon -> CdsIntron, otherwise a 5' or 3'
// UTR intron.
fn add_introns_to_transcript(chromosome: &ChromosomeDetails,
transcript_uniquename: &str, parts: &mut Vec<FeatureShort>) {
let mut new_parts: Vec<FeatureShort> = vec![];
let mut intron_count = 0;
for part in parts.drain(0..) {
let mut maybe_new_intron = None;
if let Some(prev_part) = new_parts.last() {
// the candidate intron is the gap between the previous part and this one
let intron_start = prev_part.location.end_pos + 1;
let intron_end = part.location.start_pos - 1;
if intron_start > intron_end {
// overlapping/abutting parts: warn unless it's the one-base case below
if intron_start > intron_end + 1 {
println!("no gap between exons at {}..{} in {}", intron_start, intron_end,
transcript_uniquename);
}
// if intron_start == intron_end-1 then it is a one base overlap that
// represents a frameshift in the reference See:
// https://github.com/pombase/curation/issues/1453#issuecomment-303214177
} else {
intron_count += 1;
let new_intron_loc = ChromosomeLocation {
chromosome_name: prev_part.location.chromosome_name.clone(),
start_pos: intron_start,
end_pos: intron_end,
strand: prev_part.location.strand.clone(),
phase: None,
};
let intron_uniquename =
format!("{}:intron:{}", transcript_uniquename, intron_count);
let intron_residues = get_loc_residues(chromosome, &new_intron_loc);
let intron_type =
if prev_part.feature_type == FeatureType::Exon &&
part.feature_type == FeatureType::Exon {
FeatureType::CdsIntron
} else {
if prev_part.feature_type == FeatureType::FivePrimeUtr {
FeatureType::FivePrimeUtrIntron
} else {
FeatureType::ThreePrimeUtrIntron
}
};
maybe_new_intron = Some(FeatureShort {
feature_type: intron_type,
uniquename: intron_uniquename,
location: new_intron_loc,
residues: intron_residues,
});
}
}
if let Some(new_intron) = maybe_new_intron {
new_parts.push(new_intron);
}
new_parts.push(part);
}
*parts = new_parts;
}
// Sanity-check a transcript's parts: panics if there are no exons or if a
// UTR of the wrong type flanks the exons for the given strand; prints a
// warning when a flanking UTR doesn't meet the exons exactly.
fn validate_transcript_parts(transcript_uniquename: &str, parts: &[FeatureShort]) {
let mut seen_exon = false;
for part in parts {
if part.feature_type == FeatureType::Exon {
seen_exon = true;
break;
}
}
if !seen_exon {
panic!("transcript has no exons: {}", transcript_uniquename);
}
// check the parts before the first exon
if parts[0].feature_type != FeatureType::Exon {
for i in 1..parts.len() {
let part = &parts[i];
if part.feature_type == FeatureType::Exon {
// the UTR immediately before the first exon must abut it
let last_utr_before_exons = &parts[i-1];
let first_exon = &parts[i];
if last_utr_before_exons.location.end_pos + 1 != first_exon.location.start_pos {
println!("{} and exon don't meet up: {} at pos {}",
last_utr_before_exons.feature_type, transcript_uniquename,
last_utr_before_exons.location.end_pos);
}
break;
} else {
// on the forward strand the leading parts are 5' UTRs; on the
// reverse strand they're 3' UTRs
if part.location.strand == Strand::Forward {
if part.feature_type != FeatureType::FivePrimeUtr {
println!("{:?}", parts);
panic!("wrong feature type '{}' before exons in {}",
part.feature_type, transcript_uniquename);
}
} else {
if part.feature_type != FeatureType::ThreePrimeUtr {
println!("{:?}", parts);
panic!("wrong feature type '{}' after exons in {}",
part.feature_type, transcript_uniquename);
}
}
}
}
}
// check the parts after the last exon, scanning backwards
let last_part = parts.last().unwrap();
if last_part.feature_type != FeatureType::Exon {
for i in (0..parts.len()-1).rev() {
let part = &parts[i];
if part.feature_type == FeatureType::Exon {
let first_utr_after_exons = &parts[i+1];
let last_exon = &parts[i];
if last_exon.location.end_pos + 1 != first_utr_after_exons.location.start_pos {
println!("{} and exon don't meet up: {} at pos {}",
first_utr_after_exons.feature_type, transcript_uniquename,
first_utr_after_exons.location.end_pos);
}
break;
} else {
// trailing parts: 3' UTRs on the forward strand, 5' on reverse
if part.location.strand == Strand::Forward {
if part.feature_type != FeatureType::ThreePrimeUtr {
panic!("wrong feature type '{}' before exons in {}",
part.feature_type, transcript_uniquename);
}
} else {
if part.feature_type != FeatureType::FivePrimeUtr {
panic!("wrong feature type '{}' after exons in {}",
part.feature_type, transcript_uniquename);
}
}
}
}
}
}
impl <'a> WebDataBuild<'a> {
    /// Create a `WebDataBuild` over the raw Chado dump, the protein domain
    /// results (keyed by UniProt identifier) and the site configuration.
    /// Every output and intermediate map starts empty; the `process_*()`
    /// and `add_*()` methods fill them in.
    pub fn new(raw: &'a Raw, domain_data: &'a HashMap<UniprotIdentifier, UniprotResult>,
               config: &'a Config) -> WebDataBuild<'a>
    {
        WebDataBuild {
            raw,
            domain_data,
            config,
            // primary output maps, keyed by uniquename/termid
            genes: BTreeMap::new(),
            genotypes: HashMap::new(),
            genotype_backgrounds: HashMap::new(),
            alleles: HashMap::new(),
            other_features: HashMap::new(),
            terms: HashMap::new(),
            chromosomes: BTreeMap::new(),
            references: HashMap::new(),
            all_ont_annotations: HashMap::new(),
            all_not_ont_annotations: HashMap::new(),
            recent_references: RecentReferences {
                admin_curated: vec![],
                community_curated: vec![],
                pubmed: vec![],
            },
            all_community_curated: vec![],
            // intermediate maps built while walking feature relationships
            genes_of_transcripts: HashMap::new(),
            transcripts_of_polypeptides: HashMap::new(),
            parts_of_transcripts: HashMap::new(),
            genes_of_alleles: HashMap::new(),
            alleles_of_genotypes: HashMap::new(),
            parts_of_extensions: HashMap::new(),
            base_term_of_extensions: HashMap::new(),
            children_by_termid: HashMap::new(),
            dbxrefs_of_features: HashMap::new(),
            possible_interesting_parents: get_possible_interesting_parents(config),
            term_subsets: HashMap::new(),
            gene_subsets: HashMap::new(),
            annotation_details: HashMap::new(),
        }
    }
fn add_ref_to_hash(&self,
seen_references: &mut HashMap<String, ReferenceShortOptionMap>,
identifier: &str,
maybe_reference_uniquename: &Option<ReferenceUniquename>) {
if let Some(reference_uniquename) = maybe_reference_uniquename {
seen_references
.entry(identifier.into())
.or_insert_with(HashMap::new)
.insert(reference_uniquename.clone(), None);
}
}
fn add_gene_to_hash(&self,
seen_genes: &mut HashMap<String, GeneShortOptionMap>,
identifier: &str,
other_gene_uniquename: &GeneUniquename) {
seen_genes
.entry(identifier.to_owned())
.or_insert_with(HashMap::new)
.insert(other_gene_uniquename.clone(), None);
}
fn add_genotype_to_hash(&self,
seen_genotypes: &mut HashMap<String, GenotypeShortMap>,
seen_alleles: &mut HashMap<String, AlleleShortMap>,
seen_genes: &mut HashMap<String, GeneShortOptionMap>,
identifier: &str,
genotype_uniquename: &str) {
let genotype_short = self.make_genotype_short(&genotype_uniquename);
for expressed_allele in &genotype_short.expressed_alleles {
self.add_allele_to_hash(seen_alleles, seen_genes, identifier,
expressed_allele.allele_uniquename.clone());
}
seen_genotypes
.entry(identifier.to_owned())
.or_insert_with(HashMap::new)
.insert(genotype_uniquename.to_owned(), genotype_short);
}
fn add_allele_to_hash(&self,
seen_alleles: &mut HashMap<String, AlleleShortMap>,
seen_genes: &mut HashMap<String, GeneShortOptionMap>,
identifier: &str,
allele_uniquename: AlleleUniquename) -> AlleleShort {
let allele_short = self.make_allele_short(&allele_uniquename);
{
let allele_gene_uniquename = &allele_short.gene_uniquename;
self.add_gene_to_hash(seen_genes, identifier, allele_gene_uniquename);
seen_alleles
.entry(identifier.to_owned())
.or_insert_with(HashMap::new)
.insert(allele_uniquename, allele_short.clone());
}
allele_short
}
fn add_term_to_hash(&self,
seen_terms: &mut HashMap<TermId, TermShortOptionMap>,
identifier: &str,
other_termid: &TermId) {
seen_terms
.entry(identifier.to_owned())
.or_insert_with(HashMap::new)
.insert(other_termid.clone(), None);
}
fn get_gene<'b>(&'b self, gene_uniquename: &'b str) -> &'b GeneDetails {
if let Some(gene_details) = self.genes.get(gene_uniquename) {
gene_details
} else {
panic!("can't find GeneDetails for gene uniquename {}", gene_uniquename)
}
}
fn get_gene_mut<'b>(&'b mut self, gene_uniquename: &'b str) -> &'b mut GeneDetails {
if let Some(gene_details) = self.genes.get_mut(gene_uniquename) {
gene_details
} else {
panic!("can't find GeneDetails for gene uniquename {}", gene_uniquename)
}
}
fn make_gene_short(&self, gene_uniquename: &str) -> GeneShort {
let gene_details = self.get_gene(gene_uniquename);
GeneShort {
uniquename: gene_details.uniquename.clone(),
name: gene_details.name.clone(),
product: gene_details.product.clone(),
}
}
fn make_gene_summary(&self, gene_uniquename: &str) -> GeneSummary {
let gene_details = self.get_gene(gene_uniquename);
let synonyms =
gene_details.synonyms.iter()
.filter(|synonym| synonym.synonym_type == "exact")
.map(|synonym| synonym.name.clone())
.collect::<Vec<String>>();
let mut ortholog_ids =
gene_details.ortholog_annotations.iter()
.map(|ortholog_annotation| {
IdAndOrganism {
identifier: ortholog_annotation.ortholog_uniquename.clone(),
taxonid: ortholog_annotation.ortholog_taxonid,
}
})
.collect::<Vec<IdAndOrganism>>();
for ortholog_annotation in &gene_details.ortholog_annotations {
let orth_uniquename = &ortholog_annotation.ortholog_uniquename;
if let Some(orth_gene) =
self.genes.get(orth_uniquename) {
if let Some(ref orth_name) = orth_gene.name {
let id_and_org =IdAndOrganism {
identifier: orth_name.clone(),
taxonid: ortholog_annotation.ortholog_taxonid,
};
ortholog_ids.push(id_and_org);
}
} else {
panic!("missing GeneShort for: {:?}", orth_uniquename);
}
}
GeneSummary {
uniquename: gene_details.uniquename.clone(),
name: gene_details.name.clone(),
product: gene_details.product.clone(),
uniprot_identifier: gene_details.uniprot_identifier.clone(),
synonyms,
orthologs: ortholog_ids,
feature_type: gene_details.feature_type.clone(),
taxonid: gene_details.taxonid,
location: gene_details.location.clone(),
}
}
fn make_api_gene_summary(&self, gene_uniquename: &str) -> APIGeneSummary {
let gene_details = self.get_gene(gene_uniquename);
let synonyms =
gene_details.synonyms.iter()
.filter(|synonym| synonym.synonym_type == "exact")
.map(|synonym| synonym.name.clone())
.collect::<Vec<String>>();
let exon_count =
if let Some(transcript) = gene_details.transcripts.get(0) {
let mut count = 0;
for part in &transcript.parts {
if part.feature_type == FeatureType::Exon {
count += 1;
}
}
count
} else {
0
};
APIGeneSummary {
uniquename: gene_details.uniquename.clone(),
name: gene_details.name.clone(),
product: gene_details.product.clone(),
uniprot_identifier: gene_details.uniprot_identifier.clone(),
exact_synonyms: synonyms,
dbxrefs: gene_details.dbxrefs.clone(),
location: gene_details.location.clone(),
transcripts: gene_details.transcripts.clone(),
tm_domain_count: gene_details.tm_domain_coords.len(),
exon_count,
}
}
fn make_term_short(&self, termid: &str) -> TermShort {
if let Some(term_details) = self.terms.get(termid) {
TermShort::from_term_details(&term_details)
} else {
panic!("can't find TermDetails for termid: {}", termid)
}
}
fn add_characterisation_status(&mut self, gene_uniquename: &str,
cvterm_name: &str) {
let gene_details = self.genes.get_mut(gene_uniquename).unwrap();
gene_details.characterisation_status = Some(cvterm_name.into());
}
fn add_gene_product(&mut self, gene_uniquename: &str, product: &str) {
let gene_details = self.get_gene_mut(gene_uniquename);
gene_details.product = Some(product.to_owned());
}
fn add_name_description(&mut self, gene_uniquename: &str, name_description: &str) {
let gene_details = self.get_gene_mut(gene_uniquename);
gene_details.name_descriptions.push(name_description.into());
}
fn add_annotation(&mut self, cvterm: &Cvterm, is_not: bool,
annotation_template: OntAnnotationDetail) {
let termid =
match self.base_term_of_extensions.get(&cvterm.termid()) {
Some(base_termid) => base_termid.clone(),
None => cvterm.termid(),
};
let extension_parts =
match self.parts_of_extensions.get(&cvterm.termid()) {
Some(parts) => parts.clone(),
None => vec![],
};
let mut new_extension = extension_parts.clone();
let mut existing_extensions = annotation_template.extension.clone();
new_extension.append(&mut existing_extensions);
{
let compare_ext_part_func =
|e1: &ExtPart, e2: &ExtPart| compare_ext_part_with_config(self.config, e1, e2);
new_extension.sort_by(compare_ext_part_func);
};
let ont_annotation_detail =
OntAnnotationDetail {
extension: new_extension,
.. annotation_template
};
let annotation_map = if is_not {
&mut self.all_not_ont_annotations
} else {
&mut self.all_ont_annotations
};
let entry = annotation_map.entry(termid.clone());
entry.or_insert_with(Vec::new).push(ont_annotation_detail.id);
self.annotation_details.insert(ont_annotation_detail.id,
ont_annotation_detail);
}
fn process_dbxrefs(&mut self) {
let mut map = HashMap::new();
for feature_dbxref in &self.raw.feature_dbxrefs {
let feature = &feature_dbxref.feature;
let dbxref = &feature_dbxref.dbxref;
map.entry(feature.uniquename.clone())
.or_insert_with(HashSet::new)
.insert(dbxref.identifier());
}
self.dbxrefs_of_features = map;
}
fn process_references(&mut self) {
let mut all_uniquenames = vec![];
for rc_publication in &self.raw.publications {
let reference_uniquename = &rc_publication.uniquename;
let mut pubmed_authors: Option<String> = None;
let mut pubmed_publication_date: Option<String> = None;
let mut pubmed_abstract: Option<String> = None;
let mut canto_triage_status: Option<String> = None;
let mut canto_curator_role: Option<String> = None;
let mut canto_curator_name: Option<String> = None;
let mut canto_first_approved_date: Option<String> = None;
let mut canto_approved_date: Option<String> = None;
let mut canto_added_date: Option<String> = None;
let mut canto_session_submitted_date: Option<String> = None;
for prop in rc_publication.publicationprops.borrow().iter() {
match &prop.prop_type.name as &str {
"pubmed_publication_date" =>
pubmed_publication_date = Some(prop.value.clone()),
"pubmed_authors" =>
pubmed_authors = Some(prop.value.clone()),
"pubmed_abstract" =>
pubmed_abstract = Some(prop.value.clone()),
"canto_triage_status" =>
canto_triage_status = Some(prop.value.clone()),
"canto_curator_role" =>
canto_curator_role = Some(prop.value.clone()),
"canto_curator_name" =>
canto_curator_name = Some(prop.value.clone()),
"canto_first_approved_date" =>
canto_first_approved_date = Some(prop.value.clone()),
"canto_approved_date" =>
canto_approved_date = Some(prop.value.clone()),
"canto_added_date" =>
canto_added_date = Some(prop.value.clone()),
"canto_session_submitted_date" =>
canto_session_submitted_date = Some(prop.value.clone()),
_ => ()
}
}
let mut authors_abbrev = None;
let mut publication_year = None;
if let Some(authors) = pubmed_authors.clone() {
if authors.contains(',') {
let author_re = Regex::new(r"^(?P<f>[^,]+),.*$").unwrap();
let replaced: String =
author_re.replace_all(&authors, "$f et al.").into();
authors_abbrev = Some(replaced);
} else {
authors_abbrev = Some(authors.clone());
}
}
if let Some(publication_date) = pubmed_publication_date.clone() {
let date_re = Regex::new(r"^(.* )?(?P<y>\d\d\d\d)$").unwrap();
publication_year = Some(date_re.replace_all(&publication_date, "$y").into());
}
let mut approved_date = canto_first_approved_date.clone();
if approved_date.is_none() {
approved_date = canto_approved_date.clone();
}
if approved_date.is_none() {
approved_date = canto_session_submitted_date.clone();
}
approved_date =
if let Some(date) = approved_date {
let re = Regex::new(r"^(?P<date>\d\d\d\d-\d\d-\d\d).*").unwrap();
Some(re.replace_all(&date, "$date").into())
} else {
None
};
self.references.insert(reference_uniquename.clone(),
ReferenceDetails {
uniquename: reference_uniquename.clone(),
title: rc_publication.title.clone(),
citation: rc_publication.miniref.clone(),
pubmed_abstract: pubmed_abstract.clone(),
authors: pubmed_authors.clone(),
authors_abbrev,
pubmed_publication_date: pubmed_publication_date.clone(),
canto_triage_status,
canto_curator_role,
canto_curator_name,
canto_first_approved_date,
canto_approved_date,
canto_session_submitted_date,
canto_added_date,
approved_date,
publication_year,
cv_annotations: HashMap::new(),
physical_interactions: vec![],
genetic_interactions: vec![],
ortholog_annotations: vec![],
paralog_annotations: vec![],
genes_by_uniquename: HashMap::new(),
genotypes_by_uniquename: HashMap::new(),
alleles_by_uniquename: HashMap::new(),
terms_by_termid: HashMap::new(),
annotation_details: HashMap::new(),
});
if pubmed_publication_date.is_some() {
all_uniquenames.push(reference_uniquename.clone());
}
}
let (recent_admin_curated, recent_community_curated,
all_community_curated) =
make_canto_curated(&self.references, &all_uniquenames);
let recent_references = RecentReferences {
pubmed: make_recently_added(&self.references, &all_uniquenames),
admin_curated: recent_admin_curated,
community_curated: recent_community_curated,
};
self.recent_references = recent_references;
self.all_community_curated = all_community_curated;
}
    // make maps from genes to transcript, transcripts to polypeptide,
    // exon, intron, UTRs
    //
    // Each feature_relationship is dispatched on the subject/object feature
    // types and the relationship type; the first matching case wins.
    fn make_feature_rel_maps(&mut self) {
        for feature_rel in &self.raw.feature_relationships {
            let subject_type_name = &feature_rel.subject.feat_type.name;
            let rel_name = &feature_rel.rel_type.name;
            let object_type_name = &feature_rel.object.feat_type.name;
            let subject_uniquename = &feature_rel.subject.uniquename;
            let object_uniquename = &feature_rel.object.uniquename;

            // transcript part_of gene
            if TRANSCRIPT_FEATURE_TYPES.contains(&subject_type_name.as_str()) &&
                rel_name == "part_of" && is_gene_type(object_type_name) {
                self.genes_of_transcripts.insert(subject_uniquename.clone(),
                                                 object_uniquename.clone());
                continue;
            }
            // polypeptide derives_from mRNA
            if subject_type_name == "polypeptide" &&
                rel_name == "derives_from" &&
                object_type_name == "mRNA" {
                self.transcripts_of_polypeptides.insert(subject_uniquename.clone(),
                                                        object_uniquename.clone());
                continue;
            }
            if subject_type_name == "allele" {
                // allele instance_of gene or pseudogene
                if feature_rel.rel_type.name == "instance_of" &&
                    (object_type_name == "gene" || object_type_name == "pseudogene") {
                    self.genes_of_alleles.insert(subject_uniquename.clone(),
                                                 object_uniquename.clone());
                    continue;
                }
                // allele part_of genotype, with an optional expression level
                if feature_rel.rel_type.name == "part_of" &&
                    object_type_name == "genotype" {
                    let expression = get_feat_rel_expression(&feature_rel.subject, feature_rel);
                    let allele_and_expression =
                        ExpressedAllele {
                            allele_uniquename: subject_uniquename.clone(),
                            expression,
                        };
                    let entry = self.alleles_of_genotypes.entry(object_uniquename.clone());
                    entry.or_insert_with(Vec::new).push(allele_and_expression);
                    continue;
                }
            }
            // transcript parts (exons, UTRs, ...) grouped by the object,
            // presumably their transcript - the rel type isn't checked here
            if TRANSCRIPT_PART_TYPES.contains(&subject_type_name.as_str()) {
                let entry = self.parts_of_transcripts.entry(object_uniquename.clone());
                let part = make_feature_short(&self.chromosomes, &feature_rel.subject);
                entry.or_insert_with(Vec::new).push(part);
            }
        }
    }
fn get_feature_dbxrefs(&self, feature: &Feature) -> HashSet<String> {
if let Some(dbxrefs) = self.dbxrefs_of_features.get(&feature.uniquename) {
dbxrefs.clone()
} else {
HashSet::new()
}
}
fn store_gene_details(&mut self, feat: &Feature) {
let maybe_location = make_location(&self.chromosomes, feat);
if let Some(ref location) = maybe_location {
if let Some(ref mut chr) = self.chromosomes.get_mut(&location.chromosome_name) {
chr.gene_uniquenames.push(feat.uniquename.clone());
}
}
let organism = make_organism(&feat.organism);
let dbxrefs = self.get_feature_dbxrefs(feat);
let mut orfeome_identifier = None;
for dbxref in &dbxrefs {
if dbxref.starts_with("SPD:") {
orfeome_identifier = Some(String::from(&dbxref[4..]));
}
}
let mut uniprot_identifier = None;
for prop in feat.featureprops.borrow().iter() {
if prop.prop_type.name == "uniprot_identifier" {
uniprot_identifier = prop.value.clone();
break;
}
}
let (interpro_matches, tm_domain_coords) =
if let Some(ref uniprot_identifier) = uniprot_identifier {
if let Some(result) = self.domain_data.get(uniprot_identifier) {
let tm_domain_matches = result.tmhmm_matches.iter()
.map(|tm_match| (tm_match.start, tm_match.end))
.collect::<Vec<_>>();
(result.interpro_matches.clone(), tm_domain_matches)
} else {
(vec![], vec![])
}
} else {
(vec![], vec![])
};
let gene_feature = GeneDetails {
uniquename: feat.uniquename.clone(),
name: feat.name.clone(),
taxonid: organism.taxonid,
product: None,
deletion_viability: DeletionViability::Unknown,
uniprot_identifier,
interpro_matches,
tm_domain_coords,
orfeome_identifier,
name_descriptions: vec![],
synonyms: vec![],
dbxrefs,
feature_type: feat.feat_type.name.clone(),
characterisation_status: None,
location: maybe_location,
gene_neighbourhood: vec![],
cv_annotations: HashMap::new(),
physical_interactions: vec![],
genetic_interactions: vec![],
ortholog_annotations: vec![],
paralog_annotations: vec![],
target_of_annotations: vec![],
transcripts: vec![],
genes_by_uniquename: HashMap::new(),
genotypes_by_uniquename: HashMap::new(),
alleles_by_uniquename: HashMap::new(),
references_by_uniquename: HashMap::new(),
terms_by_termid: HashMap::new(),
annotation_details: HashMap::new(),
};
self.genes.insert(feat.uniquename.clone(), gene_feature);
}
fn get_transcript_parts(&mut self, transcript_uniquename: &str) -> Vec<FeatureShort> {
let mut parts = self.parts_of_transcripts.remove(transcript_uniquename)
.expect("can't find transcript");
if parts.is_empty() {
panic!("transcript has no parts: {}", transcript_uniquename);
}
let part_cmp = |a: &FeatureShort, b: &FeatureShort| {
a.location.start_pos.cmp(&b.location.start_pos)
};
parts.sort_by(&part_cmp);
validate_transcript_parts(transcript_uniquename, &parts);
let chr_name = &parts[0].location.chromosome_name.clone();
if let Some(chromosome) = self.chromosomes.get(chr_name) {
add_introns_to_transcript(chromosome, transcript_uniquename, &mut parts);
} else {
panic!("can't find chromosome details for: {}", chr_name);
}
if parts[0].location.strand == Strand::Reverse {
parts.reverse();
}
parts
}
    /// Build a TranscriptDetails from a transcript feature and push it onto
    /// its gene's transcripts.  The transcript location spans all parts;
    /// for mRNAs a CDS location covering just the exons is also computed.
    /// The gene's feature_type gets the transcript type prepended (except
    /// for pseudogenes).
    fn store_transcript_details(&mut self, feat: &Feature) {
        let transcript_uniquename = feat.uniquename.clone();

        let parts = self.get_transcript_parts(&transcript_uniquename);

        if parts.is_empty() {
            panic!("transcript has no parts");
        }

        // the transcript extent is the min/max position over all parts
        let mut transcript_start = u32::MAX;
        let mut transcript_end = 0;

        for part in &parts {
            if part.location.start_pos < transcript_start {
                transcript_start = part.location.start_pos;
            }
            if part.location.end_pos > transcript_end {
                transcript_end = part.location.end_pos;
            }
        }

        // use the first part as a template to get the chromosome details
        let transcript_location =
            ChromosomeLocation {
                start_pos: transcript_start,
                end_pos: transcript_end,
                phase: None,
                .. parts[0].location.clone()
            };

        let maybe_cds_location =
            if feat.feat_type.name == "mRNA" {
                // the CDS extent is the min/max over the exon parts only
                let mut cds_start = u32::MAX;
                let mut cds_end = 0;

                for part in &parts {
                    if part.feature_type == FeatureType::Exon {
                        if part.location.start_pos < cds_start {
                            cds_start = part.location.start_pos;
                        }
                        if part.location.end_pos > cds_end {
                            cds_end = part.location.end_pos;
                        }
                    }
                }

                if cds_end == 0 {
                    // no exons were seen, so there is no CDS
                    None
                } else {
                    // the phase comes from the mRNA's own featureloc
                    if let Some(mrna_location) = feat.featurelocs.borrow().get(0) {
                        let first_part_loc = &parts[0].location;

                        Some(ChromosomeLocation {
                            chromosome_name: first_part_loc.chromosome_name.clone(),
                            start_pos: cds_start,
                            end_pos: cds_end,
                            strand: first_part_loc.strand.clone(),
                            phase: make_phase(&mrna_location),
                        })
                    } else {
                        None
                    }
                }
            } else {
                None
            };

        let transcript = TranscriptDetails {
            uniquename: transcript_uniquename.clone(),
            location: transcript_location,
            transcript_type: feat.feat_type.name.clone(),
            parts,
            protein: None,
            cds_location: maybe_cds_location,
        };

        if let Some(gene_uniquename) =
            self.genes_of_transcripts.get(&transcript_uniquename) {
            let gene_details = self.genes.get_mut(gene_uniquename).unwrap();
            // e.g. "mRNA gene"; pseudogenes keep their plain feature type
            if gene_details.feature_type != "pseudogene" {
                gene_details.feature_type =
                    transcript.transcript_type.clone() + " " + &gene_details.feature_type;
            }
            gene_details.transcripts.push(transcript);
        } else {
            panic!("can't find gene for transcript: {}", transcript_uniquename);
        }
    }
    /// Build a ProteinDetails from a polypeptide feature (its residues plus
    /// numeric properties parsed from featureprops) and attach it to the
    /// transcript it derives from.  Panics when the residues, the
    /// molecular_weight prop, the transcript or the gene are missing, or
    /// when the gene has multiple transcripts (not yet supported).
    fn store_protein_details(&mut self, feat: &Feature) {
        if let Some(residues) = feat.residues.clone() {
            let protein_uniquename = feat.uniquename.clone();

            let mut molecular_weight = None;
            let mut average_residue_weight = None;
            let mut charge_at_ph7 = None;
            let mut isoelectric_point = None;
            let mut codon_adaptation_index = None;

            // parse an optional featureprop value as f32, warning (not
            // failing) when the value doesn't parse
            let parse_prop_as_f32 = |p: &Option<String>| {
                if let Some(prop_value) = p.clone() {
                    let maybe_value = prop_value.parse();
                    if let Ok(parsed_prop) = maybe_value {
                        Some(parsed_prop)
                    } else {
                        println!("{}: couldn't parse {} as f32",
                                 feat.uniquename, prop_value);
                        None
                    }
                } else {
                    None
                }
            };

            for prop in feat.featureprops.borrow().iter() {
                if prop.prop_type.name == "molecular_weight" {
                    if let Some(value) = parse_prop_as_f32(&prop.value) {
                        // scaled down by 1000 (presumably Da -> kDa; confirm)
                        molecular_weight = Some(value / 1000.0);
                    }
                }
                if prop.prop_type.name == "average_residue_weight" {
                    if let Some(value) = parse_prop_as_f32(&prop.value) {
                        average_residue_weight = Some(value / 1000.0);
                    }
                }
                if prop.prop_type.name == "charge_at_ph7" {
                    charge_at_ph7 = parse_prop_as_f32(&prop.value);
                }
                if prop.prop_type.name == "isoelectric_point" {
                    isoelectric_point = parse_prop_as_f32(&prop.value);
                }
                if prop.prop_type.name == "codon_adaptation_index" {
                    codon_adaptation_index = parse_prop_as_f32(&prop.value);
                }
            }

            // NOTE(review): only molecular_weight is checked before the
            // unwrap()s below; a missing/unparsable value for any of the
            // other four props would panic without a message
            if molecular_weight.is_none() {
                panic!("{} has no molecular_weight", feat.uniquename)
            }

            let protein = ProteinDetails {
                uniquename: feat.uniquename.clone(),
                sequence: residues,
                molecular_weight: molecular_weight.unwrap(),
                average_residue_weight: average_residue_weight.unwrap(),
                charge_at_ph7: charge_at_ph7.unwrap(),
                isoelectric_point: isoelectric_point.unwrap(),
                codon_adaptation_index: codon_adaptation_index.unwrap(),
            };

            // walk polypeptide -> transcript -> gene to attach the protein
            if let Some(transcript_uniquename) =
                self.transcripts_of_polypeptides.get(&protein_uniquename) {
                if let Some(gene_uniquename) =
                    self.genes_of_transcripts.get(transcript_uniquename) {
                    let gene_details = self.genes.get_mut(gene_uniquename).unwrap();
                    if gene_details.transcripts.len() > 1 {
                        panic!("unimplemented - can't handle multiple transcripts for: {}",
                               gene_uniquename);
                    } else {
                        if gene_details.transcripts.is_empty() {
                            panic!("gene has no transcript: {}", gene_uniquename);
                        } else {
                            gene_details.transcripts[0].protein = Some(protein);
                        }
                    }
                } else {
                    panic!("can't find gene for transcript: {}", transcript_uniquename);
                }
            } else {
                panic!("can't find transcript of polypeptide: {}", protein_uniquename)
            }
        } else {
            panic!("no residues for protein: {}", feat.uniquename);
        }
    }
fn store_chromosome_details(&mut self, feat: &Feature) {
let mut ena_identifier = None;
for prop in feat.featureprops.borrow().iter() {
if prop.prop_type.name == "ena_id" {
ena_identifier = prop.value.clone()
}
}
if feat.residues.is_none() {
panic!("{:?}", feat.uniquename);
}
let org = make_organism(&feat.organism);
let chr = ChromosomeDetails {
name: feat.uniquename.clone(),
residues: feat.residues.clone().unwrap(),
ena_identifier: ena_identifier.unwrap(),
gene_uniquenames: vec![],
taxonid: org.taxonid,
};
self.chromosomes.insert(feat.uniquename.clone(), chr);
}
    /// Build and store a GenotypeDetails from a genotype feature.  The
    /// genotype is keyed by a display name derived from its alleles; the
    /// alleles are ordered by their display names.  A "genotype_background"
    /// featureprop, when present, is recorded in genotype_backgrounds.
    fn store_genotype_details(&mut self, feat: &Feature) {
        let mut expressed_alleles =
            self.alleles_of_genotypes[&feat.uniquename].clone();

        let genotype_display_uniquename =
            make_genotype_display_name(&expressed_alleles, &self.alleles);

        {
            // order the alleles by their display names
            let allele_cmp = |allele1: &ExpressedAllele, allele2: &ExpressedAllele| {
                let allele1_display_name =
                    allele_display_name(&self.alleles[&allele1.allele_uniquename]);
                let allele2_display_name =
                    allele_display_name(&self.alleles[&allele2.allele_uniquename]);
                allele1_display_name.cmp(&allele2_display_name)
            };
            expressed_alleles.sort_by(&allele_cmp);
        }

        for prop in feat.featureprops.borrow().iter() {
            if prop.prop_type.name == "genotype_background" {
                if let Some(ref background) = prop.value {
                    self.genotype_backgrounds.insert(feat.uniquename.clone(),
                                                     background.clone());
                }
            }
        }

        self.genotypes.insert(genotype_display_uniquename.clone(),
                              GenotypeDetails {
                                  display_uniquename: genotype_display_uniquename,
                                  name: feat.name.clone(),
                                  expressed_alleles,
                                  cv_annotations: HashMap::new(),
                                  genes_by_uniquename: HashMap::new(),
                                  alleles_by_uniquename: HashMap::new(),
                                  references_by_uniquename: HashMap::new(),
                                  terms_by_termid: HashMap::new(),
                                  annotation_details: HashMap::new(),
                              });
    }
fn store_allele_details(&mut self, feat: &Feature) {
let mut allele_type = None;
let mut description = None;
for prop in feat.featureprops.borrow().iter() {
match &prop.prop_type.name as &str {
"allele_type" =>
allele_type = prop.value.clone(),
"description" =>
description = prop.value.clone(),
_ => ()
}
}
if allele_type.is_none() {
panic!("no allele_type cvtermprop for {}", &feat.uniquename);
}
let gene_uniquename =
self.genes_of_alleles[&feat.uniquename].clone();
let allele_details = AlleleShort {
uniquename: feat.uniquename.clone(),
name: feat.name.clone(),
gene_uniquename,
allele_type: allele_type.unwrap(),
description,
};
self.alleles.insert(feat.uniquename.clone(), allele_details);
}
fn process_chromosome_features(&mut self) {
// we need to process all chromosomes before other featuers
for feat in &self.raw.features {
if feat.feat_type.name == "chromosome" {
self.store_chromosome_details(feat);
}
}
}
fn process_features(&mut self) {
// we need to process all genes before transcripts
for feat in &self.raw.features {
if feat.feat_type.name == "gene" || feat.feat_type.name == "pseudogene" {
self.store_gene_details(feat);
}
}
for feat in &self.raw.features {
if TRANSCRIPT_FEATURE_TYPES.contains(&feat.feat_type.name.as_str()) {
self.store_transcript_details(feat)
}
}
for feat in &self.raw.features {
if feat.feat_type.name == "polypeptide"{
self.store_protein_details(feat);
}
}
for feat in &self.raw.features {
if !TRANSCRIPT_FEATURE_TYPES.contains(&feat.feat_type.name.as_str()) &&
!TRANSCRIPT_PART_TYPES.contains(&feat.feat_type.name.as_str()) &&
!HANDLED_FEATURE_TYPES.contains(&feat.feat_type.name.as_str())
{
// for now, ignore features without locations
if feat.featurelocs.borrow().len() > 0 {
let feature_short = make_feature_short(&self.chromosomes, &feat);
self.other_features.insert(feat.uniquename.clone(), feature_short);
}
}
}
}
fn add_interesting_parents(&mut self) {
let mut interesting_parents_by_termid: HashMap<String, HashSet<String>> =
HashMap::new();
for cvtermpath in &self.raw.cvtermpaths {
let subject_term = &cvtermpath.subject;
let subject_termid = subject_term.termid();
let object_term = &cvtermpath.object;
let object_termid = object_term.termid();
let rel_termid =
match cvtermpath.rel_type {
Some(ref rel_type) => {
rel_type.termid()
},
None => panic!("no relation type for {} <-> {}\n",
&subject_term.name, &object_term.name)
};
let rel_term_name =
self.make_term_short(&rel_termid).name;
if self.is_interesting_parent(&object_termid, &rel_term_name) {
interesting_parents_by_termid
.entry(subject_termid.clone())
.or_insert_with(HashSet::new)
.insert(object_termid);
};
}
for (termid, interesting_parents) in interesting_parents_by_termid {
let term_details = self.terms.get_mut(&termid).unwrap();
term_details.interesting_parents = interesting_parents;
}
}
fn process_allele_features(&mut self) {
for feat in &self.raw.features {
if feat.feat_type.name == "allele" {
self.store_allele_details(feat);
}
}
}
fn process_genotype_features(&mut self) {
for feat in &self.raw.features {
if feat.feat_type.name == "genotype" {
self.store_genotype_details(feat);
}
}
}
    /// For each located gene, store its "neighbourhood": up to
    /// GENE_NEIGHBOURHOOD_DISTANCE genes before and after it on the same
    /// chromosome, in chromosome order, with the gene itself in the middle.
    fn add_gene_neighbourhoods(&mut self) {
        struct GeneAndLoc {
            gene_uniquename: String,
            loc: ChromosomeLocation,
        };
        let mut genes_and_locs: Vec<GeneAndLoc> = vec![];

        // only genes with a location take part
        for gene_details in self.genes.values() {
            if let Some(ref location) = gene_details.location {
                genes_and_locs.push(GeneAndLoc {
                    gene_uniquename: gene_details.uniquename.clone(),
                    loc: location.clone(),
                });
            }
        }

        // sort by chromosome then start position so neighbours are adjacent
        let cmp = |a: &GeneAndLoc, b: &GeneAndLoc| {
            let order = a.loc.chromosome_name.cmp(&b.loc.chromosome_name);
            if order == Ordering::Equal {
                a.loc.start_pos.cmp(&b.loc.start_pos)
            } else {
                order
            }
        };

        genes_and_locs.sort_by(cmp);

        for (i, this_gene_and_loc) in genes_and_locs.iter().enumerate() {
            let mut nearby_genes: Vec<GeneShort> = vec![];
            if i > 0 {
                let start_index =
                    if i > GENE_NEIGHBOURHOOD_DISTANCE {
                        i - GENE_NEIGHBOURHOOD_DISTANCE
                    } else {
                        0
                    };

                // walk backwards from this gene, stopping at a chromosome
                // boundary; insert at the front to keep chromosome order
                for back_index in (start_index..i).rev() {
                    let back_gene_and_loc = &genes_and_locs[back_index];

                    if back_gene_and_loc.loc.chromosome_name !=
                        this_gene_and_loc.loc.chromosome_name {
                            break;
                        }

                    let back_gene_short = self.make_gene_short(&back_gene_and_loc.gene_uniquename);
                    nearby_genes.insert(0, back_gene_short);
                }
            }

            // the gene itself sits between its up- and downstream neighbours
            let gene_short = self.make_gene_short(&this_gene_and_loc.gene_uniquename);
            nearby_genes.push(gene_short);

            if i < genes_and_locs.len() - 1 {
                let end_index =
                    if i + GENE_NEIGHBOURHOOD_DISTANCE >= genes_and_locs.len() {
                        genes_and_locs.len()
                    } else {
                        i + GENE_NEIGHBOURHOOD_DISTANCE + 1
                    };

                // walk forwards, again stopping at a chromosome boundary
                for forward_index in i+1..end_index {
                    let forward_gene_and_loc = &genes_and_locs[forward_index];

                    if forward_gene_and_loc.loc.chromosome_name !=
                        this_gene_and_loc.loc.chromosome_name {
                            break;
                        }

                    let forward_gene_short = self.make_gene_short(&forward_gene_and_loc.gene_uniquename);
                    nearby_genes.push(forward_gene_short);
                }
            }

            let this_gene_details =
                self.genes.get_mut(&this_gene_and_loc.gene_uniquename).unwrap();
            this_gene_details.gene_neighbourhood.append(&mut nearby_genes);
        }
    }
// add interaction, ortholog and paralog annotations
fn process_annotation_feature_rels(&mut self) {
for feature_rel in &self.raw.feature_relationships {
let rel_name = &feature_rel.rel_type.name;
let subject_uniquename = &feature_rel.subject.uniquename;
let object_uniquename = &feature_rel.object.uniquename;
for rel_config in &FEATURE_REL_CONFIGS {
if rel_name == rel_config.rel_type_name &&
is_gene_type(&feature_rel.subject.feat_type.name) &&
is_gene_type(&feature_rel.object.feat_type.name) {
let mut evidence: Option<Evidence> = None;
let mut is_inferred_interaction: bool = false;
let borrowed_publications = feature_rel.publications.borrow();
let maybe_publication = borrowed_publications.get(0);
let maybe_reference_uniquename =
match maybe_publication {
Some(publication) => Some(publication.uniquename.clone()),
None => None,
};
for prop in feature_rel.feature_relationshipprops.borrow().iter() {
if prop.prop_type.name == "evidence" {
if let Some(ref evidence_long) = prop.value {
for (evidence_code, ev_details) in &self.config.evidence_types {
if &ev_details.long == evidence_long {
evidence = Some(evidence_code.clone());
}
}
if evidence.is_none() {
evidence = Some(evidence_long.clone());
}
}
}
if prop.prop_type.name == "is_inferred" {
if let Some(is_inferred_value) = prop.value.clone() {
if is_inferred_value == "yes" {
is_inferred_interaction = true;
}
}
}
}
let evidence_clone = evidence.clone();
let gene_uniquename = subject_uniquename;
let gene_organism_taxonid = {
self.genes[subject_uniquename].taxonid
};
let other_gene_uniquename = object_uniquename;
let other_gene_organism_taxonid = {
self.genes[object_uniquename].taxonid
};
match rel_config.annotation_type {
FeatureRelAnnotationType::Interaction =>
if !is_inferred_interaction {
let interaction_annotation =
InteractionAnnotation {
gene_uniquename: gene_uniquename.clone(),
interactor_uniquename: other_gene_uniquename.clone(),
evidence,
reference_uniquename: maybe_reference_uniquename.clone(),
};
{
let gene_details = self.genes.get_mut(subject_uniquename).unwrap();
if rel_name == "interacts_physically" {
gene_details.physical_interactions.push(interaction_annotation.clone());
} else {
if rel_name == "interacts_genetically" {
gene_details.genetic_interactions.push(interaction_annotation.clone());
} else {
panic!("unknown interaction type: {}", rel_name);
}
};
}
if gene_uniquename != other_gene_uniquename {
let other_gene_details = self.genes.get_mut(object_uniquename).unwrap();
if rel_name == "interacts_physically" {
other_gene_details.physical_interactions.push(interaction_annotation.clone());
} else {
if rel_name == "interacts_genetically" {
other_gene_details.genetic_interactions.push(interaction_annotation.clone());
} else {
panic!("unknown interaction type: {}", rel_name);
}
};
}
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if rel_name == "interacts_physically" {
ref_details.physical_interactions.push(interaction_annotation.clone());
} else {
if rel_name == "interacts_genetically" {
ref_details.genetic_interactions.push(interaction_annotation.clone());
} else {
panic!("unknown interaction type: {}", rel_name);
}
};
}
},
FeatureRelAnnotationType::Ortholog => {
let ortholog_annotation =
OrthologAnnotation {
gene_uniquename: gene_uniquename.clone(),
ortholog_uniquename: other_gene_uniquename.clone(),
ortholog_taxonid: other_gene_organism_taxonid,
evidence,
reference_uniquename: maybe_reference_uniquename.clone(),
};
let gene_details = self.genes.get_mut(subject_uniquename).unwrap();
gene_details.ortholog_annotations.push(ortholog_annotation.clone());
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if self.config.load_organism_taxonid == gene_details.taxonid {
ref_details.ortholog_annotations.push(ortholog_annotation);
}
}
},
FeatureRelAnnotationType::Paralog => {
let paralog_annotation =
ParalogAnnotation {
gene_uniquename: gene_uniquename.clone(),
paralog_uniquename: other_gene_uniquename.clone(),
evidence,
reference_uniquename: maybe_reference_uniquename.clone(),
};
let gene_details = self.genes.get_mut(subject_uniquename).unwrap();
gene_details.paralog_annotations.push(paralog_annotation.clone());
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if self.config.load_organism_taxonid == gene_details.taxonid {
ref_details.paralog_annotations.push(paralog_annotation);
}
}
}
}
// for orthologs and paralogs, store the reverse annotation too
let other_gene_details = self.genes.get_mut(object_uniquename).unwrap();
match rel_config.annotation_type {
FeatureRelAnnotationType::Interaction => {},
FeatureRelAnnotationType::Ortholog => {
let ortholog_annotation =
OrthologAnnotation {
gene_uniquename: other_gene_uniquename.clone(),
ortholog_uniquename: gene_uniquename.clone(),
ortholog_taxonid: gene_organism_taxonid,
evidence: evidence_clone,
reference_uniquename: maybe_reference_uniquename.clone(),
};
other_gene_details.ortholog_annotations.push(ortholog_annotation.clone());
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if self.config.load_organism_taxonid == other_gene_details.taxonid {
ref_details.ortholog_annotations.push(ortholog_annotation);
}
}
},
FeatureRelAnnotationType::Paralog => {
let paralog_annotation =
ParalogAnnotation {
gene_uniquename: other_gene_uniquename.clone(),
paralog_uniquename: gene_uniquename.clone(),
evidence: evidence_clone,
reference_uniquename: maybe_reference_uniquename.clone(),
};
other_gene_details.paralog_annotations.push(paralog_annotation.clone());
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if self.config.load_organism_taxonid == other_gene_details.taxonid {
ref_details.paralog_annotations.push(paralog_annotation);
}
}
},
}
}
}
}
for ref_details in self.references.values_mut() {
ref_details.physical_interactions.sort();
ref_details.genetic_interactions.sort();
ref_details.ortholog_annotations.sort();
ref_details.paralog_annotations.sort();
}
for gene_details in self.genes.values_mut() {
gene_details.physical_interactions.sort();
gene_details.genetic_interactions.sort();
gene_details.ortholog_annotations.sort();
gene_details.paralog_annotations.sort();
}
}
// find the extension_display_names config for the given termid and relation type name
fn matching_ext_config(&self, annotation_termid: &str,
                       rel_type_name: &str) -> Option<ExtensionDisplayNames> {
    let ext_configs = &self.config.extension_display_names;

    if let Some(annotation_term_details) = self.terms.get(annotation_termid) {
        for ext_config in ext_configs {
            if ext_config.rel_name == rel_type_name {
                // a config may be restricted to descendents of a given
                // term; an unrestricted config always matches
                // (borrow `if_descendent_of` - no need to clone just to
                // test set membership)
                if let Some(ref if_descendent_of) = ext_config.if_descendent_of {
                    if annotation_term_details.interesting_parents.contains(if_descendent_of) {
                        return Some(ext_config.clone());
                    }
                } else {
                    return Some(ext_config.clone());
                }
            }
        }
    } else {
        panic!("can't find details for term: {}\n", annotation_termid);
    }

    None
}
// create and returns any TargetOfAnnotations implied by the extension
//
// For every extension part that points at a gene and whose relation has a
// configured reciprocal display name, emit a (target gene, annotation) pair.
fn make_target_of_for_ext(&self, cv_name: &str,
                          genes: &[String],
                          maybe_genotype_uniquename: &Option<String>,
                          reference_uniquename: &Option<String>,
                          annotation_termid: &str,
                          extension: &[ExtPart]) -> Vec<(GeneUniquename, TargetOfAnnotation)> {
    let mut ret_vec = vec![];

    for ext_part in extension {
        let maybe_ext_config =
            self.matching_ext_config(annotation_termid, &ext_part.rel_type_name);
        if let ExtRange::Gene(ref target_gene_uniquename) = ext_part.ext_range {
            if let Some(ext_config) = maybe_ext_config {
                if let Some(reciprocal_display_name) = ext_config.reciprocal_display {
                    // The old code branched on maybe_genotype_uniquename.is_some()
                    // but both branches produced the same values (cloning None
                    // yields None), so the branch was dead and is removed.
                    ret_vec.push((target_gene_uniquename.clone(),
                                  TargetOfAnnotation {
                                      ontology_name: cv_name.into(),
                                      ext_rel_display_name: reciprocal_display_name,
                                      genes: genes.to_vec(),
                                      genotype_uniquename: maybe_genotype_uniquename.clone(),
                                      reference_uniquename: reference_uniquename.clone(),
                                  }));
                }
            }
        }
    }

    ret_vec
}
// Collect the TargetOfAnnotations implied by annotation extensions and store
// them on the target genes.
fn add_target_of_annotations(&mut self) {
    // target gene -> annotations targeting it; a HashSet de-duplicates
    // annotations collected via multiple terms
    let mut target_of_annotations: HashMap<GeneUniquename, HashSet<TargetOfAnnotation>> =
        HashMap::new();

    for term_details in self.terms.values() {
        for term_annotations in term_details.cv_annotations.values() {
            for term_annotation in term_annotations {
                'ANNOTATION: for annotation_id in &term_annotation.annotations {
                    let annotation = self.annotation_details
                        .get(annotation_id).expect("can't find OntAnnotationDetail");

                    if let Some(ref genotype_uniquename) = annotation.genotype {
                        let genotype = &self.genotypes[genotype_uniquename];
                        // NOTE(review): this abandons ALL remaining annotations
                        // of the term once one multi-allele genotype is seen;
                        // if the intent is to skip only this annotation it
                        // should be `continue 'ANNOTATION` - confirm
                        if genotype.expressed_alleles.len() > 1 {
                            break 'ANNOTATION;
                        }
                    }

                    let new_annotations =
                        self.make_target_of_for_ext(&term_details.cv_name,
                                                    &annotation.genes,
                                                    &annotation.genotype,
                                                    &annotation.reference,
                                                    &term_details.termid,
                                                    &annotation.extension);

                    for (target_gene_uniquename, new_annotation) in new_annotations {
                        // the key can be moved into entry() - it isn't used
                        // again, so the old clone() was redundant
                        target_of_annotations
                            .entry(target_gene_uniquename)
                            .or_default()
                            .insert(new_annotation);
                    }
                }
            }
        }
    }

    // store the collected annotations on each target gene (consume the map
    // instead of the old needless `mut` + drain())
    for (gene_uniquename, annotations) in target_of_annotations {
        let gene_details = self.genes.get_mut(&gene_uniquename).unwrap();
        gene_details.target_of_annotations = annotations.into_iter().collect();
    }
}
// Set the deletion_viability field of every gene from the phenotype
// annotations of its single-allele genotypes that carry a "deletion" allele.
fn set_deletion_viability(&mut self) {
    let mut gene_statuses = HashMap::new();

    // canonical key for a set of condition term IDs (sorted so the same
    // set always yields the same string)
    let condition_string =
        |condition_ids: HashSet<String>| {
            let mut ids_vec: Vec<String> = condition_ids.iter().cloned().collect();
            ids_vec.sort();
            ids_vec.join(" ")
        };

    let viable_termid = &self.config.viability_terms.viable;
    let inviable_termid = &self.config.viability_terms.inviable;

    for (gene_uniquename, gene_details) in &mut self.genes {
        let mut new_status = DeletionViability::Unknown;

        if let Some(single_allele_term_annotations) =
            gene_details.cv_annotations.get("single_allele_phenotype") {
            // condition-string -> term that gave a viable/inviable result
            // under those conditions
            let mut viable_conditions: HashMap<String, TermId> = HashMap::new();
            let mut inviable_conditions: HashMap<String, TermId> = HashMap::new();

            for term_annotation in single_allele_term_annotations {
                'ANNOTATION: for annotation_id in &term_annotation.annotations {
                    let annotation = self.annotation_details
                        .get(&annotation_id).expect("can't find OntAnnotationDetail");
                    // single_allele_phenotype annotations always have a genotype
                    let genotype_uniquename = annotation.genotype.as_ref().unwrap();
                    let genotype = &self.genotypes[genotype_uniquename];
                    let allele_uniquename =
                        genotype.expressed_alleles[0].allele_uniquename.clone();
                    let allele = &self.alleles[&allele_uniquename];

                    // only deletion alleles contribute to viability
                    if allele.allele_type != "deletion" {
                        continue 'ANNOTATION;
                    }

                    let term = &self.terms[&term_annotation.term];
                    let interesting_parents = &term.interesting_parents;
                    let conditions_as_string =
                        condition_string(annotation.conditions.clone());
                    // a term counts if it IS the configured viable/inviable
                    // term or has it among its interesting parents
                    if interesting_parents.contains(viable_termid) ||
                        *viable_termid == term_annotation.term {
                        viable_conditions.insert(conditions_as_string,
                                                 term_annotation.term.clone());
                    } else {
                        if interesting_parents.contains(inviable_termid) ||
                            *inviable_termid == term_annotation.term {
                            inviable_conditions.insert(conditions_as_string,
                                                       term_annotation.term.clone());
                        }
                    }
                }
            }

            if viable_conditions.is_empty() {
                if !inviable_conditions.is_empty() {
                    new_status = DeletionViability::Inviable;
                }
            } else {
                if inviable_conditions.is_empty() {
                    new_status = DeletionViability::Viable;
                } else {
                    new_status = DeletionViability::DependsOnConditions;

                    // warn about condition sets reported as both viable
                    // and inviable
                    let viable_conditions_set: HashSet<String> =
                        viable_conditions.keys().cloned().collect();
                    let inviable_conditions_set: HashSet<String> =
                        inviable_conditions.keys().cloned().collect();
                    let intersecting_conditions =
                        viable_conditions_set.intersection(&inviable_conditions_set);
                    if intersecting_conditions.clone().count() > 0 {
                        println!("{} is viable and inviable with", gene_uniquename);
                        for cond in intersecting_conditions {
                            if cond.is_empty() {
                                println!(" no conditions");
                            } else {
                                println!(" conditions: {}", cond);
                            }
                            println!(" viable term: {}",
                                     viable_conditions[cond]);
                            println!(" inviable term: {}",
                                     inviable_conditions[cond]);
                        }
                    }
                }
            }
        }
        gene_statuses.insert(gene_uniquename.clone(), new_status);
    }

    // apply the statuses in a second pass (the first pass holds a mutable
    // borrow of self.genes)
    for (gene_uniquename, status) in &gene_statuses {
        if let Some(ref mut gene_details) = self.genes.get_mut(gene_uniquename) {
            gene_details.deletion_viability = status.clone();
        }
    }
}
// Tag the TermDetails of every configured GO slim term with the
// "goslim_pombe" subset.
fn set_term_details_subsets(&mut self) {
    for go_slim_conf in self.config.go_slim_terms.clone() {
        let slim_termid = &go_slim_conf.termid;
        for term_details in self.terms.values_mut() {
            if term_details.termid == *slim_termid {
                term_details.subsets.push("goslim_pombe".into());
                // term IDs are unique so move on to the next slim term.
                // (The previous `break 'TERM` broke out of the OUTER loop,
                // so only the first configured slim term was ever tagged.)
                break;
            }
        }
    }
}
// Build a map from each gene uniquename to its GeneShort summary.
fn make_gene_short_map(&self) -> IdGeneShortMap {
    self.genes.keys()
        .map(|gene_uniquename| {
            (gene_uniquename.clone(),
             make_gene_short(&self.genes, gene_uniquename))
        })
        .collect()
}
// Build the cv_annotations summaries for all terms, genes, genotypes and
// references.
fn make_all_cv_summaries(&mut self) {
    let gene_short_map = self.make_gene_short_map();

    // terms: include both gene and genotype summaries
    for term_details in self.terms.values_mut() {
        make_cv_summaries(term_details, &self.config, &self.children_by_termid,
                          true, true, &gene_short_map, &self.annotation_details);
    }
    // genes: gene summaries only
    for gene_details in self.genes.values_mut() {
        make_cv_summaries(gene_details, &self.config, &self.children_by_termid,
                          false, true, &gene_short_map, &self.annotation_details);
    }
    // genotypes: neither
    for genotype_details in self.genotypes.values_mut() {
        make_cv_summaries(genotype_details, &self.config, &self.children_by_termid,
                          false, false, &gene_short_map, &self.annotation_details);
    }
    // references: both (config is now passed as `&self.config` in all four
    // calls - the term loop previously passed `self.config` inconsistently)
    for reference_details in self.references.values_mut() {
        make_cv_summaries(reference_details, &self.config, &self.children_by_termid,
                          true, true, &gene_short_map, &self.annotation_details);
    }
}
// Create a TermDetails entry for every cvterm that is not an annotation
// extension term (extension terms are processed separately).
fn process_cvterms(&mut self) {
    for cvterm in &self.raw.cvterms {
        if cvterm.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            continue;
        }

        let cv_config = self.config.cv_config_by_name(&cvterm.cv.name);
        let annotation_feature_type = cv_config.feature_type.clone();

        let synonyms: Vec<_> = cvterm.cvtermsynonyms.borrow().iter()
            .map(|synonym| SynonymDetails {
                synonym_type: synonym.synonym_type.name.clone(),
                name: synonym.name.clone(),
            })
            .collect();

        // most fields start empty and are filled in by later passes
        let term_details = TermDetails {
            name: cvterm.name.clone(),
            cv_name: cvterm.cv.name.clone(),
            annotation_feature_type,
            interesting_parents: HashSet::new(),
            subsets: vec![],
            termid: cvterm.termid(),
            synonyms,
            definition: cvterm.definition.clone(),
            direct_ancestors: vec![],
            genes_annotated_with: HashSet::new(),
            is_obsolete: cvterm.is_obsolete,
            single_allele_genotype_uniquenames: HashSet::new(),
            cv_annotations: HashMap::new(),
            genes_by_uniquename: HashMap::new(),
            genotypes_by_uniquename: HashMap::new(),
            alleles_by_uniquename: HashMap::new(),
            references_by_uniquename: HashMap::new(),
            terms_by_termid: HashMap::new(),
            annotation_details: HashMap::new(),
            gene_count: 0,
            genotype_count: 0,
        };

        self.terms.insert(cvterm.termid(), term_details);
    }
}
// Return the configured display name for an extension relation, falling
// back to the relation name with underscores replaced by spaces.
fn get_ext_rel_display_name(&self, annotation_termid: &str,
                            ext_rel_name: &str) -> String {
    if let Some(ext_conf) = self.matching_ext_config(annotation_termid, ext_rel_name) {
        // matching_ext_config() returns an owned config, so the display
        // name can be moved out - the old .clone() was redundant
        ext_conf.display_name
    } else {
        ext_rel_name.replace("_", " ")
    }
}
// Turn the "annotation_extension_relation-*" cvtermprops of extension terms
// into ExtPart lists keyed by the extension term ID.
fn process_extension_cvterms(&mut self) {
    for cvterm in &self.raw.cvterms {
        if cvterm.cv.name != POMBASE_ANN_EXT_TERM_CV_NAME {
            continue;
        }

        for cvtermprop in cvterm.cvtermprops.borrow().iter() {
            let prop_type_name = &cvtermprop.prop_type.name;
            if !prop_type_name.starts_with(ANNOTATION_EXT_REL_PREFIX) {
                continue;
            }

            // the relation name follows the prefix in the prop type name
            let ext_rel_name =
                String::from(&prop_type_name[ANNOTATION_EXT_REL_PREFIX.len()..]);
            let ext_range = cvtermprop.value.clone();

            // values starting with "SP" are treated as genes (or
            // promoters); everything else is left uninterpreted
            let range: ExtRange =
                if ext_range.starts_with("SP") {
                    if ext_range.ends_with("-promoter") {
                        ExtRange::Promoter(ext_range)
                    } else {
                        ExtRange::Gene(ext_range)
                    }
                } else {
                    ExtRange::Misc(ext_range)
                };

            if let Some(base_termid) =
                self.base_term_of_extensions.get(&cvterm.termid()) {
                let rel_type_display_name =
                    self.get_ext_rel_display_name(base_termid, &ext_rel_name);

                self.parts_of_extensions.entry(cvterm.termid())
                    .or_insert_with(Vec::new).push(ExtPart {
                        rel_type_name: ext_rel_name,
                        rel_type_display_name,
                        ext_range: range,
                    });
            } else {
                panic!("can't find details for term: {}\n", cvterm.termid());
            }
        }
    }
}
// Process cvterm relationships in two passes:
//  1. record the base (is_a parent) of each extension term, and the direct
//     ancestors of ordinary terms
//  2. turn the non-is_a relations of extension terms into extension parts
fn process_cvterm_rels(&mut self) {
    for cvterm_rel in &self.raw.cvterm_relationships {
        let subject_term = &cvterm_rel.subject;
        let object_term = &cvterm_rel.object;
        let rel_type = &cvterm_rel.rel_type;

        if subject_term.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            if rel_type.name == "is_a" {
                // termid() returns an owned value, so no clone is needed
                self.base_term_of_extensions.insert(subject_term.termid(),
                                                    object_term.termid());
            }
        } else {
            let object_term_short =
                self.make_term_short(&object_term.termid());
            if let Some(ref mut subject_term_details) = self.terms.get_mut(&subject_term.termid()) {
                subject_term_details.direct_ancestors.push(TermAndRelation {
                    termid: object_term_short.termid.clone(),
                    term_name: object_term_short.name.clone(),
                    relation_name: rel_type.name.clone(),
                });
            }
        }
    }

    for cvterm_rel in &self.raw.cvterm_relationships {
        let subject_term = &cvterm_rel.subject;
        let object_term = &cvterm_rel.object;
        let rel_type = &cvterm_rel.rel_type;

        if subject_term.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME &&
            rel_type.name != "is_a" {
            let subject_termid = subject_term.termid();
            if let Some(base_termid) =
                self.base_term_of_extensions.get(&subject_termid) {
                let rel_type_display_name =
                    self.get_ext_rel_display_name(base_termid, &rel_type.name);

                self.parts_of_extensions.entry(subject_termid)
                    .or_insert_with(Vec::new).push(ExtPart {
                        rel_type_name: rel_type.name.clone(),
                        rel_type_display_name,
                        ext_range: ExtRange::Term(object_term.termid()),
                    });
            } else {
                // the lookup that failed is for the SUBJECT's base term -
                // the old message misleadingly printed the object termid
                panic!("can't find details for {}\n", subject_termid);
            }
        }
    }
}
fn process_feature_synonyms(&mut self) {
for feature_synonym in &self.raw.feature_synonyms {
let feature = &feature_synonym.feature;
let synonym = &feature_synonym.synonym;
if let Some(ref mut gene_details) = self.genes.get_mut(&feature.uniquename) {
gene_details.synonyms.push(SynonymDetails {
name: synonym.name.clone(),
synonym_type: synonym.synonym_type.name.clone()
});
}
}
}
// Build a GenotypeShort summary from the stored GenotypeDetails; panics if
// the genotype is unknown.
fn make_genotype_short(&self, genotype_display_name: &str) -> GenotypeShort {
    match self.genotypes.get(genotype_display_name) {
        Some(details) =>
            GenotypeShort {
                display_uniquename: details.display_uniquename.clone(),
                name: details.name.clone(),
                expressed_alleles: details.expressed_alleles.clone(),
            },
        None => panic!("can't find genotype {}", genotype_display_name),
    }
}
// Return an owned copy of the stored allele; panics (via indexing) if the
// allele uniquename is unknown.
fn make_allele_short(&self, allele_uniquename: &str) -> AlleleShort {
    let allele = &self.alleles[allele_uniquename];
    allele.to_owned()
}
// process feature properties stored as cvterms,
// eg. characterisation_status and product
fn process_props_from_feature_cvterms(&mut self) {
    for feature_cvterm in &self.raw.feature_cvterms {
        let feature = &feature_cvterm.feature;
        let cvterm = &feature_cvterm.cvterm;

        // for gene product terms, resolve the annotated feature to its
        // gene(s): polypeptide -> transcript -> gene, transcript -> gene,
        // or the gene itself
        // (flattened from the previous deeply nested else { if ... } chain)
        let gene_uniquenames_vec: Vec<GeneUniquename> =
            if cvterm.cv.name != "PomBase gene products" {
                vec![]
            } else if feature.feat_type.name == "polypeptide" {
                if let Some(gene_uniquename) =
                    self.transcripts_of_polypeptides.get(&feature.uniquename)
                        .and_then(|transcript_uniquename|
                                  self.genes_of_transcripts.get(transcript_uniquename)) {
                    vec![gene_uniquename.clone()]
                } else {
                    vec![]
                }
            } else if TRANSCRIPT_FEATURE_TYPES.contains(&feature.feat_type.name.as_str()) {
                if let Some(gene_uniquename) =
                    self.genes_of_transcripts.get(&feature.uniquename) {
                    vec![gene_uniquename.clone()]
                } else {
                    vec![]
                }
            } else if feature.feat_type.name == "gene" {
                vec![feature.uniquename.clone()]
            } else {
                vec![]
            };

        for gene_uniquename in &gene_uniquenames_vec {
            self.add_gene_product(&gene_uniquename, &cvterm.name);
        }

        if feature.feat_type.name == "gene" || feature.feat_type.name == "pseudogene" {
            if cvterm.cv.name == "PomBase gene characterisation status" {
                self.add_characterisation_status(&feature.uniquename, &cvterm.name);
            } else if cvterm.cv.name == "name_description" {
                self.add_name_description(&feature.uniquename, &cvterm.name);
            }
        }
    }
}
fn get_gene_prod_extension(&self, prod_value: &str) -> ExtPart {
let ext_range =
if prod_value.starts_with("PR:") {
ExtRange::GeneProduct(prod_value.to_owned())
} else {
ExtRange::Misc(prod_value.to_owned())
};
ExtPart {
rel_type_name: "active_form".into(),
rel_type_display_name: "active form".into(),
ext_range,
}
}
// return a fake extension for "with" properties on protein binding annotations
fn get_with_extension(&self, with_value: &str) -> ExtPart {
    // classify the with value (flattened from the previous nested
    // else { if ... } chains):
    // NOTE(review): the "SP%" prefix test looks odd (literal percent sign) -
    // confirm it isn't a mangled "SP" or SQL-LIKE pattern
    let ext_range =
        if with_value.starts_with("SP%") {
            ExtRange::Gene(with_value.to_owned())
        } else if with_value.starts_with("PomBase:SP") {
            // strip the "PomBase:" prefix (8 chars) to get the uniquename
            let gene_uniquename =
                String::from(&with_value[8..]);
            ExtRange::Gene(gene_uniquename)
        } else if with_value.to_lowercase().starts_with("pfam:") {
            ExtRange::Domain(with_value.to_owned())
        } else {
            ExtRange::Misc(with_value.to_owned())
        };

    // a with property on a protein binding (GO:0005515) is
    // displayed as a binds extension
    // https://github.com/pombase/website/issues/108
    ExtPart {
        rel_type_name: "binds".into(),
        rel_type_display_name: "binds".into(),
        ext_range,
    }
}
// Interpret a "with"/"from" value: a known gene (after stripping the
// database prefix), a known term, or an uninterpreted identifier.
fn make_with_or_from_value(&self, with_or_from_value: String) -> WithFromValue {
    // Strip a leading "<DB_NAME>:" prefix. This replaces the old anchored
    // Regex, which was recompiled on every call; behavior is identical as
    // long as DB_NAME contains no regex metacharacters (which the old
    // pattern also assumed).
    let db_prefix = String::from(DB_NAME) + ":";
    let gene_uniquename: String =
        if with_or_from_value.starts_with(&db_prefix) {
            with_or_from_value[db_prefix.len()..].to_owned()
        } else {
            with_or_from_value.clone()
        };

    if self.genes.contains_key(&gene_uniquename) {
        let gene_short = self.make_gene_short(&gene_uniquename);
        WithFromValue::Gene(gene_short)
    } else if self.terms.contains_key(&with_or_from_value) {
        WithFromValue::Term(self.make_term_short(&with_or_from_value))
    } else {
        WithFromValue::Identifier(with_or_from_value)
    }
}
// add the with value as a fake extension if the cvterm is_a protein binding,
// otherwise return the value
fn make_with_extension(&self, termid: &str, evidence_code: Option<String>,
                       extension: &mut Vec<ExtPart>,
                       with_value: String) -> Option<WithFromValue> {
    // for extension terms, classify by their base term
    let base_termid =
        match self.base_term_of_extensions.get(termid) {
            Some(base_termid) => base_termid.clone(),
            None => termid.to_owned(),
        };

    let base_term_short = self.make_term_short(&base_termid);

    // compare without the old is_some() && unwrap() pattern
    let is_ipi =
        evidence_code.as_ref().map(String::as_str) == Some("IPI");

    // IPI annotations to protein binding (GO:0005515) or RNA binding
    // (GO:0003723), or their descendents, become a "binds" extension
    // add new IDs to the interesting_parents config
    if is_ipi &&
        (base_term_short.termid == "GO:0005515" ||
         base_term_short.interesting_parents.contains("GO:0005515") ||
         base_term_short.termid == "GO:0003723" ||
         base_term_short.interesting_parents.contains("GO:0003723")) {
        extension.push(self.get_with_extension(&with_value));
        None
    } else {
        Some(self.make_with_or_from_value(with_value))
    }
}
// process annotation
//
// Turn each feature_cvterm into an OntAnnotationDetail: collect its props
// (evidence, conditions, with/from, qualifiers, expression values), resolve
// the annotated feature to its gene(s)/genotype, then store the annotation.
fn process_feature_cvterms(&mut self) {
    for feature_cvterm in &self.raw.feature_cvterms {
        let feature = &feature_cvterm.feature;
        let cvterm = &feature_cvterm.cvterm;
        let mut extension = vec![];

        // these CVs hold feature properties, not annotations - they are
        // handled by process_props_from_feature_cvterms()
        if cvterm.cv.name == "PomBase gene characterisation status" ||
            cvterm.cv.name == "PomBase gene products" ||
            cvterm.cv.name == "name_description" {
            continue;
        }

        let publication = &feature_cvterm.publication;
        let mut extra_props: HashMap<String, String> = HashMap::new();
        let mut conditions: HashSet<TermId> = HashSet::new();
        let mut withs: Vec<WithFromValue> = vec![];
        let mut froms: Vec<WithFromValue> = vec![];
        let mut qualifiers: Vec<Qualifier> = vec![];
        let mut evidence: Option<String> = None;
        let mut genotype_background: Option<String> = None;

        // need to get evidence first as it's used later
        // See: https://github.com/pombase/website/issues/455
        for prop in feature_cvterm.feature_cvtermprops.borrow().iter() {
            if &prop.type_name() == "evidence" {
                if let Some(ref evidence_long) = prop.value {
                    // map a long evidence description to its code, keeping
                    // the description itself if no code matches
                    for (evidence_code, ev_details) in &self.config.evidence_types {
                        if &ev_details.long == evidence_long {
                            evidence = Some(evidence_code.clone());
                        }
                    }
                    if evidence.is_none() {
                        evidence = Some(evidence_long.clone());
                    }
                }
            }
        }

        // second pass: all other props
        for prop in feature_cvterm.feature_cvtermprops.borrow().iter() {
            match &prop.type_name() as &str {
                "residue" | "scale" |
                "quant_gene_ex_copies_per_cell" |
                "quant_gene_ex_avg_copies_per_cell" => {
                    if let Some(value) = prop.value.clone() {
                        extra_props.insert(prop.type_name().clone(), value);
                    }
                },
                "condition" =>
                    if let Some(value) = prop.value.clone() {
                        conditions.insert(value.clone());
                    },
                "qualifier" =>
                    if let Some(value) = prop.value.clone() {
                        qualifiers.push(value);
                    },
                "with" => {
                    if let Some(with_value) = prop.value.clone() {
                        // may turn the with value into a fake "binds"
                        // extension instead of returning it
                        if let Some(with_gene_short) =
                            self.make_with_extension(&cvterm.termid(), evidence.clone(),
                                                     &mut extension, with_value) {
                            withs.push(with_gene_short);
                        }
                    }
                },
                "from" => {
                    if let Some(value) = prop.value.clone() {
                        froms.push(self.make_with_or_from_value(value));
                    }
                },
                "gene_product_form_id" => {
                    if let Some(value) = prop.value.clone() {
                        extension.push(self.get_gene_prod_extension(&value));
                    }
                },
                _ => ()
            }
        }

        let mut maybe_genotype_uniquename = None;

        // resolve the annotated feature to the gene(s) the annotation
        // applies to, depending on the feature type
        let mut gene_uniquenames_vec: Vec<GeneUniquename> =
            match &feature.feat_type.name as &str {
                "polypeptide" => {
                    // polypeptide -> transcript -> gene
                    if let Some(transcript_uniquename) =
                        self.transcripts_of_polypeptides.get(&feature.uniquename) {
                        if let Some(gene_uniquename) =
                            self.genes_of_transcripts.get(transcript_uniquename) {
                            vec![gene_uniquename.clone()]
                        } else {
                            vec![]
                        }
                    } else {
                        vec![]
                    }
                },
                "genotype" => {
                    // genotype annotations apply to the genes of all
                    // expressed alleles; also record the genotype name
                    // and background as a side effect
                    let expressed_alleles =
                        &self.alleles_of_genotypes[&feature.uniquename];
                    let genotype_display_name =
                        make_genotype_display_name(&expressed_alleles, &self.alleles);
                    maybe_genotype_uniquename = Some(genotype_display_name.clone());
                    genotype_background =
                        self.genotype_backgrounds.get(&feature.uniquename).map(String::clone);
                    expressed_alleles.iter()
                        .map(|expressed_allele| {
                            let allele_short =
                                self.make_allele_short(&expressed_allele.allele_uniquename);
                            allele_short.gene_uniquename.clone()
                        })
                        .collect()
                },
                _ => {
                    if feature.feat_type.name == "gene" || feature.feat_type.name == "pseudogene" {
                        vec![feature.uniquename.clone()]
                    } else {
                        if TRANSCRIPT_FEATURE_TYPES.contains(&feature.feat_type.name.as_str()) {
                            // transcript -> gene
                            if let Some(gene_uniquename) =
                                self.genes_of_transcripts.get(&feature.uniquename) {
                                vec![gene_uniquename.clone()]
                            } else {
                                vec![]
                            }
                        } else {
                            vec![]
                        }
                    }
                }
            };

        gene_uniquenames_vec.dedup();

        // normalise to the canonical gene uniquenames
        gene_uniquenames_vec =
            gene_uniquenames_vec.iter().map(|gene_uniquename: &String| {
                self.make_gene_short(&gene_uniquename).uniquename
            }).collect();

        // "null" means no publication
        let reference_uniquename =
            if publication.uniquename == "null" {
                None
            } else {
                Some(publication.uniquename.clone())
            };

        let mut extra_props_clone = extra_props.clone();
        let copies_per_cell = extra_props_clone.remove("quant_gene_ex_copies_per_cell");
        let avg_copies_per_cell = extra_props_clone.remove("quant_gene_ex_avg_copies_per_cell");
        let scale = extra_props_clone.remove("scale");

        // quantitative gene expression props, when present
        let gene_ex_props =
            if copies_per_cell.is_some() || avg_copies_per_cell.is_some() {
                Some(GeneExProps {
                    copies_per_cell,
                    avg_copies_per_cell,
                    scale,
                })
            } else {
                None
            };

        // only genotype annotations may involve multiple genes
        if gene_uniquenames_vec.len() > 1 && maybe_genotype_uniquename.is_none() {
            panic!("non-genotype annotation has more than one gene");
        }

        let annotation = OntAnnotationDetail {
            id: feature_cvterm.feature_cvterm_id,
            genes: gene_uniquenames_vec,
            reference: reference_uniquename.clone(),
            genotype: maybe_genotype_uniquename.clone(),
            genotype_background,
            withs: withs.clone(),
            froms: froms.clone(),
            residue: extra_props_clone.remove("residue"),
            gene_ex_props,
            qualifiers: qualifiers.clone(),
            evidence: evidence.clone(),
            conditions: conditions.clone(),
            extension: extension.clone(),
        };

        self.add_annotation(cvterm.borrow(), feature_cvterm.is_not,
                            annotation);
    }
}
// Partition the given annotation IDs of a term into per-CV
// OntTermAnnotations. Gene expression annotations are split into
// qualitative/quantitative; phenotype annotations into
// single-allele/multi-allele; all other CVs get one entry keyed by CV name.
fn make_term_annotations(&self, termid: &str, detail_ids: &[OntAnnotationId],
                         is_not: bool)
                         -> Vec<(CvName, OntTermAnnotations)> {
    let term_details = &self.terms[termid];
    let cv_name = term_details.cv_name.clone();

    match cv_name.as_ref() {
        "gene_ex" => {
            if is_not {
                panic!("gene_ex annotations can't be NOT annotations");
            }
            let mut qual_annotations =
                OntTermAnnotations {
                    term: termid.to_owned(),
                    is_not: false,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                    summary: None,
                };
            let mut quant_annotations =
                OntTermAnnotations {
                    term: termid.to_owned(),
                    is_not: false,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                    summary: None,
                };
            for annotation_id in detail_ids {
                let annotation = self.annotation_details.
                    get(&annotation_id).expect("can't find OntAnnotationDetail");
                // annotations carrying gene_ex_props are quantitative
                if annotation.gene_ex_props.is_some() {
                    quant_annotations.annotations.push(*annotation_id)
                } else {
                    qual_annotations.annotations.push(*annotation_id)
                }
            }

            // only emit non-empty categories
            let mut return_vec = vec![];

            if !qual_annotations.annotations.is_empty() {
                return_vec.push((String::from("qualitative_gene_expression"),
                                 qual_annotations));
            }

            if !quant_annotations.annotations.is_empty() {
                return_vec.push((String::from("quantitative_gene_expression"),
                                 quant_annotations));
            }

            return_vec
        },
        "fission_yeast_phenotype" => {
            let mut single_allele =
                OntTermAnnotations {
                    term: termid.to_owned(),
                    is_not,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                    summary: None,
                };
            let mut multi_allele =
                OntTermAnnotations {
                    term: termid.to_owned(),
                    is_not,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                    summary: None,
                };

            for annotation_id in detail_ids {
                let annotation = self.annotation_details.
                    get(&annotation_id).expect("can't find OntAnnotationDetail");
                // phenotype annotations always have a genotype
                let genotype_uniquename = annotation.genotype.as_ref().unwrap();
                if let Some(genotype_details) = self.genotypes.get(genotype_uniquename) {
                    // split by the number of expressed alleles
                    if genotype_details.expressed_alleles.len() == 1 {
                        single_allele.annotations.push(*annotation_id);
                    } else {
                        multi_allele.annotations.push(*annotation_id);
                    }
                } else {
                    panic!("can't find genotype details for {}\n", genotype_uniquename);
                }
            }

            // only emit non-empty categories
            let mut return_vec = vec![];

            if !single_allele.annotations.is_empty() {
                return_vec.push((String::from("single_allele_phenotype"),
                                 single_allele));
            }

            if !multi_allele.annotations.is_empty() {
                return_vec.push((String::from("multi_allele_phenotype"),
                                 multi_allele));
            }

            return_vec
        },
        _ => {
            // all other CVs: a single entry keyed by the CV name
            vec![(cv_name,
                  OntTermAnnotations {
                      term: termid.to_owned(),
                      is_not,
                      rel_names: HashSet::new(),
                      annotations: detail_ids.to_owned(),
                      summary: None,
                  })]
        }
    }
}
// store the OntTermAnnotations in the TermDetails, GeneDetails,
// GenotypeDetails and ReferenceDetails
fn store_ont_annotations(&mut self, is_not: bool) {
let ont_annotations = if is_not {
&self.all_not_ont_annotations
} else {
&self.all_ont_annotations
};
let mut gene_annotation_by_term: HashMap<GeneUniquename, HashMap<TermId, Vec<OntAnnotationId>>> =
HashMap::new();
let mut genotype_annotation_by_term: HashMap<GenotypeUniquename, HashMap<TermId, Vec<OntAnnotationId>>> =
HashMap::new();
let mut ref_annotation_by_term: HashMap<String, HashMap<TermId, Vec<OntAnnotationId>>> =
HashMap::new();
for (termid, annotations) in ont_annotations {
let mut sorted_annotations = annotations.clone();
if !is_not {
let cv_config = {
let term = &self.terms[termid];
&self.config.cv_config_by_name(&term.cv_name)
};
{
let cmp_detail_with_maps =
|id1: &i32, id2: &i32| {
let annotation1 = self.annotation_details.
get(&id1).expect("can't find OntAnnotationDetail");
let annotation2 = self.annotation_details.
get(&id2).expect("can't find OntAnnotationDetail");
let result =
cmp_ont_annotation_detail(cv_config,
annotation1, annotation2, &self.genes,
&self.genotypes,
&self.terms);
result.unwrap_or_else(|err| panic!("error from cmp_ont_annotation_detail: {}", err))
};
sorted_annotations.sort_by(cmp_detail_with_maps);
}
let new_annotations =
self.make_term_annotations(&termid, &sorted_annotations, is_not);
if let Some(ref mut term_details) = self.terms.get_mut(termid) {
for (cv_name, new_annotation) in new_annotations {
term_details.cv_annotations.entry(cv_name.clone())
.or_insert_with(Vec::new)
.push(new_annotation);
}
} else {
panic!("missing termid: {}\n", termid);
}
}
for annotation_id in sorted_annotations {
let annotation = self.annotation_details.
get(&annotation_id).expect("can't find OntAnnotationDetail");
for gene_uniquename in &annotation.genes {
gene_annotation_by_term.entry(gene_uniquename.clone())
.or_insert_with(HashMap::new)
.entry(termid.clone())
.or_insert_with(|| vec![])
.push(annotation_id);
}
if let Some(ref genotype_uniquename) = annotation.genotype {
let existing =
genotype_annotation_by_term.entry(genotype_uniquename.clone())
.or_insert_with(HashMap::new)
.entry(termid.clone())
.or_insert_with(|| vec![]);
if !existing.contains(&annotation_id) {
existing.push(annotation_id);
}
}
if let Some(reference_uniquename) = annotation.reference.clone() {
ref_annotation_by_term.entry(reference_uniquename)
.or_insert_with(HashMap::new)
.entry(termid.clone())
.or_insert_with(|| vec![])
.push(annotation_id);
}
for condition_termid in &annotation.conditions {
let cv_name =
if let Some(ref term_details) = self.terms.get(condition_termid) {
term_details.cv_name.clone()
} else {
panic!("can't find term details for {}", condition_termid);
};
if let Some(ref mut condition_term_details) =
self.terms.get_mut(&condition_termid.clone())
{
condition_term_details.cv_annotations
.entry(cv_name.clone())
.or_insert({
let mut new_vec = Vec::new();
let new_term_annotation =
OntTermAnnotations {
term: condition_termid.clone(),
is_not,
rel_names: HashSet::new(),
annotations: vec![],
summary: None,
};
new_vec.push(new_term_annotation);
new_vec
});
condition_term_details.cv_annotations.get_mut(&cv_name)
.unwrap()[0]
.annotations.push(annotation_id);
}
}
// Add annotations to terms referred to in extensions. They
// are added to fake CV that have a name starting with
// "extension:". The CV name will end with ":genotype" if the
// annotation is a phentoype/genotype, and will end with ":end"
// otherwise. The middle of the fake CV name is the display
// name for the extension relation.
// eg. "extension:directly activates:gene"
for ext_part in &annotation.extension {
if let ExtRange::Term(ref part_termid) = ext_part.ext_range {
let cv_name = "extension:".to_owned() + &ext_part.rel_type_display_name;
if let Some(ref mut part_term_details) =
self.terms.get_mut(part_termid)
{
let extension_cv_name =
if annotation.genotype.is_some() {
cv_name.clone() + ":genotype"
} else {
cv_name.clone() + ":gene"
};
part_term_details.cv_annotations
.entry(extension_cv_name.clone())
.or_insert({
let mut new_vec = Vec::new();
let new_term_annotation =
OntTermAnnotations {
term: part_termid.to_owned(),
is_not,
rel_names: HashSet::new(),
annotations: vec![],
summary: None,
};
new_vec.push(new_term_annotation);
new_vec
});
part_term_details.cv_annotations.get_mut(&extension_cv_name)
.unwrap()[0]
.annotations.push(annotation_id);
}
}
}
}
}
let mut term_names = HashMap::new();
for (termid, term_details) in &self.terms {
term_names.insert(termid.clone(), term_details.name.to_lowercase());
}
let ont_term_cmp = |ont_term_1: &OntTermAnnotations, ont_term_2: &OntTermAnnotations| {
if !ont_term_1.is_not && ont_term_2.is_not {
return Ordering::Less;
}
if ont_term_1.is_not && !ont_term_2.is_not {
return Ordering::Greater;
}
let term1 = &term_names[&ont_term_1.term];
let term2 = &term_names[&ont_term_2.term];
term1.cmp(&term2)
};
for (gene_uniquename, term_annotation_map) in &gene_annotation_by_term {
for (termid, details) in term_annotation_map {
let new_annotations =
self.make_term_annotations(&termid, &details, is_not);
let gene_details = self.genes.get_mut(gene_uniquename).unwrap();
for (cv_name, new_annotation) in new_annotations {
gene_details.cv_annotations.entry(cv_name.clone())
.or_insert_with(Vec::new)
.push(new_annotation);
}
}
let gene_details = self.genes.get_mut(gene_uniquename).unwrap();
for cv_annotations in gene_details.cv_annotations.values_mut() {
cv_annotations.sort_by(&ont_term_cmp)
}
}
for (genotype_uniquename, term_annotation_map) in &genotype_annotation_by_term {
for (termid, details) in term_annotation_map {
let new_annotations =
self.make_term_annotations(&termid, &details, is_not);
let details = self.genotypes.get_mut(genotype_uniquename).unwrap();
for (cv_name, new_annotation) in new_annotations {
details.cv_annotations.entry(cv_name.clone())
.or_insert_with(Vec::new)
.push(new_annotation);
}
}
let details = self.genotypes.get_mut(genotype_uniquename).unwrap();
for cv_annotations in details.cv_annotations.values_mut() {
cv_annotations.sort_by(&ont_term_cmp)
}
}
for (reference_uniquename, ref_annotation_map) in &ref_annotation_by_term {
for (termid, details) in ref_annotation_map {
let new_annotations =
self.make_term_annotations(&termid, &details, is_not);
let ref_details = self.references.get_mut(reference_uniquename).unwrap();
for (cv_name, new_annotation) in new_annotations {
ref_details.cv_annotations.entry(cv_name).or_insert_with(Vec::new)
.push(new_annotation.clone());
}
}
let ref_details = self.references.get_mut(reference_uniquename).unwrap();
for cv_annotations in ref_details.cv_annotations.values_mut() {
cv_annotations.sort_by(&ont_term_cmp)
}
}
}
// Returns true if the given term/relation pair could or should appear in
// the interesting_parents field of the TermDetails and TermShort structs.
fn is_interesting_parent(&self, termid: &str, rel_name: &str) -> bool {
    let candidate = InterestingParent {
        termid: termid.into(),
        rel_name: rel_name.into(),
    };
    self.possible_interesting_parents.contains(&candidate)
}
// Use the cvtermpath rows (pre-computed term relationship paths) to
// propagate annotations from descendant terms up to their ancestor terms,
// and record the annotated children of each term in self.children_by_termid.
fn process_cvtermpath(&mut self) {
    // Annotations to copy to an ancestor term, keyed by (CV name, ancestor
    // termid), then by descendant (source) termid, then by annotation ID;
    // the innermost set collects the relation names connecting the pair.
    let mut new_annotations: HashMap<(CvName, TermId), HashMap<TermId, HashMap<i32, HashSet<RelName>>>> =
        HashMap::new();
    let mut children_by_termid: HashMap<TermId, HashSet<TermId>> = HashMap::new();
    for cvtermpath in &self.raw.cvtermpaths {
        // In each path row the subject is the descendant term and the
        // object is the ancestor term.
        let subject_term = &cvtermpath.subject;
        let subject_termid = subject_term.termid();
        let object_term = &cvtermpath.object;
        let object_termid = object_term.termid();
        if let Some(subject_term_details) = self.terms.get(&subject_termid) {
            let rel_termid =
                match cvtermpath.rel_type {
                    Some(ref rel_type) => {
                        rel_type.termid()
                    },
                    None => panic!("no relation type for {} <-> {}\n",
                                   &subject_term.name, &object_term.name)
                };
            let rel_term_name =
                self.make_term_short(&rel_termid).name;
            // has_part paths only propagate for the CVs listed in
            // HAS_PART_CV_NAMES.
            if rel_term_name == "has_part" &&
                !HAS_PART_CV_NAMES.contains(&subject_term_details.cv_name.as_str()) {
                continue;
            }
            // Only propagate over the configured descendant relations.
            if !DESCENDANT_REL_NAMES.contains(&rel_term_name.as_str()) {
                continue;
            }
            // Record the child -> parent link, but only when both ends have
            // annotations of their own.
            if subject_term_details.cv_annotations.keys().len() > 0 {
                if let Some(object_term_details) = self.terms.get(&object_termid) {
                    if object_term_details.cv_annotations.keys().len() > 0 {
                        children_by_termid
                            .entry(object_termid.clone())
                            .or_insert_with(HashSet::new)
                            .insert(subject_termid.clone());
                    }
                }
            }
            // Queue every non-NOT annotation of the descendant for copying
            // to the ancestor term.
            for (cv_name, term_annotations) in &subject_term_details.cv_annotations {
                for term_annotation in term_annotations {
                    for annotation_id in &term_annotation.annotations {
                        let dest_termid = object_termid.clone();
                        let source_termid = subject_termid.clone();
                        if !term_annotation.is_not {
                            new_annotations.entry((cv_name.clone(), dest_termid))
                                .or_insert_with(HashMap::new)
                                .entry(source_termid)
                                .or_insert_with(HashMap::new)
                                .entry(*annotation_id)
                                .or_insert_with(HashSet::new)
                                .insert(rel_term_name.clone());
                        }
                    }
                }
            }
        } else {
            panic!("TermDetails not found for {}", &subject_termid);
        }
    }
    // Apply the queued annotations to the ancestor terms.
    for ((dest_cv_name, dest_termid), dest_annotations_map) in new_annotations.drain() {
        for (source_termid, source_annotations_map) in dest_annotations_map {
            // Flatten the per-annotation relation-name sets into a flat ID
            // list plus the union of all relation names.
            let mut new_annotations: Vec<OntAnnotationId> = vec![];
            let mut all_rel_names: HashSet<String> = HashSet::new();
            for (annotation_id, rel_names) in source_annotations_map {
                new_annotations.push(annotation_id);
                for rel_name in rel_names {
                    all_rel_names.insert(rel_name);
                }
            }
            let dest_cv_config = &self.config.cv_config_by_name(&dest_cv_name);
            // The block scope limits the borrow of self taken by the
            // comparison closure before self.terms is mutably borrowed below.
            {
                let cmp_detail_with_genotypes =
                    |id1: &i32, id2: &i32| {
                        let annotation1 = self.annotation_details.
                            get(&id1).expect("can't find OntAnnotationDetail");
                        let annotation2 = self.annotation_details.
                            get(&id2).expect("can't find OntAnnotationDetail");
                        let result =
                            cmp_ont_annotation_detail(dest_cv_config,
                                                      annotation1, annotation2, &self.genes,
                                                      &self.genotypes, &self.terms);
                        result.unwrap_or_else(|err| {
                            panic!("error from cmp_ont_annotation_detail: {} with terms: {} and {}",
                                   err, source_termid, dest_termid)
                        })
                    };
                new_annotations.sort_by(cmp_detail_with_genotypes);
            }
            let new_annotations =
                self.make_term_annotations(&source_termid, &new_annotations, false);
            let dest_term_details = {
                self.terms.get_mut(&dest_termid).unwrap()
            };
            // Attach the propagated annotations to the ancestor term,
            // tagged with the relation names that justified the propagation.
            for (_, new_annotation) in new_annotations {
                let mut new_annotation_clone = new_annotation.clone();
                new_annotation_clone.rel_names.extend(all_rel_names.clone());
                dest_term_details.cv_annotations
                    .entry(dest_cv_name.clone())
                    .or_insert_with(Vec::new)
                    .push(new_annotation_clone);
            }
        }
    }
    // Sort each term's annotation lists: NOT annotations last, otherwise by
    // case-insensitive term name.
    let mut term_names = HashMap::new();
    for (termid, term_details) in &self.terms {
        term_names.insert(termid.clone(), term_details.name.to_lowercase());
    }
    let ont_term_cmp = |ont_term_1: &OntTermAnnotations, ont_term_2: &OntTermAnnotations| {
        if !ont_term_1.is_not && ont_term_2.is_not {
            return Ordering::Less;
        }
        if ont_term_1.is_not && !ont_term_2.is_not {
            return Ordering::Greater;
        }
        let term1 = &term_names[&ont_term_1.term];
        let term2 = &term_names[&ont_term_2.term];
        term1.cmp(&term2)
    };
    for term_details in self.terms.values_mut() {
        for term_annotations in term_details.cv_annotations.values_mut() {
            term_annotations.sort_by(&ont_term_cmp);
        }
    }
    self.children_by_termid = children_by_termid;
}
/// Build the export Metadata: the Chado database creation datetime, the
/// version of each loaded CV, the exporter name/version and the gene and
/// term counts.
///
/// # Panics
///
/// Panics if the Chado database has no "db_creation_datetime" property.
fn make_metadata(&mut self) -> Metadata {
    // If the property appears more than once, the last row wins.
    let mut db_creation_datetime = None;
    for chadoprop in &self.raw.chadoprops {
        if chadoprop.prop_type.name == "db_creation_datetime" {
            db_creation_datetime = chadoprop.value.clone();
        }
    }

    // Version string of each CV, keyed by CV name.
    let mut cv_versions = HashMap::new();
    for cvprop in &self.raw.cvprops {
        if cvprop.prop_type.name == "cv_version" {
            cv_versions.insert(cvprop.cv.name.clone(), cvprop.value.clone());
        }
    }

    const PKG_NAME: &str = env!("CARGO_PKG_NAME");
    const VERSION: &str = env!("CARGO_PKG_VERSION");

    Metadata {
        export_prog_name: String::from(PKG_NAME),
        export_prog_version: String::from(VERSION),
        // expect() instead of a bare unwrap() so a missing property gives a
        // diagnosable panic message.
        db_creation_datetime: db_creation_datetime
            .expect("no db_creation_datetime property found in the Chado database"),
        gene_count: self.genes.len(),
        term_count: self.terms.len(),
        cv_versions,
    }
}
pub fn get_api_genotype_annotation(&self) -> HashMap<TermId, Vec<APIGenotypeAnnotation>>
{
let mut app_genotype_annotation = HashMap::new();
for term_details in self.terms.values() {
for annotations_vec in term_details.cv_annotations.values() {
for ont_term_annotations in annotations_vec {
'DETAILS: for annotation_id in &ont_term_annotations.annotations {
let annotation_details = self.annotation_details.
get(&annotation_id).expect("can't find OntAnnotationDetail");
if annotation_details.genotype.is_none() {
continue 'DETAILS;
}
let genotype_uniquename = annotation_details.genotype.clone().unwrap();
let genotype =
&term_details.genotypes_by_uniquename[&genotype_uniquename];
let mut api_annotation = APIGenotypeAnnotation {
is_multi: genotype.expressed_alleles.len() > 1,
alleles: vec![],
};
for allele in &genotype.expressed_alleles {
let allele_uniquename = &allele.allele_uniquename;
let allele_short =
self.alleles.get(allele_uniquename).expect("Can't find allele");
let allele_gene_uniquename =
allele_short.gene_uniquename.clone();
let allele_details = APIAlleleDetails {
gene: allele_gene_uniquename,
allele_type: allele_short.allele_type.clone(),
expression: allele.expression.clone(),
};
api_annotation.alleles.push(allele_details);
}
app_genotype_annotation
.entry(term_details.termid.clone())
.or_insert_with(|| vec![])
.push(api_annotation);
}
}
}
}
app_genotype_annotation
}
// For the gene query API: classify a gene by its cellular component
// annotation.  Returns the first configured GO component the gene is
// annotated with (directly or via an interesting parent), Some(Other) when
// it has component annotation matching none of the configured components,
// or None when it has no cellular component annotation at all.
fn make_gene_query_go_component_data(&self, gene_details: &GeneDetails) -> Option<GeneQueryTermData> {
    let go_components_config = &self.config.query_data_config.go_components;
    let component_term_annotations =
        gene_details.cv_annotations.get("cellular_component")?;

    // True when the gene has a non-NOT annotation to check_termid or to a
    // term that has check_termid among its interesting parents.
    let in_component = |check_termid: &str| {
        component_term_annotations.iter().any(|term_annotation| {
            let term_details = self.terms.get(&term_annotation.term)
                .unwrap_or_else(|| {
                    panic!("can't find TermDetails for {}", &term_annotation.term)
                });
            !term_annotation.is_not &&
                (term_annotation.term == check_termid ||
                 term_details.interesting_parents.contains(check_termid))
        })
    };

    for go_component_termid in go_components_config {
        if in_component(go_component_termid) {
            return Some(GeneQueryTermData::Term(TermAndName {
                termid: go_component_termid.to_owned(),
                name: self.terms.get(go_component_termid).unwrap().name.clone(),
            }));
        }
    }

    Some(GeneQueryTermData::Other)
}
// The set of taxon IDs of all orthologs of the given gene.
fn get_ortholog_taxonids(&self, gene_details: &GeneDetails)
                         -> HashSet<u32>
{
    gene_details.ortholog_annotations
        .iter()
        .map(|ortholog_annotation| ortholog_annotation.ortholog_taxonid)
        .collect()
}
// Build the per-gene data used by the advanced query system, keyed by gene
// uniquename.
fn make_gene_query_data_map(&self) -> HashMap<GeneUniquename, GeneQueryData> {
    self.genes.values()
        .map(|gene_details| {
            let gene_query_data = GeneQueryData {
                gene_uniquename: gene_details.uniquename.clone(),
                deletion_viability: gene_details.deletion_viability.clone(),
                go_component: self.make_gene_query_go_component_data(gene_details),
                ortholog_taxonids: self.get_ortholog_taxonids(gene_details),
            };
            (gene_details.uniquename.clone(), gene_query_data)
        })
        .collect()
}
/// Consume this instance and build the maps served by the web API: per-gene
/// summaries and interactors, query data, term summaries/details and the
/// term-to-gene and term-to-genotype-annotation indexes.
pub fn make_api_maps(mut self) -> APIMaps {
    let mut gene_summaries: HashMap<GeneUniquename, APIGeneSummary> = HashMap::new();
    let mut gene_name_gene_map = HashMap::new();
    let mut interactors_of_genes = HashMap::new();

    for (gene_uniquename, gene_details) in &self.genes {
        // Only genes of the organism being loaded get summaries and
        // interactor lists.
        if self.config.load_organism_taxonid == gene_details.taxonid {
            let gene_summary = self.make_api_gene_summary(&gene_uniquename);
            if let Some(ref gene_name) = gene_summary.name {
                gene_name_gene_map.insert(gene_name.clone(), gene_uniquename.clone());
            }
            gene_summaries.insert(gene_uniquename.clone(), gene_summary);

            // Collect the unique interactors of this gene.  An interaction
            // row names two genes; the interactor is whichever of the pair
            // isn't this gene.  Physical interactions come first, matching
            // the previous two-loop implementation.
            let mut interactors = vec![];
            let interaction_iter =
                gene_details.physical_interactions.iter()
                    .map(|annotation| (annotation, InteractionType::Physical))
                    .chain(gene_details.genetic_interactions.iter()
                           .map(|annotation| (annotation, InteractionType::Genetic)));
            for (interaction_annotation, interaction_type) in interaction_iter {
                let interactor_uniquename =
                    if gene_uniquename == &interaction_annotation.gene_uniquename {
                        interaction_annotation.interactor_uniquename.clone()
                    } else {
                        interaction_annotation.gene_uniquename.clone()
                    };
                let interactor = APIInteractor {
                    interaction_type,
                    interactor_uniquename,
                };
                if !interactors.contains(&interactor) {
                    interactors.push(interactor);
                }
            }
            interactors_of_genes.insert(gene_uniquename.clone(), interactors);
        }
    }

    let gene_query_data_map = self.make_gene_query_data_map();

    let mut term_summaries: HashSet<TermShort> = HashSet::new();
    let mut termid_genes: HashMap<TermId, HashSet<GeneUniquename>> = HashMap::new();
    let mut terms_for_api: HashMap<TermId, TermDetails> = HashMap::new();

    for termid in self.terms.keys() {
        term_summaries.insert(self.make_term_short(termid));
    }

    let termid_genotype_annotation: HashMap<TermId, Vec<APIGenotypeAnnotation>> =
        self.get_api_genotype_annotation();

    // drain() yields owned values, so move the TermDetails into
    // terms_for_api instead of deep-cloning each one as before.
    for (termid, term_details) in self.terms.drain() {
        if let Some(term_config) = self.config.cv_config.get(&term_details.cv_name) {
            // Only CVs configured with the "gene" feature type contribute
            // to the term -> annotated-genes index.
            if term_config.feature_type == "gene" {
                termid_genes.insert(termid.clone(),
                                    term_details.genes_annotated_with.clone());
            }
        }
        terms_for_api.insert(termid, term_details);
    }

    APIMaps {
        gene_summaries,
        gene_query_data_map,
        termid_genes,
        termid_genotype_annotation,
        term_summaries,
        genes: self.genes.clone(),
        gene_name_gene_map,
        genotypes: self.genotypes.clone(),
        // terms_for_api isn't used again, so move it rather than cloning
        // the whole map.
        terms: terms_for_api,
        interactors_of_genes,
        references: self.references.clone(),
        other_features: self.other_features.clone(),
        annotation_details: self.annotation_details.clone(),
    }
}
// Record, into the seen_* maps under `identifier`, the short form of every
// reference, gene, genotype, allele and term mentioned by the given
// annotations (including conditions and extension ranges).
fn add_cv_annotations_to_maps(&self,
                              identifier: &str,
                              cv_annotations: &OntAnnotationMap,
                              seen_references: &mut HashMap<String, ReferenceShortOptionMap>,
                              seen_genes: &mut HashMap<String, GeneShortOptionMap>,
                              seen_genotypes: &mut HashMap<String, GenotypeShortMap>,
                              seen_alleles: &mut HashMap<String, AlleleShortMap>,
                              seen_terms: &mut HashMap<String, TermShortOptionMap>) {
    for term_annotations in cv_annotations.values() {
        for term_annotation in term_annotations {
            self.add_term_to_hash(seen_terms, identifier, &term_annotation.term);

            for detail_id in &term_annotation.annotations {
                let detail = self.annotation_details.
                    get(&detail_id).expect("can't find OntAnnotationDetail");

                self.add_ref_to_hash(seen_references, identifier, &detail.reference);

                for condition_termid in &detail.conditions {
                    self.add_term_to_hash(seen_terms, identifier, condition_termid);
                }

                // Extension ranges can point at terms or genes; other range
                // kinds contribute nothing here.
                for ext_part in &detail.extension {
                    if let ExtRange::Term(ref range_termid) = ext_part.ext_range {
                        self.add_term_to_hash(seen_terms, identifier, range_termid);
                    } else if let ExtRange::Gene(ref range_gene_uniquename) = ext_part.ext_range {
                        self.add_gene_to_hash(seen_genes, identifier, range_gene_uniquename);
                    }
                }

                if let Some(ref genotype_uniquename) = detail.genotype {
                    self.add_genotype_to_hash(seen_genotypes, seen_alleles, seen_genes,
                                              identifier, &genotype_uniquename);
                }
            }
        }
    }
}
// Fill the *_by_uniquename lookup maps and the genes_annotated_with set of
// every TermDetails, from everything mentioned by the term's annotations.
fn set_term_details_maps(&mut self) {
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();
    // All genes annotated with each term, excluding "extension:" CVs.
    let mut genes_annotated_with_map: HashMap<TermId, HashSet<GeneUniquename>> =
        HashMap::new();
    // Phase 1: walk the annotations of every term (shared borrow of
    // self.terms) collecting what each term mentions.
    for (termid, term_details) in &self.terms {
        for (cv_name, term_annotations) in &term_details.cv_annotations {
            for term_annotation in term_annotations {
                self.add_term_to_hash(&mut seen_terms, termid,
                                      &term_annotation.term);
                for annotation_detail_id in &term_annotation.annotations {
                    let annotation_detail = self.annotation_details
                        .get(&annotation_detail_id).expect("can't find OntAnnotationDetail");
                    for gene_uniquename in &annotation_detail.genes {
                        self.add_gene_to_hash(&mut seen_genes, termid,
                                              gene_uniquename);
                        if !cv_name.starts_with("extension:") {
                            // prevent extension annotations from appearing
                            // in the normal query builder searches
                            genes_annotated_with_map
                                .entry(termid.clone()).or_insert_with(HashSet::new)
                                .insert(gene_uniquename.clone());
                        }
                    }
                    self.add_ref_to_hash(&mut seen_references, termid,
                                         &annotation_detail.reference);
                    for condition_termid in &annotation_detail.conditions {
                        self.add_term_to_hash(&mut seen_terms, termid,
                                              condition_termid);
                    }
                    // Extension ranges can point at terms or genes.
                    for ext_part in &annotation_detail.extension {
                        match ext_part.ext_range {
                            ExtRange::Term(ref range_termid) =>
                                self.add_term_to_hash(&mut seen_terms, termid,
                                                      range_termid),
                            ExtRange::Gene(ref ext_gene_uniquename) =>
                                self.add_gene_to_hash(&mut seen_genes, termid,
                                                      ext_gene_uniquename),
                            _ => {},
                        }
                    }
                    if let Some(ref genotype_uniquename) = annotation_detail.genotype {
                        self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles,
                                                  &mut seen_genes, termid,
                                                  &genotype_uniquename);
                    }
                }
            }
        }
    }
    // Phase 2: move the collected maps into each TermDetails (mutable
    // borrow of self.terms, so this can't be merged with phase 1).
    for (termid, term_details) in &mut self.terms {
        if let Some(genes) = seen_genes.remove(termid) {
            term_details.genes_by_uniquename = genes;
        }
        if let Some(genotypes) = seen_genotypes.remove(termid) {
            term_details.genotypes_by_uniquename = genotypes;
        }
        if let Some(alleles) = seen_alleles.remove(termid) {
            term_details.alleles_by_uniquename = alleles;
        }
        if let Some(references) = seen_references.remove(termid) {
            term_details.references_by_uniquename = references;
        }
        if let Some(terms) = seen_terms.remove(termid) {
            term_details.terms_by_termid = terms;
        }
        if let Some(gene_uniquename_set) = genes_annotated_with_map.remove(termid) {
            term_details.genes_annotated_with = gene_uniquename_set;
        }
    }
}
// Fill the *_by_uniquename lookup maps of every GeneDetails with short
// objects for everything mentioned by the gene's annotations,
// interactions, ortholog/paralog annotations and target-of annotations.
fn set_gene_details_maps(&mut self) {
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();
    // The block scope ends the shared borrow of self.genes before the
    // mutable iteration below.
    {
        for (gene_uniquename, gene_details) in &self.genes {
            self.add_cv_annotations_to_maps(&gene_uniquename,
                                            &gene_details.cv_annotations,
                                            &mut seen_references,
                                            &mut seen_genes,
                                            &mut seen_genotypes,
                                            &mut seen_alleles,
                                            &mut seen_terms);
            // Both partners and the reference of each physical and genetic
            // interaction.
            let interaction_iter =
                gene_details.physical_interactions.iter().chain(&gene_details.genetic_interactions);
            for interaction in interaction_iter {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename,
                                     &interaction.reference_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &interaction.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &interaction.interactor_uniquename);
            }
            for ortholog_annotation in &gene_details.ortholog_annotations {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename,
                                     &ortholog_annotation.reference_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &ortholog_annotation.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &ortholog_annotation.ortholog_uniquename);
            }
            for paralog_annotation in &gene_details.paralog_annotations {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename,
                                     &paralog_annotation.reference_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &paralog_annotation.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &paralog_annotation.paralog_uniquename);
            }
            for target_of_annotation in &gene_details.target_of_annotations {
                for annotation_gene_uniquename in &target_of_annotation.genes {
                    self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                          annotation_gene_uniquename);
                }
                if let Some(ref annotation_genotype_uniquename) = target_of_annotation.genotype_uniquename {
                    self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles, &mut seen_genes,
                                              gene_uniquename,
                                              &annotation_genotype_uniquename)
                }
                self.add_ref_to_hash(&mut seen_references, gene_uniquename,
                                     &target_of_annotation.reference_uniquename);
            }
        }
    }
    // Move the collected short maps into each GeneDetails.
    for (gene_uniquename, gene_details) in &mut self.genes {
        if let Some(references) = seen_references.remove(gene_uniquename) {
            gene_details.references_by_uniquename = references;
        }
        if let Some(alleles) = seen_alleles.remove(gene_uniquename) {
            gene_details.alleles_by_uniquename = alleles;
        }
        if let Some(genes) = seen_genes.remove(gene_uniquename) {
            gene_details.genes_by_uniquename = genes;
        }
        if let Some(genotypes) = seen_genotypes.remove(gene_uniquename) {
            gene_details.genotypes_by_uniquename = genotypes;
        }
        if let Some(terms) = seen_terms.remove(gene_uniquename) {
            gene_details.terms_by_termid = terms;
        }
    }
}
// Fill the *_by_uniquename lookup maps of every GenotypeDetails with short
// objects for everything mentioned by that genotype's annotations.
fn set_genotype_details_maps(&mut self) {
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();

    // Collect everything each genotype's annotations mention.
    for (genotype_uniquename, genotype_details) in &self.genotypes {
        self.add_cv_annotations_to_maps(&genotype_uniquename,
                                        &genotype_details.cv_annotations,
                                        &mut seen_references,
                                        &mut seen_genes,
                                        &mut seen_genotypes,
                                        &mut seen_alleles,
                                        &mut seen_terms);
    }

    // Move the collected short maps into each GenotypeDetails.
    for (genotype_uniquename, genotype_details) in &mut self.genotypes {
        if let Some(references) = seen_references.remove(genotype_uniquename) {
            genotype_details.references_by_uniquename = references;
        }
        if let Some(alleles) = seen_alleles.remove(genotype_uniquename) {
            genotype_details.alleles_by_uniquename = alleles;
        }
        if let Some(genes) = seen_genes.remove(genotype_uniquename) {
            genotype_details.genes_by_uniquename = genes;
        }
        if let Some(terms) = seen_terms.remove(genotype_uniquename) {
            genotype_details.terms_by_termid = terms;
        }
    }
}
fn set_reference_details_maps(&mut self) {
let mut seen_genes: HashMap<String, GeneShortOptionMap> = HashMap::new();
type GenotypeShortMap = HashMap<GenotypeUniquename, GenotypeShort>;
let mut seen_genotypes: HashMap<ReferenceUniquename, GenotypeShortMap> = HashMap::new();
type AlleleShortMap = HashMap<AlleleUniquename, AlleleShort>;
let mut seen_alleles: HashMap<TermId, AlleleShortMap> = HashMap::new();
let mut seen_terms: HashMap<GeneUniquename, TermShortOptionMap> = HashMap::new();
{
for (reference_uniquename, reference_details) in &self.references {
for feat_annotations in reference_details.cv_annotations.values() {
for feat_annotation in feat_annotations.iter() {
self.add_term_to_hash(&mut seen_terms, reference_uniquename,
&feat_annotation.term);
for annotation_detail_id in &feat_annotation.annotations {
let annotation_detail = self.annotation_details
.get(&annotation_detail_id).expect("can't find OntAnnotationDetail");
for gene_uniquename in &annotation_detail.genes {
self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
gene_uniquename)
}
for condition_termid in &annotation_detail.conditions {
self.add_term_to_hash(&mut seen_terms, reference_uniquename,
condition_termid);
}
for ext_part in &annotation_detail.extension {
match ext_part.ext_range {
ExtRange::Term(ref range_termid) =>
self.add_term_to_hash(&mut seen_terms,
reference_uniquename,
range_termid),
ExtRange::Gene(ref allele_gene_uniquename) =>
self.add_gene_to_hash(&mut seen_genes,
reference_uniquename,
allele_gene_uniquename),
_ => {},
}
}
if let Some(ref genotype_uniquename) = annotation_detail.genotype {
let genotype = self.make_genotype_short(genotype_uniquename);
self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles, &mut seen_genes,
reference_uniquename,
&genotype.display_uniquename);
}
}
}
}
let interaction_iter =
reference_details.physical_interactions.iter()
.chain(&reference_details.genetic_interactions);
for interaction in interaction_iter {
self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
&interaction.gene_uniquename);
self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
&interaction.interactor_uniquename);
}
for ortholog_annotation in &reference_details.ortholog_annotations {
self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
&ortholog_annotation.gene_uniquename);
self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
&ortholog_annotation.ortholog_uniquename);
}
for paralog_annotation in &reference_details.paralog_annotations {
self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
¶log_annotation.gene_uniquename);
self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
¶log_annotation.paralog_uniquename);
}
}
}
for (reference_uniquename, reference_details) in &mut self.references {
if let Some(genes) = seen_genes.remove(reference_uniquename) {
reference_details.genes_by_uniquename = genes;
}
if let Some(genotypes) = seen_genotypes.remove(reference_uniquename) {
reference_details.genotypes_by_uniquename = genotypes;
}
if let Some(alleles) = seen_alleles.remove(reference_uniquename) {
reference_details.alleles_by_uniquename = alleles;
}
if let Some(terms) = seen_terms.remove(reference_uniquename) {
reference_details.terms_by_termid = terms;
}
}
}
// Set the per-term gene count, genotype count and single-allele genotype
// set, computed from each term's annotations.
pub fn set_counts(&mut self) {
    let mut term_seen_genes: HashMap<TermId, HashSet<GeneUniquename>> = HashMap::new();
    let mut term_seen_genotypes: HashMap<TermId, HashSet<GenotypeUniquename>> = HashMap::new();
    let mut term_seen_single_allele_genotypes: HashMap<TermId, HashSet<GenotypeUniquename>> = HashMap::new();
    // NOTE(review): ref_seen_genes is populated below but never read in
    // this method — confirm whether a per-reference gene count should be
    // set from it or the computation can be removed.
    let mut ref_seen_genes: HashMap<ReferenceUniquename, HashSet<GeneUniquename>> = HashMap::new();
    // Per term: distinct genes, genotypes and single-allele genotypes
    // appearing in its annotations.
    for (termid, term_details) in &self.terms {
        let mut seen_genes: HashSet<GeneUniquename> = HashSet::new();
        let mut seen_genotypes: HashSet<GenotypeUniquename> = HashSet::new();
        let mut seen_single_allele_genotypes: HashSet<GenotypeUniquename> = HashSet::new();
        for term_annotations in term_details.cv_annotations.values() {
            for term_annotation in term_annotations {
                for annotation_detail_id in &term_annotation.annotations {
                    let annotation_detail = self.annotation_details
                        .get(&annotation_detail_id).expect("can't find OntAnnotationDetail");
                    // NOTE(review): unlike the reference loop below, this
                    // counts genes from NOT annotations too — confirm
                    // whether that's intended.
                    for gene_uniquename in &annotation_detail.genes {
                        seen_genes.insert(gene_uniquename.clone());
                    }
                    if let Some(ref genotype_uniquename) = annotation_detail.genotype {
                        seen_genotypes.insert(genotype_uniquename.clone());
                        let genotype = &self.genotypes[genotype_uniquename];
                        if genotype.expressed_alleles.len() == 1 {
                            seen_single_allele_genotypes.insert(genotype_uniquename.clone());
                        }
                    }
                }
            }
        }
        term_seen_genes.insert(termid.clone(), seen_genes);
        term_seen_genotypes.insert(termid.clone(), seen_genotypes);
        term_seen_single_allele_genotypes.insert(termid.clone(), seen_single_allele_genotypes);
    }
    // Per reference: distinct genes from its non-NOT annotations, its
    // interaction partners and its ortholog annotations.
    for (reference_uniquename, reference_details) in &self.references {
        let mut seen_genes: HashSet<GeneUniquename> = HashSet::new();
        for rel_annotations in reference_details.cv_annotations.values() {
            for rel_annotation in rel_annotations {
                for annotation_detail_id in &rel_annotation.annotations {
                    let annotation_detail = self.annotation_details
                        .get(&annotation_detail_id).expect("can't find OntAnnotationDetail");
                    if !rel_annotation.is_not {
                        for gene_uniquename in &annotation_detail.genes {
                            seen_genes.insert(gene_uniquename.clone());
                        }
                    }
                }
            }
        }
        let interaction_iter =
            reference_details.physical_interactions.iter().chain(&reference_details.genetic_interactions);
        for interaction in interaction_iter {
            seen_genes.insert(interaction.gene_uniquename.clone());
            seen_genes.insert(interaction.interactor_uniquename.clone());
        }
        for ortholog_annotation in &reference_details.ortholog_annotations {
            seen_genes.insert(ortholog_annotation.gene_uniquename.clone());
        }
        ref_seen_genes.insert(reference_uniquename.clone(), seen_genes);
    }
    // Store the counts and the single-allele genotype set on each term.
    for term_details in self.terms.values_mut() {
        term_details.single_allele_genotype_uniquenames =
            term_seen_single_allele_genotypes.remove(&term_details.termid).unwrap();
        term_details.gene_count =
            term_seen_genes[&term_details.termid].len();
        term_details.genotype_count =
            term_seen_genotypes[&term_details.termid].len();
    }
}
// Build the two "not in GO slim" gene subsets: protein-coding ("mRNA gene")
// genes of the loaded organism that aren't covered by the given slim
// subset, split into those with and without any biological process
// annotation.  Transposon and dubious genes are excluded.
fn make_non_bp_slim_gene_subset(&self, go_slim_subset: &TermSubsetDetails)
                                -> IdGeneSubsetMap
{
    let slim_termid_set: HashSet<String> =
        go_slim_subset.elements
        .iter().map(|element| element.termid.clone()).collect();

    let mut non_slim_with_bp_annotation = HashSet::new();
    let mut non_slim_without_bp_annotation = HashSet::new();

    // True if any non-NOT annotation is to a slim term or to a term with a
    // slim term among its interesting parents.
    let has_parent_in_slim = |term_annotations: &[OntTermAnnotations]| {
        term_annotations.iter().any(|term_annotation| {
            let interesting_parents =
                &self.terms[&term_annotation.term].interesting_parents;
            !term_annotation.is_not &&
                (slim_termid_set.contains(&term_annotation.term) ||
                 interesting_parents.intersection(&slim_termid_set).count() > 0)
        })
    };

    for gene_details in self.genes.values() {
        if self.config.load_organism_taxonid != gene_details.taxonid {
            continue;
        }
        if gene_details.feature_type != "mRNA gene" {
            continue;
        }
        if gene_details.characterisation_status == Some("transposon".into()) ||
            gene_details.characterisation_status == Some("dubious".into())
        {
            continue;
        }

        let mut bp_count = 0;
        if let Some(annotations) =
            gene_details.cv_annotations.get("biological_process") {
            if has_parent_in_slim(annotations.as_slice()) {
                continue;
            }
            bp_count = annotations.len();
        }

        if bp_count == 0 {
            non_slim_without_bp_annotation.insert(gene_details.uniquename.clone());
        } else {
            non_slim_with_bp_annotation.insert(gene_details.uniquename.clone());
        }
    }

    let mut return_map = HashMap::new();
    return_map.insert("non_go_slim_with_bp_annotation".into(),
                      GeneSubsetDetails {
                          name: "non_go_slim_with_bp_annotation".into(),
                          display_name: String::from("Proteins with biological process ") +
                              "annotation that are not in a slim category",
                          elements: non_slim_with_bp_annotation,
                      });
    return_map.insert("non_go_slim_without_bp_annotation".into(),
                      GeneSubsetDetails {
                          name: "non_go_slim_without_bp_annotation".into(),
                          display_name: String::from("Proteins with no biological process ") +
                              "annotation and are not in a slim category",
                          elements: non_slim_without_bp_annotation,
                      });
    return_map
}
// Build the "goslim_pombe" term subset from the configured GO slim terms,
// recording for each slim term its name and annotated-gene count, and the
// total number of distinct genes covered by the whole slim.
//
// Panics if a configured slim termid has no TermDetails.
fn make_bp_go_slim_subset(&self) -> TermSubsetDetails {
    let mut all_genes = HashSet::new();
    let mut go_slim_subset: HashSet<TermSubsetElement> = HashSet::new();

    // Iterate the config by reference instead of cloning the whole
    // go_slim_terms list as before.
    for go_slim_conf in &self.config.go_slim_terms {
        let slim_termid = &go_slim_conf.termid;
        let term_details = self.terms.get(slim_termid)
            .unwrap_or_else(|| panic!("can't find TermDetails for {}", slim_termid));

        let subset_element = TermSubsetElement {
            name: term_details.name.clone(),
            termid: slim_termid.clone(),
            gene_count: term_details.genes_annotated_with.len(),
        };

        for gene in &term_details.genes_annotated_with {
            all_genes.insert(gene);
        }

        go_slim_subset.insert(subset_element);
    }

    TermSubsetDetails {
        name: "goslim_pombe".into(),
        total_gene_count: all_genes.len(),
        elements: go_slim_subset,
    }
}
// Add one gene subset per feature type (named "feature_type:<type>") for
// the genes of the loaded organism.
fn make_feature_type_subsets(&self, subsets: &mut IdGeneSubsetMap) {
    // Sanitises subset IDs by replacing whitespace/comma/colon runs with
    // "_".  Compiled once here — it was previously re-compiled inside the
    // loop for every gene.
    let re = Regex::new(r"[\s,:]+").unwrap();

    for gene_details in self.genes.values() {
        if self.config.load_organism_taxonid != gene_details.taxonid {
            continue;
        }
        let subset_name =
            String::from("feature_type:") + &gene_details.feature_type;
        let subset_name_no_spaces: String = re.replace_all(&subset_name, "_").into();
        subsets.entry(subset_name_no_spaces.clone())
            // or_insert_with() so the struct is only built when the subset
            // doesn't exist yet.
            .or_insert_with(|| GeneSubsetDetails {
                name: subset_name_no_spaces,
                display_name: subset_name,
                elements: HashSet::new()
            })
            .elements.insert(gene_details.uniquename.clone());
    }
}
// make subsets using the characterisation_status field of GeneDetails
// (named "characterisation_status:<status>"), for protein-coding genes of
// the loaded organism
fn make_characterisation_status_subsets(&self, subsets: &mut IdGeneSubsetMap) {
    // Sanitises subset IDs by replacing whitespace/comma/colon runs with
    // "_".  Compiled once here — it was previously re-compiled inside the
    // loop for every gene.
    let re = Regex::new(r"[\s,:]+").unwrap();

    for gene_details in self.genes.values() {
        if self.config.load_organism_taxonid != gene_details.taxonid {
            continue;
        }
        if gene_details.feature_type != "mRNA gene" {
            continue;
        }
        if let Some(ref characterisation_status) = gene_details.characterisation_status {
            let subset_name =
                String::from("characterisation_status:") + &characterisation_status;
            let subset_name_no_spaces: String = re.replace_all(&subset_name, "_").into();
            subsets.entry(subset_name_no_spaces.clone())
                // or_insert_with() so the struct is only built when the
                // subset doesn't exist yet.
                .or_insert_with(|| GeneSubsetDetails {
                    name: subset_name_no_spaces,
                    display_name: subset_name,
                    elements: HashSet::new()
                })
                .elements.insert(gene_details.uniquename.clone());
        }
    }
}
// make InterPro subsets using the interpro_matches field of GeneDetails:
// each match contributes a subset for its InterPro ID (when non-empty) and
// one for its member-database ID ("interpro:<dbname>:<id>")
fn make_interpro_subsets(&mut self, subsets: &mut IdGeneSubsetMap) {
    for (gene_uniquename, gene_details) in &self.genes {
        for interpro_match in &gene_details.interpro_matches {
            // (subset ID, display name) pairs this match contributes.
            let mut new_subset_names = vec![];

            if !interpro_match.interpro_id.is_empty() {
                let subset_name =
                    String::from("interpro:") + &interpro_match.interpro_id;
                new_subset_names.push((subset_name,
                                       interpro_match.interpro_name.clone()));
            }

            // Borrow dbname for the concatenation — the previous
            // `&interpro_match.dbname.clone()` allocated a copy only to
            // borrow it.
            let subset_name = String::from("interpro:") +
                &interpro_match.dbname + ":" + &interpro_match.id;
            new_subset_names.push((subset_name, interpro_match.name.clone()));

            for (subset_name, display_name) in new_subset_names {
                subsets.entry(subset_name.clone())
                    // or_insert_with() so the struct is only built when the
                    // subset doesn't exist yet.
                    .or_insert_with(|| GeneSubsetDetails {
                        name: subset_name,
                        display_name,
                        elements: HashSet::new(),
                    })
                    .elements.insert(gene_uniquename.clone());
            }
        }
    }
}
// populate the subsets HashMaps
/// Build all term and gene subsets: the BP GO slim term subset, the
/// gene subset of genes not in that slim, and the feature type /
/// characterisation status / InterPro gene subsets; then store the
/// results on self.
fn make_subsets(&mut self) {
    let bp_go_slim_subset = self.make_bp_go_slim_subset();
    // the gene subset map is seeded with the non-slim genes, then
    // extended by the helpers below
    let mut gene_subsets =
        self.make_non_bp_slim_gene_subset(&bp_go_slim_subset);
    self.term_subsets.insert("bp_goslim_pombe".into(), bp_go_slim_subset);
    self.make_feature_type_subsets(&mut gene_subsets);
    self.make_characterisation_status_subsets(&mut gene_subsets);
    self.make_interpro_subsets(&mut gene_subsets);
    self.gene_subsets = gene_subsets;
}
// sort the list of genes in the ChromosomeDetails by start_pos
/// Sort every chromosome's gene list: primarily by start position, then
/// by gene name (named genes sort before unnamed ones), finally by
/// uniquename.
pub fn sort_chromosome_genes(&mut self) {
    let mut genes_to_sort: HashMap<ChromosomeName, Vec<GeneUniquename>> =
        HashMap::new();
    {
        // comparator: start_pos first; ties fall through to the name /
        // uniquename rules below
        let sorter = |uniquename1: &GeneUniquename, uniquename2: &GeneUniquename| {
            let gene1 = &self.genes[uniquename1];
            let gene2 = &self.genes[uniquename2];
            if let Some(ref gene1_loc) = gene1.location {
                if let Some(ref gene2_loc) = gene2.location {
                    let cmp = gene1_loc.start_pos.cmp(&gene2_loc.start_pos);
                    if cmp != Ordering::Equal {
                        return cmp;
                    }
                }
            }
            if gene1.name.is_some() {
                if gene2.name.is_some() {
                    gene1.name.cmp(&gene2.name)
                } else {
                    Ordering::Less
                }
            } else {
                if gene2.name.is_some() {
                    Ordering::Greater
                } else {
                    gene1.uniquename.cmp(&gene2.uniquename)
                }
            }
        };
        // sort copies of the gene lists inside this scope: `sorter`
        // borrows self.genes, so self.chromosomes can't be mutated yet
        for (chr_uniquename, chr_details) in &self.chromosomes {
            genes_to_sort.insert(chr_uniquename.clone(),
                                 chr_details.gene_uniquenames.clone());
        }
        for gene_uniquenames in genes_to_sort.values_mut() {
            gene_uniquenames.sort_by(&sorter);
        }
    }
    // write the sorted lists back into the chromosome details
    for (chr_uniquename, gene_uniquenames) in genes_to_sort {
        self.chromosomes.get_mut(&chr_uniquename).unwrap().gene_uniquenames =
            gene_uniquenames;
    }
}
// remove some of the refs that have no annotations.
// See: https://github.com/pombase/website/issues/628
/// Drop references that have no annotation and whose Canto triage status
/// is "New", "Wrong organism" or "Loaded in error".  Annotated
/// references and references with no triage status are always kept.
fn remove_non_curatable_refs(&mut self) {
    let filtered_refs = self.references.drain()
        .filter(|&(_, ref reference_details)| {
            // any reference with annotation is always kept
            if reference_has_annotation(reference_details) {
                return true;
            }
            // previously this field was matched by two separate `if let`s;
            // a single check with all three rejected statuses is equivalent
            if let Some(ref triage_status) = reference_details.canto_triage_status {
                return triage_status != "New" &&
                    triage_status != "Wrong organism" &&
                    triage_status != "Loaded in error";
            }
            if reference_details.uniquename.starts_with("PMID:") {
                println!("reference {} has no canto_triage_status", reference_details.uniquename);
            }
            // default to true because there are references that
            // haven't or shouldn't be triaged, eg. GO_REF:...
            true
        })
        .collect();
    self.references = filtered_refs;
}
/// Build one SolrTermSummary for every term that has at least one
/// ontology annotation.  Synonyms are split into "close" (exact/narrow
/// types) and "distant" (all other types); for each group the words not
/// already present in the term name are joined for Solr word matching.
fn make_solr_term_summaries(&mut self) -> Vec<SolrTermSummary> {
    let mut return_summaries = vec![];
    let term_name_split_re = Regex::new(r"\W+").unwrap();
    for (termid, term_details) in &self.terms {
        // unannotated terms aren't indexed
        if term_details.cv_annotations.is_empty() {
            continue;
        }
        // characters trimmed from both ends of every word
        let trimmable_p = |c: char| {
            c.is_whitespace() || c == ',' || c == ':'
                || c == ';' || c == '.' || c == '\''
        };
        let term_name_words =
            term_name_split_re.split(&term_details.name)
            .map(|s: &str| {
                s.trim_matches(&trimmable_p).to_owned()
            }).collect::<Vec<String>>();
        let mut close_synonyms = vec![];
        let mut close_synonym_words_vec: Vec<String> = vec![];
        let mut distant_synonyms = vec![];
        let mut distant_synonym_words_vec: Vec<String> = vec![];
        // add the words of `synonym` to `words_vec`, skipping words that
        // are already collected or that occur in the term name itself
        let add_to_words_vec = |synonym: &str, words_vec: &mut Vec<String>| {
            let synonym_words = term_name_split_re.split(&synonym);
            for word in synonym_words {
                let word_string = word.trim_matches(&trimmable_p).to_owned();
                if !words_vec.contains(&word_string) &&
                    !term_name_words.contains(&word_string) {
                    words_vec.push(word_string);
                }
            }
        };
        for synonym in &term_details.synonyms {
            if synonym.synonym_type == "exact" || synonym.synonym_type == "narrow" {
                add_to_words_vec(&synonym.name, &mut close_synonym_words_vec);
                close_synonyms.push(synonym.name.clone());
            } else {
                add_to_words_vec(&synonym.name, &mut distant_synonym_words_vec);
                distant_synonyms.push(synonym.name.clone());
            }
        }
        // a synonym listed in both groups counts only as close
        distant_synonyms = distant_synonyms.into_iter()
            .filter(|synonym| {
                !close_synonyms.contains(&synonym)
            })
            .collect::<Vec<_>>();
        let interesting_parents_for_solr =
            term_details.interesting_parents.clone();
        let term_summ = SolrTermSummary {
            id: termid.clone(),
            cv_name: term_details.cv_name.clone(),
            name: term_details.name.clone(),
            definition: term_details.definition.clone(),
            close_synonyms,
            close_synonym_words: close_synonym_words_vec.join(" "),
            distant_synonyms,
            distant_synonym_words: distant_synonym_words_vec.join(" "),
            interesting_parents: interesting_parents_for_solr,
        };
        return_summaries.push(term_summ);
    }
    return_summaries
}
/// Return a Solr summary for every stored reference.
fn make_solr_reference_summaries(&mut self) -> Vec<SolrReferenceSummary> {
    self.references
        .values()
        .map(SolrReferenceSummary::from_reference_details)
        .collect()
}
/// Run the whole build pipeline and return the final WebData, consuming
/// the builder.  The processing steps are run in a fixed order; later
/// steps read maps filled in by earlier ones, so reordering them is not
/// safe without checking those dependencies.
pub fn get_web_data(mut self) -> WebData {
    // process raw Chado data into the internal detail maps
    self.process_dbxrefs();
    self.process_references();
    self.process_chromosome_features();
    self.make_feature_rel_maps();
    self.process_features();
    self.add_gene_neighbourhoods();
    self.process_props_from_feature_cvterms();
    self.process_allele_features();
    self.process_genotype_features();
    self.process_cvterms();
    self.add_interesting_parents();
    self.process_cvterm_rels();
    self.process_extension_cvterms();
    self.process_feature_synonyms();
    self.process_feature_cvterms();
    // store normal annotations, then NOT annotations
    self.store_ont_annotations(false);
    self.store_ont_annotations(true);
    self.process_cvtermpath();
    self.process_annotation_feature_rels();
    self.add_target_of_annotations();
    self.set_deletion_viability();
    self.set_term_details_subsets();
    self.make_all_cv_summaries();
    self.remove_non_curatable_refs();
    self.set_term_details_maps();
    self.set_gene_details_maps();
    self.set_genotype_details_maps();
    self.set_reference_details_maps();
    self.set_counts();
    self.make_subsets();
    self.sort_chromosome_genes();
    // assemble the output structures
    let metadata = self.make_metadata();
    let mut gene_summaries: Vec<GeneSummary> = vec![];
    for (gene_uniquename, gene_details) in &self.genes {
        // summaries only cover the organism being loaded
        if self.config.load_organism_taxonid == gene_details.taxonid {
            gene_summaries.push(self.make_gene_summary(&gene_uniquename));
        }
    }
    let solr_term_summaries = self.make_solr_term_summaries();
    let solr_reference_summaries = self.make_solr_reference_summaries();
    let solr_data = SolrData {
        term_summaries: solr_term_summaries,
        gene_summaries: gene_summaries.clone(),
        reference_summaries: solr_reference_summaries,
    };
    let chromosomes = self.chromosomes.clone();
    let mut chromosome_summaries = vec![];
    for chr_details in self.chromosomes.values() {
        chromosome_summaries.push(chr_details.make_chromosome_short());
    }
    let term_subsets = self.term_subsets.clone();
    let gene_subsets = self.gene_subsets.clone();
    let recent_references = self.recent_references.clone();
    let all_community_curated = self.all_community_curated.clone();
    WebData {
        metadata,
        chromosomes,
        chromosome_summaries,
        recent_references,
        all_community_curated,
        api_maps: self.make_api_maps(),
        search_gene_summaries: gene_summaries,
        term_subsets,
        gene_subsets,
        solr_data,
    }
}
}
Fix hiding of terms with no annotation
Refs pombase/website#864
use std::rc::Rc;
use std::collections::{BTreeMap, HashMap};
use std::collections::HashSet;
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::u32;
use regex::Regex;
use db::*;
use types::*;
use web::data::*;
use web::config::*;
use web::cv_summary::make_cv_summaries;
use web::util::cmp_str_dates;
use interpro::UniprotResult;
/// Build a ConfigOrganism from a raw Organism, reading the taxon ID from
/// its "taxon_id" organismprop.
///
/// # Panics
/// Panics (with a descriptive message) if the organism has no "taxon_id"
/// prop or its value doesn't parse as a number.
fn make_organism(rc_organism: &Rc<Organism>) -> ConfigOrganism {
    let mut maybe_taxonid: Option<u32> = None;
    for prop in rc_organism.organismprops.borrow().iter() {
        if prop.prop_type.name == "taxon_id" {
            // expect() instead of unwrap() so a bad database value is
            // diagnosable from the panic message
            maybe_taxonid = Some(prop.value.parse()
                                 .expect("taxon_id organismprop value isn't a valid number"));
        }
    }
    ConfigOrganism {
        taxonid: maybe_taxonid.expect("organism has no taxon_id organismprop"),
        genus: rc_organism.genus.clone(),
        species: rc_organism.species.clone(),
    }
}
// map from term ID to an optional TermShort
type TermShortOptionMap = HashMap<TermId, Option<TermShort>>;
// a UniProt accession string
type UniprotIdentifier = String;
/// Holds all intermediate state while converting the raw Chado data,
/// InterPro domain data and configuration into the final WebData.
pub struct WebDataBuild<'a> {
    // inputs
    raw: &'a Raw,
    domain_data: &'a HashMap<UniprotIdentifier, UniprotResult>,
    config: &'a Config,
    // detail maps built up during processing
    genes: UniquenameGeneMap,
    genotypes: UniquenameGenotypeMap,
    genotype_backgrounds: HashMap<GenotypeUniquename, String>,
    alleles: UniquenameAlleleMap,
    other_features: UniquenameFeatureShortMap,
    terms: TermIdDetailsMap,
    chromosomes: ChrNameDetailsMap,
    references: UniquenameReferenceMap,
    // annotation IDs per term: normal annotations and NOT annotations
    all_ont_annotations: HashMap<TermId, Vec<OntAnnotationId>>,
    all_not_ont_annotations: HashMap<TermId, Vec<OntAnnotationId>>,
    // lookup tables keyed by feature uniquename
    genes_of_transcripts: HashMap<String, String>,
    transcripts_of_polypeptides: HashMap<String, String>,
    parts_of_transcripts: HashMap<String, Vec<FeatureShort>>,
    genes_of_alleles: HashMap<String, String>,
    alleles_of_genotypes: HashMap<String, Vec<ExpressedAllele>>,
    // a map from IDs of terms from the "PomBase annotation extension terms" cv
    // to a Vec of the details of each of the extension
    parts_of_extensions: HashMap<TermId, Vec<ExtPart>>,
    base_term_of_extensions: HashMap<TermId, TermId>,
    children_by_termid: HashMap<TermId, HashSet<TermId>>,
    dbxrefs_of_features: HashMap<String, HashSet<String>>,
    possible_interesting_parents: HashSet<InterestingParent>,
    // reference lists built by the make_recently_added()/make_canto_curated() helpers
    recent_references: RecentReferences,
    all_community_curated: Vec<ReferenceShort>,
    // outputs of make_subsets()
    term_subsets: IdTermSubsetMap,
    gene_subsets: IdGeneSubsetMap,
    annotation_details: IdOntAnnotationDetailMap,
}
/// Return the five empty short-object caches used while building the
/// detail maps.
fn get_maps() ->
    (HashMap<String, ReferenceShortOptionMap>,
     HashMap<String, GeneShortOptionMap>,
     HashMap<String, GenotypeShortMap>,
     HashMap<String, AlleleShortMap>,
     HashMap<GeneUniquename, TermShortOptionMap>)
{
    // a tuple of HashMaps implements Default, so this is equivalent to
    // constructing five HashMap::new() values
    Default::default()
}
/// Return the expression level for a feature in a relationship: alleles
/// whose "allele_type" featureprop is "deletion" are always Some("Null");
/// otherwise the value of the relationship's "expression" prop is used,
/// or None if there is no such prop.
fn get_feat_rel_expression(feature: &Feature,
                           feature_relationship: &FeatureRelationship) -> Option<String> {
    // deletion alleles override any expression prop
    for feature_prop in feature.featureprops.borrow().iter() {
        if feature_prop.prop_type.name == "allele_type" {
            if let Some(ref value) = feature_prop.value {
                if value == "deletion" {
                    return Some("Null".into());
                }
            }
        }
    }
    for rel_prop in feature_relationship.feature_relationshipprops.borrow().iter() {
        if rel_prop.prop_type.name == "expression" {
            return rel_prop.value.clone();
        }
    }
    None
}
/// True if the reference carries any annotation at all: ontology
/// annotations, physical or genetic interactions, or ortholog/paralog
/// annotations.
fn reference_has_annotation(reference_details: &ReferenceDetails) -> bool {
    if !reference_details.cv_annotations.is_empty() {
        return true;
    }
    if !reference_details.physical_interactions.is_empty() {
        return true;
    }
    if !reference_details.genetic_interactions.is_empty() {
        return true;
    }
    if !reference_details.ortholog_annotations.is_empty() {
        return true;
    }
    !reference_details.paralog_annotations.is_empty()
}
/// True for the feature types treated as genes: "gene" and "pseudogene".
fn is_gene_type(feature_type_name: &str) -> bool {
    ["gene", "pseudogene"].contains(&feature_type_name)
}
/// Order two extension parts using the configured relation order: parts
/// whose relation appears in `relation_order` come first (in configured
/// order), then relations in neither list (alphabetically, with equal
/// names broken by gene ranges sorting before non-gene ranges), and
/// finally relations in `always_last` (in configured order).
pub fn compare_ext_part_with_config(config: &Config, ep1: &ExtPart, ep2: &ExtPart) -> Ordering {
    let rel_order_conf = &config.extension_relation_order;
    let order_conf = &rel_order_conf.relation_order;
    let always_last_conf = &rel_order_conf.always_last;
    let maybe_ep1_index = order_conf.iter().position(|r| *r == ep1.rel_type_name);
    let maybe_ep2_index = order_conf.iter().position(|r| *r == ep2.rel_type_name);
    if let Some(ep1_index) = maybe_ep1_index {
        if let Some(ep2_index) = maybe_ep2_index {
            // both in relation_order: use the configured positions
            ep1_index.cmp(&ep2_index)
        } else {
            Ordering::Less
        }
    } else {
        if maybe_ep2_index.is_some() {
            Ordering::Greater
        } else {
            // neither is in relation_order — fall back to always_last
            let maybe_ep1_last_index = always_last_conf.iter().position(|r| *r == ep1.rel_type_name);
            let maybe_ep2_last_index = always_last_conf.iter().position(|r| *r == ep2.rel_type_name);
            if let Some(ep1_last_index) = maybe_ep1_last_index {
                if let Some(ep2_last_index) = maybe_ep2_last_index {
                    ep1_last_index.cmp(&ep2_last_index)
                } else {
                    Ordering::Greater
                }
            } else {
                if maybe_ep2_last_index.is_some() {
                    Ordering::Less
                } else {
                    // in neither configured list: alphabetical by relation
                    // name, gene ranges before non-gene ranges on ties
                    let name_cmp = ep1.rel_type_name.cmp(&ep2.rel_type_name);
                    if name_cmp == Ordering::Equal {
                        if ep1.ext_range.is_gene() && !ep2.ext_range.is_gene() {
                            Ordering::Less
                        } else {
                            if !ep1.ext_range.is_gene() && ep2.ext_range.is_gene() {
                                Ordering::Greater
                            } else {
                                Ordering::Equal
                            }
                        }
                    } else {
                        name_cmp
                    }
                }
            }
        }
    }
}
/// Render an extension range as display text: the gene display name for
/// gene ranges, the term name for term ranges, and the stored string for
/// the other variants.
///
/// # Panics
/// Panics on the summary-only variants (SummaryGenes/SummaryTerms) and
/// on unknown gene or term IDs.
fn string_from_ext_range(ext_range: &ExtRange,
                         genes: &UniquenameGeneMap, terms: &TermIdDetailsMap) -> String {
    match *ext_range {
        ExtRange::Gene(ref gene_uniquename) => {
            let gene = genes.get(gene_uniquename)
                .unwrap_or_else(|| panic!("can't find gene: {}", gene_uniquename));
            gene_display_name(gene)
        },
        ExtRange::Promoter(ref promoter_uniquename) => promoter_uniquename.clone(),
        ExtRange::SummaryGenes(_) => panic!("can't handle SummaryGenes\n"),
        ExtRange::Term(ref termid) => terms.get(termid).unwrap().name.clone(),
        // fixed copy-paste: this message previously said "SummaryGenes"
        ExtRange::SummaryTerms(_) => panic!("can't handle SummaryTerms\n"),
        ExtRange::Misc(ref misc) => misc.clone(),
        ExtRange::Domain(ref domain) => domain.clone(),
        ExtRange::GeneProduct(ref gene_product) => gene_product.clone(),
    }
}
/// Order two extension parts by their relation display name, breaking
/// ties with a case-insensitive comparison of the rendered ranges.
fn cmp_ext_part(ext_part1: &ExtPart, ext_part2: &ExtPart,
                genes: &UniquenameGeneMap,
                terms: &TermIdDetailsMap) -> Ordering {
    match ext_part1.rel_type_display_name.cmp(&ext_part2.rel_type_display_name) {
        Ordering::Equal => {
            let range1 = string_from_ext_range(&ext_part1.ext_range, genes, terms);
            let range2 = string_from_ext_range(&ext_part2.ext_range, genes, terms);
            range1.to_lowercase().cmp(&range2.to_lowercase())
        },
        unequal => unequal,
    }
}
// compare the extension up to the last common index
/// Compare two extensions part-by-part after moving the parts whose
/// relations are grouped in summaries to the end of each extension.
/// Returns the first non-equal part comparison, or Equal if one
/// (reordered) extension is a prefix of the other.
fn cmp_extension_prefix(cv_config: &CvConfig, ext1: &[ExtPart], ext2: &[ExtPart],
                        genes: &UniquenameGeneMap,
                        terms: &TermIdDetailsMap) -> Ordering {
    let conf_rel_ranges = &cv_config.summary_relation_ranges_to_collect;
    let is_grouping_rel_name =
        |ext: &ExtPart| !conf_rel_ranges.contains(&ext.rel_type_name);
    // put the extension that will be grouped in the summary at the end
    // See: https://github.com/pombase/pombase-chado/issues/636
    let (mut ext1_for_cmp, ext1_rest): (Vec<ExtPart>, Vec<ExtPart>) =
        ext1.to_vec().into_iter().partition(&is_grouping_rel_name);
    ext1_for_cmp.extend(ext1_rest.into_iter());
    let (mut ext2_for_cmp, ext2_rest): (Vec<ExtPart>, Vec<ExtPart>) =
        ext2.to_vec().into_iter().partition(&is_grouping_rel_name);
    ext2_for_cmp.extend(ext2_rest.into_iter());
    // zip stops at the shorter extension, so only shared positions count
    let iter = ext1_for_cmp.iter().zip(&ext2_for_cmp).enumerate();
    for (_, (ext1_part, ext2_part)) in iter {
        let ord = cmp_ext_part(ext1_part, ext2_part, genes, terms);
        if ord != Ordering::Equal {
            return ord
        }
    }
    Ordering::Equal
}
/// Order two extensions: by their common prefix first, with the shorter
/// extension winning ties.
fn cmp_extension(cv_config: &CvConfig, ext1: &[ExtPart], ext2: &[ExtPart],
                 genes: &UniquenameGeneMap,
                 terms: &TermIdDetailsMap) -> Ordering {
    match cmp_extension_prefix(cv_config, ext1, ext2, genes, terms) {
        Ordering::Equal => ext1.len().cmp(&ext2.len()),
        unequal => unequal,
    }
}
/// Case-insensitive ordering of genotypes by display uniquename.
fn cmp_genotypes(genotype1: &GenotypeDetails, genotype2: &GenotypeDetails) -> Ordering {
    let key1 = genotype1.display_uniquename.to_lowercase();
    let key2 = genotype2.display_uniquename.to_lowercase();
    key1.cmp(&key2)
}
/// Build a display name for an allele.  Deletion alleles named "...delta"
/// and wild-type alleles named "...+" show "name(description)" only when
/// the description adds information beyond the allele type; other
/// deletions show "name-description" and everything else shows
/// "name-description-type".
fn allele_display_name(allele: &AlleleShort) -> String {
    let name = allele.name.clone().unwrap_or_else(|| "unnamed".into());
    let allele_type = allele.allele_type.clone();
    let description = allele.description.clone().unwrap_or_else(|| allele_type.clone());
    if allele_type == "deletion" && name.ends_with("delta") ||
        allele_type.starts_with("wild_type") && name.ends_with('+') {
        // Compare description and type with whitespace and underscores
        // removed.  This previously used str::replace("[\\s_]+", ""),
        // which replaces the *literal* text "[\s_]+" (never present)
        // rather than the character class it was meant to match, so the
        // normalisation never happened.
        let normalise = |s: &str| {
            s.chars()
                .filter(|c| !c.is_whitespace() && *c != '_')
                .collect::<String>()
        };
        if normalise(&description) != normalise(&allele_type) {
            return name + "(" + description.as_str() + ")";
        } else {
            return name;
        }
    }
    if allele_type == "deletion" {
        name + "-" + description.as_str()
    } else {
        name + "-" + description.as_str() + "-" + &allele.allele_type
    }
}
fn gene_display_name(gene: &GeneDetails) -> String {
if let Some(name) = gene.name.clone() {
name
} else {
gene.uniquename.clone()
}
}
/// Build a stable display name for a genotype: the display names of its
/// expressed alleles (non-deletions get a gene prefix when fully
/// unrecorded, plus an "-expression-..." suffix when known), sorted,
/// joined and with spaces replaced by "_".
pub fn make_genotype_display_name(genotype_expressed_alleles: &[ExpressedAllele],
                                  allele_map: &UniquenameAlleleMap) -> String {
    let mut allele_display_names: Vec<String> =
        genotype_expressed_alleles.iter().map(|expressed_allele| {
            let allele_short = allele_map.get(&expressed_allele.allele_uniquename).unwrap();
            let mut display_name = allele_display_name(allele_short);
            if allele_short.allele_type != "deletion" {
                // prefix fully-unrecorded alleles with their gene so
                // different genes' unknown alleles stay distinguishable
                if display_name == "unnamed-unrecorded-unrecorded" {
                    display_name = format!("{}-{}", allele_short.gene_uniquename,
                                           display_name);
                }
                if let Some(ref expression) = expressed_allele.expression {
                    display_name += &format!("-expression-{}", expression.to_lowercase());
                }
            }
            display_name
        }).collect();
    // sorting makes the joined name independent of allele input order
    allele_display_names.sort();
    let joined_alleles = allele_display_names.join(" ");
    str::replace(&joined_alleles, " ", "_")
}
/// Convert the numeric phase stored in a Featureloc to a Phase value,
/// or None if no phase is recorded.
///
/// # Panics
/// Panics if the stored phase is outside 0..=2.
fn make_phase(feature_loc: &Featureloc) -> Option<Phase> {
    feature_loc.phase.map(|phase| {
        match phase {
            0 => Phase::Zero,
            1 => Phase::One,
            2 => Phase::Two,
            _ => panic!(),
        }
    })
}
/// Convert the first featureloc of `feat` into a ChromosomeLocation, or
/// None if the feature has no featureloc.
///
/// # Panics
/// Panics on non-positive coordinates, an unknown source chromosome or a
/// strand value other than 1/-1.
fn make_location(chromosome_map: &ChrNameDetailsMap,
                 feat: &Feature) -> Option<ChromosomeLocation> {
    let feature_locs = feat.featurelocs.borrow();
    match feature_locs.get(0) {
        Some(feature_loc) => {
            // fmin is 0-based; +1 converts to the 1-based start used here
            let start_pos =
                if feature_loc.fmin + 1 >= 1 {
                    (feature_loc.fmin + 1) as u32
                } else {
                    panic!("start_pos less than 1");
                };
            let end_pos =
                if feature_loc.fmax >= 1 {
                    feature_loc.fmax as u32
                } else {
                    panic!("start_end less than 1");
                };
            let feature_uniquename = &feature_loc.srcfeature.uniquename;
            let chr_short = make_chromosome_short(chromosome_map, feature_uniquename);
            Some(ChromosomeLocation {
                chromosome_name: chr_short.name,
                start_pos,
                end_pos,
                strand: match feature_loc.strand {
                    1 => Strand::Forward,
                    -1 => Strand::Reverse,
                    _ => panic!(),
                },
                phase: make_phase(&feature_loc),
            })
        },
        None => None,
    }
}
/// Return the complementary DNA base, preserving case.  Any character
/// other than a/c/g/t (either case) maps to 'n'.
fn complement_char(base: char) -> char {
    const PAIRS: [(char, char); 8] = [
        ('a', 't'), ('A', 'T'),
        ('t', 'a'), ('T', 'A'),
        ('g', 'c'), ('G', 'C'),
        ('c', 'g'), ('C', 'G'),
    ];
    for &(from, to) in PAIRS.iter() {
        if base == from {
            return to;
        }
    }
    'n'
}
fn rev_comp(residues: &str) -> Residues {
residues.chars()
.rev().map(complement_char)
.collect()
}
/// Extract the residues covered by `loc` from the chromosome sequence,
/// reverse-complemented for reverse-strand locations.
fn get_loc_residues(chr: &ChromosomeDetails,
                    loc: &ChromosomeLocation) -> Residues {
    // start_pos is 1-based inclusive; convert to 0-based slice bounds
    let start = (loc.start_pos - 1) as usize;
    let end = loc.end_pos as usize;
    let residues: Residues = chr.residues[start..end].into();
    if loc.strand == Strand::Forward {
        residues
    } else {
        rev_comp(&residues)
    }
}
/// Build a FeatureShort (type, uniquename, location and residues) for a
/// located feature.
///
/// # Panics
/// Panics if the feature has no featureloc, its chromosome isn't in
/// `chromosome_map`, or its feature type isn't one handled below.
fn make_feature_short(chromosome_map: &ChrNameDetailsMap, feat: &Feature) -> FeatureShort {
    let maybe_loc = make_location(chromosome_map, feat);
    if let Some(loc) = maybe_loc {
        if let Some(chr) = chromosome_map.get(&loc.chromosome_name) {
            let residues = get_loc_residues(chr, &loc);
            // map Chado feature type names onto the FeatureType enum
            let feature_type = match &feat.feat_type.name as &str {
                "five_prime_UTR" => FeatureType::FivePrimeUtr,
                "pseudogenic_exon" | "exon" => FeatureType::Exon,
                "three_prime_UTR" => FeatureType::ThreePrimeUtr,
                "dg_repeat" => FeatureType::DGRepeat,
                "dh_repeat" => FeatureType::DHRepeat,
                "gap" => FeatureType::Gap,
                "gene_group" => FeatureType::GeneGroup,
                "long_terminal_repeat" => FeatureType::LongTerminalRepeat,
                "low_complexity_region" => FeatureType::LowComplexityRegion,
                "LTR_retrotransposon" => FeatureType::LTRRetrotransposon,
                "mating_type_region" => FeatureType::MatingTypeRegion,
                "nuclear_mt_pseudogene" => FeatureType::NuclearMtPseudogene,
                "origin_of_replication" => FeatureType::OriginOfReplication,
                "polyA_signal_sequence" => FeatureType::PolyASignalSequence,
                "polyA_site" => FeatureType::PolyASite,
                "promoter" => FeatureType::Promoter,
                "region" => FeatureType::Region,
                "regional_centromere" => FeatureType::RegionalCentromere,
                "regional_centromere_central_core" => FeatureType::RegionalCentromereCentralCore,
                "regional_centromere_inner_repeat_region" => FeatureType::RegionalCentromereInnerRepeatRegion,
                "repeat_region" => FeatureType::RepeatRegion,
                "TR_box" => FeatureType::TRBox,
                "SNP" => FeatureType::SNP,
                _ => panic!("can't handle feature type: {}", feat.feat_type.name),
            };
            FeatureShort {
                feature_type,
                uniquename: feat.uniquename.clone(),
                location: loc,
                residues,
            }
        } else {
            panic!("can't find chromosome {}", loc.chromosome_name);
        }
    } else {
        panic!("{} has no featureloc", feat.uniquename);
    }
}
/// Look up a chromosome by name and return its ChromosomeShort.
///
/// # Panics
/// Panics when the chromosome isn't in `chromosome_map`.
pub fn make_chromosome_short<'a>(chromosome_map: &'a ChrNameDetailsMap,
                                 chromosome_name: &'a str) -> ChromosomeShort {
    chromosome_map
        .get(chromosome_name)
        .unwrap_or_else(|| panic!("can't find chromosome: {}", chromosome_name))
        .make_chromosome_short()
}
/// Build a GeneShort (uniquename, name, product) for the given gene.
///
/// # Panics
/// Panics when the gene isn't in `gene_map`.
fn make_gene_short<'b>(gene_map: &'b UniquenameGeneMap,
                       gene_uniquename: &'b str) -> GeneShort {
    let gene_details = gene_map
        .get(gene_uniquename)
        .unwrap_or_else(|| {
            panic!("can't find GeneDetails for gene uniquename {}", gene_uniquename)
        });
    GeneShort {
        uniquename: gene_details.uniquename.clone(),
        name: gene_details.name.clone(),
        product: gene_details.product.clone(),
    }
}
/// Build a ReferenceShort for the given reference, or None for the
/// special "null" uniquename.
///
/// # Panics
/// Panics when a non-"null" uniquename isn't in `reference_map`.
fn make_reference_short<'a>(reference_map: &'a UniquenameReferenceMap,
                            reference_uniquename: &str) -> Option<ReferenceShort> {
    if reference_uniquename == "null" {
        return None;
    }
    let reference_details = reference_map
        .get(reference_uniquename)
        .unwrap_or_else(|| panic!("missing reference in make_reference_short(): {}",
                                  reference_uniquename));
    Some(ReferenceShort::from_reference_details(reference_details))
}
// compare two gene vectors which must be ordered vecs
/// Lexicographically compare two gene uniquename lists by their GeneShort
/// values, with a fully-matching prefix broken by length.  Equivalent to
/// building both GeneShort vectors and comparing them (slice Ord), but
/// compares element-wise with early exit instead of collecting both
/// vectors up front.
fn cmp_gene_vec(genes: &UniquenameGeneMap,
                gene_vec1: &[GeneUniquename],
                gene_vec2: &[GeneUniquename]) -> Ordering {
    for (gene_uniquename1, gene_uniquename2) in gene_vec1.iter().zip(gene_vec2.iter()) {
        let ord = make_gene_short(genes, gene_uniquename1)
            .cmp(&make_gene_short(genes, gene_uniquename2));
        if ord != Ordering::Equal {
            return ord;
        }
    }
    // all shared positions equal: the shorter list sorts first
    gene_vec1.len().cmp(&gene_vec2.len())
}
lazy_static! {
    // matches a one-letter amino acid code followed by a position, eg. "S123"
    static ref MODIFICATION_RE: Regex = Regex::new(r"^(?P<aa>[A-Z])(?P<pos>\d+)$").unwrap();
}
/// Order optional modification residue strings like "S123": by amino
/// acid letter, then numerically by position.  Strings not matching that
/// pattern fall back to plain string order; Some sorts before None.
fn cmp_residues(residue1: &Option<Residue>, residue2: &Option<Residue>) -> Ordering {
    if let Some(ref res1) = *residue1 {
        if let Some(ref res2) = *residue2 {
            // both must match "<AA><pos>" to get the structured comparison
            if let (Some(res1_captures), Some(res2_captures)) =
                (MODIFICATION_RE.captures(res1), MODIFICATION_RE.captures(res2))
            {
                let res1_aa = res1_captures.name("aa").unwrap().as_str();
                let res2_aa = res2_captures.name("aa").unwrap().as_str();
                let aa_order = res1_aa.cmp(&res2_aa);
                if aa_order == Ordering::Equal {
                    // numeric compare so eg. "S9" sorts before "S10"
                    let res1_pos =
                        res1_captures.name("pos").unwrap().as_str().parse::<i32>().unwrap();
                    let res2_pos =
                        res2_captures.name("pos").unwrap().as_str().parse::<i32>().unwrap();
                    res1_pos.cmp(&res2_pos)
                } else {
                    aa_order
                }
            } else {
                res1.cmp(&res2)
            }
        } else {
            Ordering::Less
        }
    } else {
        if residue2.is_some() {
            Ordering::Greater
        } else {
            Ordering::Equal
        }
    }
}
/// Order two annotation details for display.  Genotype annotations sort
/// by genotype then extension; gene annotations sort by gene list, then
/// by the cv's configured sort_details_by keys ("modification" compares
/// residues; anything else compares extensions), defaulting to the
/// extension comparison when no sort keys are configured.
///
/// # Errors
/// Returns Err when one detail has a genotype and the other doesn't.
pub fn cmp_ont_annotation_detail(cv_config: &CvConfig,
                                 detail1: &OntAnnotationDetail,
                                 detail2: &OntAnnotationDetail,
                                 genes: &UniquenameGeneMap,
                                 genotypes: &UniquenameGenotypeMap,
                                 terms: &TermIdDetailsMap) -> Result<Ordering, String> {
    if let Some(ref detail1_genotype_uniquename) = detail1.genotype {
        if let Some(ref detail2_genotype_uniquename) = detail2.genotype {
            let genotype1 = genotypes.get(detail1_genotype_uniquename).unwrap();
            let genotype2 = genotypes.get(detail2_genotype_uniquename).unwrap();
            let ord = cmp_genotypes(genotype1, genotype2);
            if ord == Ordering::Equal {
                Ok(cmp_extension(cv_config, &detail1.extension, &detail2.extension,
                                 genes, terms))
            } else {
                Ok(ord)
            }
        } else {
            Err(format!("comparing two OntAnnotationDetail but one has a genotype and
one a gene:\n{:?}\n{:?}\n", detail1, detail2))
        }
    } else {
        if detail2.genotype.is_some() {
            Err(format!("comparing two OntAnnotationDetail but one has a genotype and
one a gene:\n{:?}\n{:?}\n", detail1, detail2))
        } else {
            let ord = cmp_gene_vec(genes, &detail1.genes, &detail2.genes);
            if ord == Ordering::Equal {
                // gene lists tie: apply the configured tie-breakers in order
                if let Some(ref sort_details_by) = cv_config.sort_details_by {
                    for sort_type in sort_details_by {
                        if sort_type == "modification" {
                            let res = cmp_residues(&detail1.residue, &detail2.residue);
                            if res != Ordering::Equal {
                                return Ok(res);
                            }
                        } else {
                            let res = cmp_extension(cv_config, &detail1.extension,
                                                    &detail2.extension, genes, terms);
                            if res != Ordering::Equal {
                                return Ok(res);
                            }
                        }
                    }
                    Ok(Ordering::Equal)
                } else {
                    Ok(cmp_extension(cv_config, &detail1.extension, &detail2.extension,
                                     genes, terms))
                }
            } else {
                Ok(ord)
            }
        }
    }
}
// Some ancestor terms are useful in the web code. This function uses the Config and returns
// the terms that might be useful.
/// Collect the (termid, rel_name) pairs that should be recorded as
/// "interesting parents": explicitly configured parents, extension
/// display conditions, GO slim terms, query GO components, the two
/// viability terms, cv filter ancestors and split-by-parent terms.
fn get_possible_interesting_parents(config: &Config) -> HashSet<InterestingParent> {
    let mut ret = HashSet::new();
    for parent_conf in &config.interesting_parents {
        ret.insert(parent_conf.clone());
    }
    // extension display names conditional on a descendant relationship
    for ext_conf in &config.extension_display_names {
        if let Some(ref conf_termid) = ext_conf.if_descendent_of {
            ret.insert(InterestingParent {
                termid: conf_termid.clone(),
                rel_name: "is_a".into(),
            });
        }
    }
    // GO slim terms are interesting via every descendant relation
    for go_slim_conf in &config.go_slim_terms {
        for rel_name in &DESCENDANT_REL_NAMES {
            ret.insert(InterestingParent {
                termid: go_slim_conf.termid.clone(),
                rel_name: (*rel_name).to_owned(),
            });
        }
    }
    for query_data_go_conf in &config.query_data_config.go_components {
        for rel_name in &DESCENDANT_REL_NAMES {
            ret.insert(InterestingParent {
                termid: query_data_go_conf.clone(),
                rel_name: (*rel_name).to_owned(),
            });
        }
    }
    ret.insert(InterestingParent {
        termid: config.viability_terms.viable.clone(),
        rel_name: "is_a".into(),
    });
    ret.insert(InterestingParent {
        termid: config.viability_terms.inviable.clone(),
        rel_name: "is_a".into(),
    });
    for (cv_name, conf) in &config.cv_config {
        for filter in &conf.filters {
            for category in &filter.term_categories {
                for ancestor in &category.ancestors {
                    for config_rel_name in &DESCENDANT_REL_NAMES {
                        // has_part is only meaningful for some cvs
                        if *config_rel_name == "has_part" &&
                            !HAS_PART_CV_NAMES.contains(&cv_name.as_str()) {
                            continue;
                        }
                        ret.insert(InterestingParent {
                            termid: ancestor.clone(),
                            rel_name: String::from(*config_rel_name),
                        });
                    }
                }
            }
        }
        for split_by_parent_config in &conf.split_by_parents {
            for ancestor in &split_by_parent_config.termids {
                // a "NOT " prefix negates the term in the config; strip
                // it to get the bare term ID
                let ancestor_termid =
                    if ancestor.starts_with("NOT ") {
                        ancestor[4..].to_owned()
                    } else {
                        ancestor.clone()
                    };
                ret.insert(InterestingParent {
                    termid: ancestor_termid,
                    rel_name: "is_a".into(),
                });
            }
        }
    }
    ret
}
// maximum number of references in each of the "recent" reference lists
const MAX_RECENT_REFS: usize = 20;
/// Return ReferenceShorts for the (at most) MAX_RECENT_REFS most
/// recently added references, ordered by canto_added_date, newest first;
/// references without an added date sort last.
fn make_recently_added(references_map: &UniquenameReferenceMap,
                       all_ref_uniquenames: &[String]) -> Vec<ReferenceShort> {
    let mut date_sorted_pub_uniquenames = all_ref_uniquenames.to_owned();
    {
        let ref_added_date_cmp =
            |ref_uniquename1: &ReferenceUniquename, ref_uniquename2: &ReferenceUniquename| {
                let ref1 = references_map.get(ref_uniquename1).unwrap();
                let ref2 = references_map.get(ref_uniquename2).unwrap();
                if let Some(ref ref1_added_date) = ref1.canto_added_date {
                    if let Some(ref ref2_added_date) = ref2.canto_added_date {
                        // reverse() so the newest dates come first
                        cmp_str_dates(ref1_added_date, ref2_added_date).reverse()
                    } else {
                        Ordering::Less
                    }
                } else {
                    if ref2.canto_added_date.is_some() {
                        Ordering::Greater
                    } else {
                        Ordering::Equal
                    }
                }
            };
        date_sorted_pub_uniquenames.sort_by(ref_added_date_cmp);
    }
    let recently_added_iter =
        date_sorted_pub_uniquenames.iter().take(MAX_RECENT_REFS);
    let mut recently_added: Vec<ReferenceShort> = vec![];
    for ref_uniquename in recently_added_iter {
        let ref_short_maybe = make_reference_short(references_map, ref_uniquename);
        if let Some(ref_short) = ref_short_maybe {
            recently_added.push(ref_short);
        }
    }
    recently_added
}
/// Split the Canto-curated references (those with an approval or session
/// submission date and a curator role) into (recent admin curated,
/// recent community curated, all community curated).  References are
/// ordered newest first by approval date; the two "recent" lists are
/// capped at MAX_RECENT_REFS entries.
fn make_canto_curated(references_map: &UniquenameReferenceMap,
                      all_ref_uniquenames: &[String])
                      -> (Vec<ReferenceShort>, Vec<ReferenceShort>, Vec<ReferenceShort>) {
    let mut sorted_pub_uniquenames: Vec<ReferenceUniquename> =
        all_ref_uniquenames.iter()
        .filter(|ref_uniquename| {
            let reference = references_map.get(*ref_uniquename).unwrap();
            (reference.canto_first_approved_date.is_some() ||
             reference.canto_session_submitted_date.is_some()) &&
                reference.canto_curator_role.is_some()
        })
        .cloned()
        .collect();
    {
        let pub_date_cmp =
            |ref_uniquename1: &ReferenceUniquename, ref_uniquename2: &ReferenceUniquename| {
                let ref1 = references_map.get(ref_uniquename1).unwrap();
                let ref2 = references_map.get(ref_uniquename2).unwrap();
                // use first approval date, but fall back to the most recent
                // approval date, then the session submission date
                let ref1_date =
                    ref1.canto_first_approved_date.as_ref()
                    .unwrap_or_else(|| ref1.canto_approved_date.as_ref()
                                    .unwrap_or_else(|| ref1.canto_session_submitted_date.
                                                    as_ref().unwrap()));
                let ref2_date =
                    ref2.canto_first_approved_date.as_ref()
                    .unwrap_or_else(|| ref2.canto_approved_date.as_ref()
                                    .unwrap_or_else(|| ref2.canto_session_submitted_date.
                                                    as_ref().unwrap()));
                // arguments swapped so the newest dates come first
                cmp_str_dates(ref2_date, ref1_date)
            };
        sorted_pub_uniquenames.sort_by(pub_date_cmp);
    }
    let mut recent_admin_curated = vec![];
    let mut recent_community_curated = vec![];
    let mut all_community_curated = vec![];
    let ref_uniquename_iter = sorted_pub_uniquenames.iter();
    for ref_uniquename in ref_uniquename_iter {
        let reference = references_map.get(ref_uniquename).unwrap();
        if reference.canto_curator_role == Some("community".into()) {
            let ref_short = make_reference_short(references_map, ref_uniquename).unwrap();
            all_community_curated.push(ref_short.clone());
            // was `<=`, which let the list grow to MAX_RECENT_REFS + 1
            // entries — inconsistent with make_recently_added()'s
            // take(MAX_RECENT_REFS)
            if recent_community_curated.len() < MAX_RECENT_REFS {
                recent_community_curated.push(ref_short);
            }
        } else {
            if recent_admin_curated.len() < MAX_RECENT_REFS {
                let ref_short = make_reference_short(references_map, ref_uniquename).unwrap();
                recent_admin_curated.push(ref_short);
            }
        }
    }
    (recent_admin_curated, recent_community_curated, all_community_curated)
}
/// Insert intron FeatureShorts between consecutive transcript parts in
/// `parts` (assumed to be in chromosomal order).  The intron type
/// depends on the flanking part types: between two exons it is a CDS
/// intron, otherwise a 5' or 3' UTR intron.
fn add_introns_to_transcript(chromosome: &ChromosomeDetails,
                             transcript_uniquename: &str, parts: &mut Vec<FeatureShort>) {
    let mut new_parts: Vec<FeatureShort> = vec![];
    let mut intron_count = 0;
    for part in parts.drain(0..) {
        let mut maybe_new_intron = None;
        if let Some(prev_part) = new_parts.last() {
            // candidate intron span between the previous part and this one
            let intron_start = prev_part.location.end_pos + 1;
            let intron_end = part.location.start_pos - 1;
            if intron_start > intron_end {
                // parts abut (start == end + 1) or overlap (start > end + 1);
                // only an overlap is reported
                if intron_start > intron_end + 1 {
                    println!("no gap between exons at {}..{} in {}", intron_start, intron_end,
                             transcript_uniquename);
                }
                // if intron_start == intron_end-1 then it is a one base overlap that
                // represents a frameshift in the reference See:
                // https://github.com/pombase/curation/issues/1453#issuecomment-303214177
            } else {
                intron_count += 1;
                let new_intron_loc = ChromosomeLocation {
                    chromosome_name: prev_part.location.chromosome_name.clone(),
                    start_pos: intron_start,
                    end_pos: intron_end,
                    strand: prev_part.location.strand.clone(),
                    phase: None,
                };
                let intron_uniquename =
                    format!("{}:intron:{}", transcript_uniquename, intron_count);
                let intron_residues = get_loc_residues(chromosome, &new_intron_loc);
                // intron type follows the flanking part types
                let intron_type =
                    if prev_part.feature_type == FeatureType::Exon &&
                    part.feature_type == FeatureType::Exon {
                        FeatureType::CdsIntron
                    } else {
                        if prev_part.feature_type == FeatureType::FivePrimeUtr {
                            FeatureType::FivePrimeUtrIntron
                        } else {
                            FeatureType::ThreePrimeUtrIntron
                        }
                    };
                maybe_new_intron = Some(FeatureShort {
                    feature_type: intron_type,
                    uniquename: intron_uniquename,
                    location: new_intron_loc,
                    residues: intron_residues,
                });
            }
        }
        if let Some(new_intron) = maybe_new_intron {
            new_parts.push(new_intron);
        }
        new_parts.push(part);
    }
    *parts = new_parts;
}
/// Sanity-check the ordered parts of a transcript: there must be at
/// least one exon, any UTR parts must directly abut the exon region, and
/// the parts before/after the exons must have the UTR type expected for
/// the strand.  Gaps are reported with println!; a missing exon or an
/// unexpected part type panics.
fn validate_transcript_parts(transcript_uniquename: &str, parts: &[FeatureShort]) {
    let mut seen_exon = false;
    for part in parts {
        if part.feature_type == FeatureType::Exon {
            seen_exon = true;
            break;
        }
    }
    if !seen_exon {
        panic!("transcript has no exons: {}", transcript_uniquename);
    }
    // check the parts before the first exon
    if parts[0].feature_type != FeatureType::Exon {
        for i in 1..parts.len() {
            let part = &parts[i];
            if part.feature_type == FeatureType::Exon {
                // the last UTR part must abut the first exon
                let last_utr_before_exons = &parts[i-1];
                let first_exon = &parts[i];
                if last_utr_before_exons.location.end_pos + 1 != first_exon.location.start_pos {
                    println!("{} and exon don't meet up: {} at pos {}",
                             last_utr_before_exons.feature_type, transcript_uniquename,
                             last_utr_before_exons.location.end_pos);
                }
                break;
            } else {
                // parts are in chromosomal order, so on the reverse
                // strand the part before the exons is the 3' UTR
                if part.location.strand == Strand::Forward {
                    if part.feature_type != FeatureType::FivePrimeUtr {
                        println!("{:?}", parts);
                        panic!("wrong feature type '{}' before exons in {}",
                               part.feature_type, transcript_uniquename);
                    }
                } else {
                    if part.feature_type != FeatureType::ThreePrimeUtr {
                        println!("{:?}", parts);
                        panic!("wrong feature type '{}' after exons in {}",
                               part.feature_type, transcript_uniquename);
                    }
                }
            }
        }
    }
    // check the parts after the last exon, scanning backwards
    let last_part = parts.last().unwrap();
    if last_part.feature_type != FeatureType::Exon {
        for i in (0..parts.len()-1).rev() {
            let part = &parts[i];
            if part.feature_type == FeatureType::Exon {
                // the first UTR part must abut the last exon
                let first_utr_after_exons = &parts[i+1];
                let last_exon = &parts[i];
                if last_exon.location.end_pos + 1 != first_utr_after_exons.location.start_pos {
                    println!("{} and exon don't meet up: {} at pos {}",
                             first_utr_after_exons.feature_type, transcript_uniquename,
                             first_utr_after_exons.location.end_pos);
                }
                break;
            } else {
                if part.location.strand == Strand::Forward {
                    if part.feature_type != FeatureType::ThreePrimeUtr {
                        panic!("wrong feature type '{}' before exons in {}",
                               part.feature_type, transcript_uniquename);
                    }
                } else {
                    if part.feature_type != FeatureType::FivePrimeUtr {
                        panic!("wrong feature type '{}' after exons in {}",
                               part.feature_type, transcript_uniquename);
                    }
                }
            }
        }
    }
}
impl <'a> WebDataBuild<'a> {
/// Construct a WebDataBuild holding references to the raw Chado data, the
/// UniProt domain data and the site configuration, with all of the
/// intermediate maps and collections starting out empty.
pub fn new(raw: &'a Raw, domain_data: &'a HashMap<UniprotIdentifier, UniprotResult>,
           config: &'a Config) -> WebDataBuild<'a>
{
    WebDataBuild {
        // borrowed inputs
        raw,
        domain_data,
        config,
        // filled in by the process_*() / store_*() methods
        genes: BTreeMap::default(),
        genotypes: HashMap::default(),
        genotype_backgrounds: HashMap::default(),
        alleles: HashMap::default(),
        other_features: HashMap::default(),
        terms: HashMap::default(),
        chromosomes: BTreeMap::default(),
        references: HashMap::default(),
        all_ont_annotations: HashMap::default(),
        all_not_ont_annotations: HashMap::default(),
        recent_references: RecentReferences {
            admin_curated: Vec::new(),
            community_curated: Vec::new(),
            pubmed: Vec::new(),
        },
        all_community_curated: Vec::new(),
        genes_of_transcripts: HashMap::default(),
        transcripts_of_polypeptides: HashMap::default(),
        parts_of_transcripts: HashMap::default(),
        genes_of_alleles: HashMap::default(),
        alleles_of_genotypes: HashMap::default(),
        parts_of_extensions: HashMap::default(),
        base_term_of_extensions: HashMap::default(),
        children_by_termid: HashMap::default(),
        dbxrefs_of_features: HashMap::default(),
        // derived from the config, not empty
        possible_interesting_parents: get_possible_interesting_parents(config),
        term_subsets: HashMap::default(),
        gene_subsets: HashMap::default(),
        annotation_details: HashMap::default(),
    }
}
// Record a reference uniquename under `identifier` in the seen-references
// map.  Does nothing when there is no reference uniquename.
fn add_ref_to_hash(&self,
                   seen_references: &mut HashMap<String, ReferenceShortOptionMap>,
                   identifier: &str,
                   maybe_reference_uniquename: &Option<ReferenceUniquename>) {
    match maybe_reference_uniquename {
        Some(reference_uniquename) => {
            let refs_for_identifier =
                seen_references.entry(identifier.to_owned()).or_default();
            refs_for_identifier.insert(reference_uniquename.clone(), None);
        },
        None => (),
    }
}
// Record a gene uniquename under `identifier` in the seen-genes map.
// The mapped value is None - full details are filled in later.
fn add_gene_to_hash(&self,
                    seen_genes: &mut HashMap<String, GeneShortOptionMap>,
                    identifier: &str,
                    other_gene_uniquename: &GeneUniquename) {
    let genes_for_identifier = seen_genes.entry(identifier.to_owned()).or_default();
    genes_for_identifier.insert(other_gene_uniquename.clone(), None);
}
// Record a genotype under `identifier`, along with every allele it
// expresses and each of those alleles' genes.
fn add_genotype_to_hash(&self,
                        seen_genotypes: &mut HashMap<String, GenotypeShortMap>,
                        seen_alleles: &mut HashMap<String, AlleleShortMap>,
                        seen_genes: &mut HashMap<String, GeneShortOptionMap>,
                        identifier: &str,
                        genotype_uniquename: &str) {
    let genotype_short = self.make_genotype_short(genotype_uniquename);
    for expressed_allele in &genotype_short.expressed_alleles {
        let allele_uniquename = expressed_allele.allele_uniquename.clone();
        self.add_allele_to_hash(seen_alleles, seen_genes, identifier, allele_uniquename);
    }

    seen_genotypes
        .entry(identifier.to_owned())
        .or_default()
        .insert(genotype_uniquename.to_owned(), genotype_short);
}
// Record an allele (and its gene) under `identifier`, returning the
// AlleleShort that was stored.
fn add_allele_to_hash(&self,
                      seen_alleles: &mut HashMap<String, AlleleShortMap>,
                      seen_genes: &mut HashMap<String, GeneShortOptionMap>,
                      identifier: &str,
                      allele_uniquename: AlleleUniquename) -> AlleleShort {
    let allele_short = self.make_allele_short(&allele_uniquename);
    // Also record the gene the allele belongs to.
    self.add_gene_to_hash(seen_genes, identifier, &allele_short.gene_uniquename);
    seen_alleles
        .entry(identifier.to_owned())
        .or_default()
        .insert(allele_uniquename, allele_short.clone());
    allele_short
}
// Record a termid under `identifier` in the seen-terms map.  The mapped
// value is None - full details are filled in later.
fn add_term_to_hash(&self,
                    seen_terms: &mut HashMap<TermId, TermShortOptionMap>,
                    identifier: &str,
                    other_termid: &TermId) {
    let terms_for_identifier = seen_terms.entry(identifier.to_owned()).or_default();
    terms_for_identifier.insert(other_termid.clone(), None);
}
// Look up the GeneDetails for a gene uniquename, panicking if it is
// missing (genes are stored before anything that refers to them).
fn get_gene<'b>(&'b self, gene_uniquename: &'b str) -> &'b GeneDetails {
    self.genes.get(gene_uniquename)
        .unwrap_or_else(|| panic!("can't find GeneDetails for gene uniquename {}",
                                  gene_uniquename))
}
// Mutable version of get_gene(): panics with the same message if the
// gene uniquename is unknown.
fn get_gene_mut<'b>(&'b mut self, gene_uniquename: &'b str) -> &'b mut GeneDetails {
    self.genes.get_mut(gene_uniquename)
        .unwrap_or_else(|| panic!("can't find GeneDetails for gene uniquename {}",
                                  gene_uniquename))
}
fn make_gene_short(&self, gene_uniquename: &str) -> GeneShort {
let gene_details = self.get_gene(gene_uniquename);
GeneShort {
uniquename: gene_details.uniquename.clone(),
name: gene_details.name.clone(),
product: gene_details.product.clone(),
}
}
// Build a GeneSummary for the given gene: exact synonyms, plus one
// IdAndOrganism per ortholog uniquename followed by an extra entry for
// each ortholog gene that has a name.  Panics if an ortholog gene is
// missing from self.genes.
fn make_gene_summary(&self, gene_uniquename: &str) -> GeneSummary {
    let gene_details = self.get_gene(gene_uniquename);

    // Only "exact" synonyms appear in the summary.
    let synonyms: Vec<String> =
        gene_details.synonyms.iter()
            .filter(|synonym| synonym.synonym_type == "exact")
            .map(|synonym| synonym.name.clone())
            .collect();

    // First pass: all ortholog uniquenames.
    let mut ortholog_ids = vec![];
    for annotation in &gene_details.ortholog_annotations {
        ortholog_ids.push(IdAndOrganism {
            identifier: annotation.ortholog_uniquename.clone(),
            taxonid: annotation.ortholog_taxonid,
        });
    }

    // Second pass: append the gene name of each named ortholog.
    for annotation in &gene_details.ortholog_annotations {
        let orth_uniquename = &annotation.ortholog_uniquename;
        match self.genes.get(orth_uniquename) {
            Some(orth_gene) => {
                if let Some(ref orth_name) = orth_gene.name {
                    ortholog_ids.push(IdAndOrganism {
                        identifier: orth_name.clone(),
                        taxonid: annotation.ortholog_taxonid,
                    });
                }
            },
            None => panic!("missing GeneShort for: {:?}", orth_uniquename),
        }
    }

    GeneSummary {
        uniquename: gene_details.uniquename.clone(),
        name: gene_details.name.clone(),
        product: gene_details.product.clone(),
        uniprot_identifier: gene_details.uniprot_identifier.clone(),
        synonyms,
        orthologs: ortholog_ids,
        feature_type: gene_details.feature_type.clone(),
        taxonid: gene_details.taxonid,
        location: gene_details.location.clone(),
    }
}
// Build an APIGeneSummary for the given gene.  The exon count comes
// from the first transcript (0 if the gene has no transcripts).
fn make_api_gene_summary(&self, gene_uniquename: &str) -> APIGeneSummary {
    let gene_details = self.get_gene(gene_uniquename);

    // Only "exact" synonyms appear in the summary.
    let synonyms: Vec<String> =
        gene_details.synonyms.iter()
            .filter(|synonym| synonym.synonym_type == "exact")
            .map(|synonym| synonym.name.clone())
            .collect();

    let exon_count =
        if let Some(transcript) = gene_details.transcripts.get(0) {
            transcript.parts.iter()
                .filter(|part| part.feature_type == FeatureType::Exon)
                .fold(0, |count, _| count + 1)
        } else {
            0
        };

    APIGeneSummary {
        uniquename: gene_details.uniquename.clone(),
        name: gene_details.name.clone(),
        product: gene_details.product.clone(),
        uniprot_identifier: gene_details.uniprot_identifier.clone(),
        exact_synonyms: synonyms,
        dbxrefs: gene_details.dbxrefs.clone(),
        location: gene_details.location.clone(),
        transcripts: gene_details.transcripts.clone(),
        tm_domain_count: gene_details.tm_domain_coords.len(),
        exon_count,
    }
}
// Build a TermShort from the stored TermDetails, panicking if the
// termid is unknown.
fn make_term_short(&self, termid: &str) -> TermShort {
    match self.terms.get(termid) {
        Some(term_details) => TermShort::from_term_details(term_details),
        None => panic!("can't find TermDetails for termid: {}", termid),
    }
}
// Set the characterisation status of a gene from a cvterm name.
// Uses get_gene_mut() (rather than a bare get_mut().unwrap()) so a
// missing gene panics with the same descriptive message as the other
// gene setters below.
fn add_characterisation_status(&mut self, gene_uniquename: &str,
                               cvterm_name: &str) {
    let gene_details = self.get_gene_mut(gene_uniquename);
    gene_details.characterisation_status = Some(cvterm_name.into());
}
// Set the product description of a gene.
fn add_gene_product(&mut self, gene_uniquename: &str, product: &str) {
    self.get_gene_mut(gene_uniquename).product = Some(product.to_owned());
}
// Append a name description to a gene's list of name descriptions.
fn add_name_description(&mut self, gene_uniquename: &str, name_description: &str) {
    self.get_gene_mut(gene_uniquename)
        .name_descriptions
        .push(name_description.into());
}
// Store an ontology annotation detail, keyed by the base term of the
// annotated cvterm.  Extension parts from the cvterm come first,
// followed by any extensions already on the template, sorted with the
// config's comparator.  NOT annotations go into a separate map.
//
// This version avoids two needless Vec clones the original made: the
// extension-parts vec was cloned a second time immediately after being
// cloned out of the map, and the template's extension vec was cloned
// even though the template is consumed by this function (we can drain
// it with append() instead).  A needless termid.clone() is gone too.
fn add_annotation(&mut self, cvterm: &Cvterm, is_not: bool,
                  annotation_template: OntAnnotationDetail) {
    // If the cvterm is an extension term, annotate its base term instead.
    let termid =
        match self.base_term_of_extensions.get(&cvterm.termid()) {
            Some(base_termid) => base_termid.clone(),
            None => cvterm.termid(),
        };

    let mut annotation_template = annotation_template;

    // Extension parts of the cvterm (if any), then the template's own
    // extensions, moved out of the template rather than cloned.
    let mut new_extension =
        match self.parts_of_extensions.get(&cvterm.termid()) {
            Some(parts) => parts.clone(),
            None => vec![],
        };
    new_extension.append(&mut annotation_template.extension);

    {
        let compare_ext_part_func =
            |e1: &ExtPart, e2: &ExtPart| compare_ext_part_with_config(self.config, e1, e2);
        new_extension.sort_by(compare_ext_part_func);
    };

    let ont_annotation_detail =
        OntAnnotationDetail {
            extension: new_extension,
            .. annotation_template
        };

    // NOT annotations are stored separately from positive annotations.
    let annotation_map = if is_not {
        &mut self.all_not_ont_annotations
    } else {
        &mut self.all_ont_annotations
    };

    annotation_map.entry(termid)
        .or_insert_with(Vec::new)
        .push(ont_annotation_detail.id);
    self.annotation_details.insert(ont_annotation_detail.id,
                                   ont_annotation_detail);
}
// Build the feature-uniquename -> dbxref-identifier-set map from the
// raw feature_dbxrefs.
fn process_dbxrefs(&mut self) {
    let mut dbxrefs_map = HashMap::new();

    for feature_dbxref in &self.raw.feature_dbxrefs {
        dbxrefs_map
            .entry(feature_dbxref.feature.uniquename.clone())
            .or_insert_with(HashSet::new)
            .insert(feature_dbxref.dbxref.identifier());
    }

    self.dbxrefs_of_features = dbxrefs_map;
}
// Build a ReferenceDetails for every raw publication and store it in
// self.references, then derive the recently-added / recently-curated
// reference lists.
//
// Fix: the three regexes were previously compiled with Regex::new()
// inside the per-publication loop (once or more per publication); they
// are constant, so compile them once before the loop.
fn process_references(&mut self) {
    // Compiled once, used for every publication.
    let author_re = Regex::new(r"^(?P<f>[^,]+),.*$").unwrap();
    let publication_year_re = Regex::new(r"^(.* )?(?P<y>\d\d\d\d)$").unwrap();
    let approved_date_re = Regex::new(r"^(?P<date>\d\d\d\d-\d\d-\d\d).*").unwrap();

    let mut all_uniquenames = vec![];

    for rc_publication in &self.raw.publications {
        let reference_uniquename = &rc_publication.uniquename;

        // Collect the publicationprops we care about.
        let mut pubmed_authors: Option<String> = None;
        let mut pubmed_publication_date: Option<String> = None;
        let mut pubmed_abstract: Option<String> = None;
        let mut canto_triage_status: Option<String> = None;
        let mut canto_curator_role: Option<String> = None;
        let mut canto_curator_name: Option<String> = None;
        let mut canto_first_approved_date: Option<String> = None;
        let mut canto_approved_date: Option<String> = None;
        let mut canto_added_date: Option<String> = None;
        let mut canto_session_submitted_date: Option<String> = None;

        for prop in rc_publication.publicationprops.borrow().iter() {
            match &prop.prop_type.name as &str {
                "pubmed_publication_date" =>
                    pubmed_publication_date = Some(prop.value.clone()),
                "pubmed_authors" =>
                    pubmed_authors = Some(prop.value.clone()),
                "pubmed_abstract" =>
                    pubmed_abstract = Some(prop.value.clone()),
                "canto_triage_status" =>
                    canto_triage_status = Some(prop.value.clone()),
                "canto_curator_role" =>
                    canto_curator_role = Some(prop.value.clone()),
                "canto_curator_name" =>
                    canto_curator_name = Some(prop.value.clone()),
                "canto_first_approved_date" =>
                    canto_first_approved_date = Some(prop.value.clone()),
                "canto_approved_date" =>
                    canto_approved_date = Some(prop.value.clone()),
                "canto_added_date" =>
                    canto_added_date = Some(prop.value.clone()),
                "canto_session_submitted_date" =>
                    canto_session_submitted_date = Some(prop.value.clone()),
                _ => ()
            }
        }

        let mut authors_abbrev = None;
        let mut publication_year = None;

        // "Smith J, Jones K, ..." abbreviates to "Smith J et al.";
        // a single author is used as-is.
        if let Some(authors) = pubmed_authors.clone() {
            if authors.contains(',') {
                let replaced: String =
                    author_re.replace_all(&authors, "$f et al.").into();
                authors_abbrev = Some(replaced);
            } else {
                authors_abbrev = Some(authors.clone());
            }
        }

        // The year is the trailing 4-digit run of the publication date.
        if let Some(publication_date) = pubmed_publication_date.clone() {
            publication_year =
                Some(publication_year_re.replace_all(&publication_date, "$y").into());
        }

        // Prefer first-approved, then approved, then session-submitted,
        // truncated to the leading YYYY-MM-DD.
        let mut approved_date = canto_first_approved_date.clone();
        if approved_date.is_none() {
            approved_date = canto_approved_date.clone();
        }
        if approved_date.is_none() {
            approved_date = canto_session_submitted_date.clone();
        }

        approved_date =
            if let Some(date) = approved_date {
                Some(approved_date_re.replace_all(&date, "$date").into())
            } else {
                None
            };

        self.references.insert(reference_uniquename.clone(),
                               ReferenceDetails {
                                   uniquename: reference_uniquename.clone(),
                                   title: rc_publication.title.clone(),
                                   citation: rc_publication.miniref.clone(),
                                   pubmed_abstract: pubmed_abstract.clone(),
                                   authors: pubmed_authors.clone(),
                                   authors_abbrev,
                                   pubmed_publication_date: pubmed_publication_date.clone(),
                                   canto_triage_status,
                                   canto_curator_role,
                                   canto_curator_name,
                                   canto_first_approved_date,
                                   canto_approved_date,
                                   canto_session_submitted_date,
                                   canto_added_date,
                                   approved_date,
                                   publication_year,
                                   cv_annotations: HashMap::new(),
                                   physical_interactions: vec![],
                                   genetic_interactions: vec![],
                                   ortholog_annotations: vec![],
                                   paralog_annotations: vec![],
                                   genes_by_uniquename: HashMap::new(),
                                   genotypes_by_uniquename: HashMap::new(),
                                   alleles_by_uniquename: HashMap::new(),
                                   terms_by_termid: HashMap::new(),
                                   annotation_details: HashMap::new(),
                               });

        // Only references with a PubMed date count as "recently added".
        if pubmed_publication_date.is_some() {
            all_uniquenames.push(reference_uniquename.clone());
        }
    }

    let (recent_admin_curated, recent_community_curated,
         all_community_curated) =
        make_canto_curated(&self.references, &all_uniquenames);

    let recent_references = RecentReferences {
        pubmed: make_recently_added(&self.references, &all_uniquenames),
        admin_curated: recent_admin_curated,
        community_curated: recent_community_curated,
    };

    self.recent_references = recent_references;
    self.all_community_curated = all_community_curated;
}
// Make lookup maps from the raw feature relationships: transcript -> gene,
// polypeptide -> transcript, allele -> gene, genotype -> expressed alleles
// and transcript -> parts (exons, introns, UTRs)
// Walk the raw feature_relationships once and populate the maps linking
// features together (see the comment above).  Each relationship is
// classified by subject type, relationship type and object type.
//
// Consistency fix: the allele branches previously re-read
// feature_rel.rel_type.name instead of using the rel_name binding that
// every other branch uses.
fn make_feature_rel_maps(&mut self) {
    for feature_rel in &self.raw.feature_relationships {
        let subject_type_name = &feature_rel.subject.feat_type.name;
        let rel_name = &feature_rel.rel_type.name;
        let object_type_name = &feature_rel.object.feat_type.name;
        let subject_uniquename = &feature_rel.subject.uniquename;
        let object_uniquename = &feature_rel.object.uniquename;

        // transcript -[part_of]-> gene
        if TRANSCRIPT_FEATURE_TYPES.contains(&subject_type_name.as_str()) &&
            rel_name == "part_of" && is_gene_type(object_type_name) {
                self.genes_of_transcripts.insert(subject_uniquename.clone(),
                                                 object_uniquename.clone());
                continue;
            }

        // polypeptide -[derives_from]-> mRNA
        if subject_type_name == "polypeptide" &&
            rel_name == "derives_from" &&
            object_type_name == "mRNA" {
                self.transcripts_of_polypeptides.insert(subject_uniquename.clone(),
                                                        object_uniquename.clone());
                continue;
            }

        if subject_type_name == "allele" {
            // allele -[instance_of]-> gene or pseudogene
            if rel_name == "instance_of" &&
                (object_type_name == "gene" || object_type_name == "pseudogene") {
                    self.genes_of_alleles.insert(subject_uniquename.clone(),
                                                 object_uniquename.clone());
                    continue;
                }

            // allele -[part_of]-> genotype, with an optional expression level
            if rel_name == "part_of" && object_type_name == "genotype" {
                let expression = get_feat_rel_expression(&feature_rel.subject, feature_rel);
                let allele_and_expression =
                    ExpressedAllele {
                        allele_uniquename: subject_uniquename.clone(),
                        expression,
                    };
                let entry = self.alleles_of_genotypes.entry(object_uniquename.clone());
                entry.or_insert_with(Vec::new).push(allele_and_expression);
                continue;
            }
        }

        // exon/intron/UTR part -> its transcript
        if TRANSCRIPT_PART_TYPES.contains(&subject_type_name.as_str()) {
            let entry = self.parts_of_transcripts.entry(object_uniquename.clone());
            let part = make_feature_short(&self.chromosomes, &feature_rel.subject);
            entry.or_insert_with(Vec::new).push(part);
        }
    }
}
// Return the set of dbxref identifiers for a feature, or an empty set
// when the feature has none.
fn get_feature_dbxrefs(&self, feature: &Feature) -> HashSet<String> {
    self.dbxrefs_of_features
        .get(&feature.uniquename)
        .cloned()
        .unwrap_or_default()
}
// Create and store a GeneDetails for a gene/pseudogene feature, and
// register its uniquename with its chromosome (when it has a location).
fn store_gene_details(&mut self, feat: &Feature) {
    let maybe_location = make_location(&self.chromosomes, feat);

    // A located gene is listed on its chromosome.
    if let Some(ref location) = maybe_location {
        if let Some(ref mut chr) = self.chromosomes.get_mut(&location.chromosome_name) {
            chr.gene_uniquenames.push(feat.uniquename.clone());
        }
    }

    let organism = make_organism(&feat.organism);
    let dbxrefs = self.get_feature_dbxrefs(feat);

    // The ORFeome identifier is the suffix of an "SPD:"-prefixed dbxref.
    // NOTE(review): dbxrefs is a HashSet, so with more than one "SPD:"
    // entry the one kept here is unspecified - same as the original loop.
    let orfeome_identifier =
        dbxrefs.iter()
            .filter(|dbxref| dbxref.starts_with("SPD:"))
            .last()
            .map(|dbxref| String::from(&dbxref[4..]));

    // First "uniprot_identifier" featureprop wins.
    let uniprot_identifier =
        feat.featureprops.borrow().iter()
            .find(|prop| prop.prop_type.name == "uniprot_identifier")
            .and_then(|prop| prop.value.clone());

    // InterPro matches and TM domain coordinates come from the UniProt
    // domain data, when we have a UniProt ID for this gene.
    let (interpro_matches, tm_domain_coords) =
        uniprot_identifier.as_ref()
            .and_then(|uniprot_id| self.domain_data.get(uniprot_id))
            .map(|result| {
                let tm_domain_matches =
                    result.tmhmm_matches.iter()
                        .map(|tm_match| (tm_match.start, tm_match.end))
                        .collect::<Vec<_>>();
                (result.interpro_matches.clone(), tm_domain_matches)
            })
            .unwrap_or_else(|| (vec![], vec![]));

    let gene_feature = GeneDetails {
        uniquename: feat.uniquename.clone(),
        name: feat.name.clone(),
        taxonid: organism.taxonid,
        product: None,
        deletion_viability: DeletionViability::Unknown,
        uniprot_identifier,
        interpro_matches,
        tm_domain_coords,
        orfeome_identifier,
        name_descriptions: vec![],
        synonyms: vec![],
        dbxrefs,
        feature_type: feat.feat_type.name.clone(),
        characterisation_status: None,
        location: maybe_location,
        gene_neighbourhood: vec![],
        cv_annotations: HashMap::new(),
        physical_interactions: vec![],
        genetic_interactions: vec![],
        ortholog_annotations: vec![],
        paralog_annotations: vec![],
        target_of_annotations: vec![],
        transcripts: vec![],
        genes_by_uniquename: HashMap::new(),
        genotypes_by_uniquename: HashMap::new(),
        alleles_by_uniquename: HashMap::new(),
        references_by_uniquename: HashMap::new(),
        terms_by_termid: HashMap::new(),
        annotation_details: HashMap::new(),
    };

    self.genes.insert(feat.uniquename.clone(), gene_feature);
}
// Remove and return the parts of a transcript from parts_of_transcripts,
// sorted into chromosome order, validated, with introns added, and
// finally ordered in transcription direction (reversed for the reverse
// strand).  Panics if the transcript, its parts or its chromosome are
// missing.
fn get_transcript_parts(&mut self, transcript_uniquename: &str) -> Vec<FeatureShort> {
    let mut parts = self.parts_of_transcripts.remove(transcript_uniquename)
        .expect("can't find transcript");

    if parts.is_empty() {
        panic!("transcript has no parts: {}", transcript_uniquename);
    }

    // Chromosome (forward strand) order; sort_by_key() is stable, so
    // this matches the original comparator on start_pos.
    parts.sort_by_key(|part| part.location.start_pos);

    validate_transcript_parts(transcript_uniquename, &parts);

    let chr_name = parts[0].location.chromosome_name.clone();
    match self.chromosomes.get(&chr_name) {
        Some(chromosome) =>
            add_introns_to_transcript(chromosome, transcript_uniquename, &mut parts),
        None => panic!("can't find chromosome details for: {}", chr_name),
    }

    if parts[0].location.strand == Strand::Reverse {
        parts.reverse();
    }

    parts
}
// Build a TranscriptDetails from a transcript feature and attach it to
// its gene.  The transcript location spans all of its parts; for mRNAs
// a CDS location spanning just the exons is also computed.  Panics when
// the transcript has no parts or no gene.
fn store_transcript_details(&mut self, feat: &Feature) {
    let transcript_uniquename = feat.uniquename.clone();

    let parts = self.get_transcript_parts(&transcript_uniquename);

    if parts.is_empty() {
        panic!("transcript has no parts");
    }

    // The transcript spans from the minimum start to the maximum end of
    // its parts.
    let mut transcript_start = u32::MAX;
    let mut transcript_end = 0;

    for part in &parts {
        if part.location.start_pos < transcript_start {
            transcript_start = part.location.start_pos;
        }
        if part.location.end_pos > transcript_end {
            transcript_end = part.location.end_pos;
        }
    }

    // use the first part as a template to get the chromosome details
    let transcript_location =
        ChromosomeLocation {
            start_pos: transcript_start,
            end_pos: transcript_end,
            phase: None,
            .. parts[0].location.clone()
        };

    // For mRNAs only: the CDS spans just the exon parts.  cds_end == 0
    // means no exons were seen, so there is no CDS.
    let maybe_cds_location =
        if feat.feat_type.name == "mRNA" {
            let mut cds_start = u32::MAX;
            let mut cds_end = 0;

            for part in &parts {
                if part.feature_type == FeatureType::Exon {
                    if part.location.start_pos < cds_start {
                        cds_start = part.location.start_pos;
                    }
                    if part.location.end_pos > cds_end {
                        cds_end = part.location.end_pos;
                    }
                }
            }

            if cds_end == 0 {
                None
            } else {
                // The phase comes from the mRNA's own featureloc, when
                // it has one.
                if let Some(mrna_location) = feat.featurelocs.borrow().get(0) {
                    let first_part_loc = &parts[0].location;

                    Some(ChromosomeLocation {
                        chromosome_name: first_part_loc.chromosome_name.clone(),
                        start_pos: cds_start,
                        end_pos: cds_end,
                        strand: first_part_loc.strand.clone(),
                        phase: make_phase(&mrna_location),
                    })
                } else {
                    None
                }
            }
        } else {
            None
        };

    let transcript = TranscriptDetails {
        uniquename: transcript_uniquename.clone(),
        location: transcript_location,
        transcript_type: feat.feat_type.name.clone(),
        parts,
        protein: None,
        cds_location: maybe_cds_location,
    };

    // Attach the transcript to its gene; non-pseudogene genes get the
    // transcript type prefixed onto their feature type (e.g. "mRNA gene").
    if let Some(gene_uniquename) =
        self.genes_of_transcripts.get(&transcript_uniquename) {
            let gene_details = self.genes.get_mut(gene_uniquename).unwrap();
            if gene_details.feature_type != "pseudogene" {
                gene_details.feature_type =
                    transcript.transcript_type.clone() + " " + &gene_details.feature_type;
            }
            gene_details.transcripts.push(transcript);
        } else {
            panic!("can't find gene for transcript: {}", transcript_uniquename);
        }
}
// Build a ProteinDetails from a polypeptide feature (sequence plus the
// physical-property featureprops) and attach it to the single transcript
// of the polypeptide's gene.  Panics when required data is missing.
//
// Fix: the original checked only molecular_weight for None and then
// called bare unwrap() on the other four properties, so a missing
// property panicked with no indication of which feature or property was
// at fault.  Every required value now panics with a descriptive message.
// The deeply nested if/else chains are flattened with early lookups.
fn store_protein_details(&mut self, feat: &Feature) {
    let residues = feat.residues.clone()
        .unwrap_or_else(|| panic!("no residues for protein: {}", feat.uniquename));
    let protein_uniquename = feat.uniquename.clone();

    let mut molecular_weight = None;
    let mut average_residue_weight = None;
    let mut charge_at_ph7 = None;
    let mut isoelectric_point = None;
    let mut codon_adaptation_index = None;

    // Parse a featureprop value as f32; warn and return None on failure.
    let parse_prop_as_f32 = |p: &Option<String>| {
        if let Some(prop_value) = p.clone() {
            let maybe_value = prop_value.parse();
            if let Ok(parsed_prop) = maybe_value {
                Some(parsed_prop)
            } else {
                println!("{}: couldn't parse {} as f32",
                         feat.uniquename, prop_value);
                None
            }
        } else {
            None
        }
    };

    for prop in feat.featureprops.borrow().iter() {
        // The weights are stored in Daltons but kept here in kDa.
        if prop.prop_type.name == "molecular_weight" {
            if let Some(value) = parse_prop_as_f32(&prop.value) {
                molecular_weight = Some(value / 1000.0);
            }
        }
        if prop.prop_type.name == "average_residue_weight" {
            if let Some(value) = parse_prop_as_f32(&prop.value) {
                average_residue_weight = Some(value / 1000.0);
            }
        }
        if prop.prop_type.name == "charge_at_ph7" {
            charge_at_ph7 = parse_prop_as_f32(&prop.value);
        }
        if prop.prop_type.name == "isoelectric_point" {
            isoelectric_point = parse_prop_as_f32(&prop.value);
        }
        if prop.prop_type.name == "codon_adaptation_index" {
            codon_adaptation_index = parse_prop_as_f32(&prop.value);
        }
    }

    // All five properties are required; name the missing one on panic.
    let protein = ProteinDetails {
        uniquename: feat.uniquename.clone(),
        sequence: residues,
        molecular_weight: molecular_weight
            .unwrap_or_else(|| panic!("{} has no molecular_weight", feat.uniquename)),
        average_residue_weight: average_residue_weight
            .unwrap_or_else(|| panic!("{} has no average_residue_weight", feat.uniquename)),
        charge_at_ph7: charge_at_ph7
            .unwrap_or_else(|| panic!("{} has no charge_at_ph7", feat.uniquename)),
        isoelectric_point: isoelectric_point
            .unwrap_or_else(|| panic!("{} has no isoelectric_point", feat.uniquename)),
        codon_adaptation_index: codon_adaptation_index
            .unwrap_or_else(|| panic!("{} has no codon_adaptation_index", feat.uniquename)),
    };

    // polypeptide -> transcript -> gene, then attach to the gene's
    // single transcript.
    let transcript_uniquename =
        self.transcripts_of_polypeptides.get(&protein_uniquename)
            .unwrap_or_else(|| panic!("can't find transcript of polypeptide: {}",
                                      protein_uniquename));
    let gene_uniquename =
        self.genes_of_transcripts.get(transcript_uniquename)
            .unwrap_or_else(|| panic!("can't find gene for transcript: {}",
                                      transcript_uniquename));
    let gene_details = self.genes.get_mut(gene_uniquename).unwrap();

    if gene_details.transcripts.len() > 1 {
        panic!("unimplemented - can't handle multiple transcripts for: {}",
               gene_uniquename);
    }
    if gene_details.transcripts.is_empty() {
        panic!("gene has no transcript: {}", gene_uniquename);
    }
    gene_details.transcripts[0].protein = Some(protein);
}
// Build and store a ChromosomeDetails from a chromosome feature.  The
// residues and the "ena_id" featureprop are required.
//
// Fix: missing residues previously panicked with a bare Debug dump of
// the uniquename, and a missing ena_id panicked via a message-less
// unwrap(); both now panic with descriptive messages naming the feature.
fn store_chromosome_details(&mut self, feat: &Feature) {
    let mut ena_identifier = None;

    for prop in feat.featureprops.borrow().iter() {
        if prop.prop_type.name == "ena_id" {
            ena_identifier = prop.value.clone()
        }
    }

    let residues = feat.residues.clone()
        .unwrap_or_else(|| panic!("chromosome has no residues: {}", feat.uniquename));
    let ena_identifier = ena_identifier
        .unwrap_or_else(|| panic!("chromosome has no ena_id property: {}", feat.uniquename));

    let org = make_organism(&feat.organism);

    let chr = ChromosomeDetails {
        name: feat.uniquename.clone(),
        residues,
        ena_identifier,
        gene_uniquenames: vec![],
        taxonid: org.taxonid,
    };

    self.chromosomes.insert(feat.uniquename.clone(), chr);
}
// Build and store a GenotypeDetails for a genotype feature.  The
// display uniquename is derived from the expressed alleles (collected
// earlier by make_feature_rel_maps()), which are then sorted by allele
// display name.  Any "genotype_background" featureprop is recorded in
// genotype_backgrounds.
fn store_genotype_details(&mut self, feat: &Feature) {
    let mut expressed_alleles =
        self.alleles_of_genotypes[&feat.uniquename].clone();

    let genotype_display_uniquename =
        make_genotype_display_name(&expressed_alleles, &self.alleles);

    // Order the alleles by their display names; sort_by_key() is stable,
    // matching the original comparator on display names.
    expressed_alleles.sort_by_key(|expressed_allele| {
        allele_display_name(&self.alleles[&expressed_allele.allele_uniquename])
    });

    for prop in feat.featureprops.borrow().iter() {
        if prop.prop_type.name == "genotype_background" {
            if let Some(ref background) = prop.value {
                self.genotype_backgrounds.insert(feat.uniquename.clone(),
                                                 background.clone());
            }
        }
    }

    self.genotypes.insert(genotype_display_uniquename.clone(),
                          GenotypeDetails {
                              display_uniquename: genotype_display_uniquename,
                              name: feat.name.clone(),
                              expressed_alleles,
                              cv_annotations: HashMap::new(),
                              genes_by_uniquename: HashMap::new(),
                              alleles_by_uniquename: HashMap::new(),
                              references_by_uniquename: HashMap::new(),
                              terms_by_termid: HashMap::new(),
                              annotation_details: HashMap::new(),
                          });
}
// Build and store an AlleleShort for an allele feature.  The
// "allele_type" featureprop is required; "description" is optional.
fn store_allele_details(&mut self, feat: &Feature) {
    let mut allele_type = None;
    let mut description = None;

    for prop in feat.featureprops.borrow().iter() {
        let prop_name: &str = &prop.prop_type.name;
        if prop_name == "allele_type" {
            allele_type = prop.value.clone();
        } else if prop_name == "description" {
            description = prop.value.clone();
        }
    }

    let allele_type = allele_type
        .unwrap_or_else(|| panic!("no allele_type cvtermprop for {}", &feat.uniquename));

    let allele_details = AlleleShort {
        uniquename: feat.uniquename.clone(),
        name: feat.name.clone(),
        // gene mapping was collected by make_feature_rel_maps()
        gene_uniquename: self.genes_of_alleles[&feat.uniquename].clone(),
        allele_type,
        description,
    };
    self.alleles.insert(feat.uniquename.clone(), allele_details);
}
// Chromosomes must be stored before all other features, because feature
// locations refer to them.
fn process_chromosome_features(&mut self) {
    for feat in self.raw.features.iter().filter(|f| f.feat_type.name == "chromosome") {
        self.store_chromosome_details(feat);
    }
}
// Store all non-chromosome features, in dependency order: genes first,
// then transcripts (which attach to genes), then polypeptides (which
// attach to transcripts), then any remaining located feature types.
fn process_features(&mut self) {
    // Pass 1: genes and pseudogenes.
    for feat in &self.raw.features {
        let type_name = &feat.feat_type.name;
        if type_name == "gene" || type_name == "pseudogene" {
            self.store_gene_details(feat);
        }
    }

    // Pass 2: transcripts.
    for feat in &self.raw.features {
        if TRANSCRIPT_FEATURE_TYPES.contains(&feat.feat_type.name.as_str()) {
            self.store_transcript_details(feat)
        }
    }

    // Pass 3: polypeptides.
    for feat in &self.raw.features {
        if feat.feat_type.name == "polypeptide" {
            self.store_protein_details(feat);
        }
    }

    // Pass 4: everything not handled above or elsewhere.
    for feat in &self.raw.features {
        let type_name = feat.feat_type.name.as_str();
        if TRANSCRIPT_FEATURE_TYPES.contains(&type_name) ||
            TRANSCRIPT_PART_TYPES.contains(&type_name) ||
            HANDLED_FEATURE_TYPES.contains(&type_name) {
                continue;
            }
        // for now, ignore features without locations
        if !feat.featurelocs.borrow().is_empty() {
            let feature_short = make_feature_short(&self.chromosomes, feat);
            self.other_features.insert(feat.uniquename.clone(), feature_short);
        }
    }
}
// For every cvtermpath whose object is an "interesting" parent (per the
// config) of its subject, record the parent on the subject's
// TermDetails.  Panics if a path has no relation type.
fn add_interesting_parents(&mut self) {
    let mut interesting_parents_by_termid: HashMap<String, HashSet<String>> =
        HashMap::new();

    for cvtermpath in &self.raw.cvtermpaths {
        let subject_term = &cvtermpath.subject;
        let subject_termid = subject_term.termid();
        let object_term = &cvtermpath.object;
        let object_termid = object_term.termid();

        let rel_termid = match cvtermpath.rel_type {
            Some(ref rel_type) => rel_type.termid(),
            None => panic!("no relation type for {} <-> {}\n",
                           &subject_term.name, &object_term.name),
        };
        let rel_term_name = self.make_term_short(&rel_termid).name;

        if self.is_interesting_parent(&object_termid, &rel_term_name) {
            interesting_parents_by_termid
                .entry(subject_termid)
                .or_default()
                .insert(object_termid);
        }
    }

    for (termid, interesting_parents) in interesting_parents_by_termid {
        let term_details = self.terms.get_mut(&termid).unwrap();
        term_details.interesting_parents = interesting_parents;
    }
}
// Store the details of every allele feature.
fn process_allele_features(&mut self) {
    for feat in self.raw.features.iter().filter(|f| f.feat_type.name == "allele") {
        self.store_allele_details(feat);
    }
}
// Store the details of every genotype feature.
fn process_genotype_features(&mut self) {
    for feat in self.raw.features.iter().filter(|f| f.feat_type.name == "genotype") {
        self.store_genotype_details(feat);
    }
}
// Fill in gene_neighbourhood for every located gene: up to
// GENE_NEIGHBOURHOOD_DISTANCE genes on each side (same chromosome only),
// in chromosome order, with the gene itself included in the middle.
fn add_gene_neighbourhoods(&mut self) {
    // Pairs a gene uniquename with its location, for sorting.
    struct GeneAndLoc {
        gene_uniquename: String,
        loc: ChromosomeLocation,
    };
    let mut genes_and_locs: Vec<GeneAndLoc> = vec![];

    // Genes without a location have no neighbourhood.
    for gene_details in self.genes.values() {
        if let Some(ref location) = gene_details.location {
            genes_and_locs.push(GeneAndLoc {
                gene_uniquename: gene_details.uniquename.clone(),
                loc: location.clone(),
            });
        }
    }

    // Sort by chromosome name, then by start position.
    let cmp = |a: &GeneAndLoc, b: &GeneAndLoc| {
        let order = a.loc.chromosome_name.cmp(&b.loc.chromosome_name);
        if order == Ordering::Equal {
            a.loc.start_pos.cmp(&b.loc.start_pos)
        } else {
            order
        }
    };

    genes_and_locs.sort_by(cmp);

    for (i, this_gene_and_loc) in genes_and_locs.iter().enumerate() {
        let mut nearby_genes: Vec<GeneShort> = vec![];
        if i > 0 {
            // Walk backwards up to GENE_NEIGHBOURHOOD_DISTANCE genes,
            // stopping at a chromosome boundary; insert at the front so
            // nearby_genes stays in chromosome order.
            let start_index =
                if i > GENE_NEIGHBOURHOOD_DISTANCE {
                    i - GENE_NEIGHBOURHOOD_DISTANCE
                } else {
                    0
                };

            for back_index in (start_index..i).rev() {
                let back_gene_and_loc = &genes_and_locs[back_index];

                if back_gene_and_loc.loc.chromosome_name !=
                    this_gene_and_loc.loc.chromosome_name {
                        break;
                    }
                let back_gene_short = self.make_gene_short(&back_gene_and_loc.gene_uniquename);
                nearby_genes.insert(0, back_gene_short);
            }
        }

        // The gene itself sits between its neighbours.
        let gene_short = self.make_gene_short(&this_gene_and_loc.gene_uniquename);
        nearby_genes.push(gene_short);

        if i < genes_and_locs.len() - 1 {
            // Walk forwards, again stopping at a chromosome boundary.
            // end_index is exclusive, hence the + 1.
            let end_index =
                if i + GENE_NEIGHBOURHOOD_DISTANCE >= genes_and_locs.len() {
                    genes_and_locs.len()
                } else {
                    i + GENE_NEIGHBOURHOOD_DISTANCE + 1
                };

            for forward_index in i+1..end_index {
                let forward_gene_and_loc = &genes_and_locs[forward_index];

                if forward_gene_and_loc.loc.chromosome_name !=
                    this_gene_and_loc.loc.chromosome_name {
                        break;
                    }
                let forward_gene_short = self.make_gene_short(&forward_gene_and_loc.gene_uniquename);
                nearby_genes.push(forward_gene_short);
            }
        }

        let this_gene_details =
            self.genes.get_mut(&this_gene_and_loc.gene_uniquename).unwrap();
        this_gene_details.gene_neighbourhood.append(&mut nearby_genes);
    }
}
// add interaction, ortholog and paralog annotations
fn process_annotation_feature_rels(&mut self) {
for feature_rel in &self.raw.feature_relationships {
let rel_name = &feature_rel.rel_type.name;
let subject_uniquename = &feature_rel.subject.uniquename;
let object_uniquename = &feature_rel.object.uniquename;
for rel_config in &FEATURE_REL_CONFIGS {
if rel_name == rel_config.rel_type_name &&
is_gene_type(&feature_rel.subject.feat_type.name) &&
is_gene_type(&feature_rel.object.feat_type.name) {
let mut evidence: Option<Evidence> = None;
let mut is_inferred_interaction: bool = false;
let borrowed_publications = feature_rel.publications.borrow();
let maybe_publication = borrowed_publications.get(0);
let maybe_reference_uniquename =
match maybe_publication {
Some(publication) => Some(publication.uniquename.clone()),
None => None,
};
for prop in feature_rel.feature_relationshipprops.borrow().iter() {
if prop.prop_type.name == "evidence" {
if let Some(ref evidence_long) = prop.value {
for (evidence_code, ev_details) in &self.config.evidence_types {
if &ev_details.long == evidence_long {
evidence = Some(evidence_code.clone());
}
}
if evidence.is_none() {
evidence = Some(evidence_long.clone());
}
}
}
if prop.prop_type.name == "is_inferred" {
if let Some(is_inferred_value) = prop.value.clone() {
if is_inferred_value == "yes" {
is_inferred_interaction = true;
}
}
}
}
let evidence_clone = evidence.clone();
let gene_uniquename = subject_uniquename;
let gene_organism_taxonid = {
self.genes[subject_uniquename].taxonid
};
let other_gene_uniquename = object_uniquename;
let other_gene_organism_taxonid = {
self.genes[object_uniquename].taxonid
};
match rel_config.annotation_type {
FeatureRelAnnotationType::Interaction =>
if !is_inferred_interaction {
let interaction_annotation =
InteractionAnnotation {
gene_uniquename: gene_uniquename.clone(),
interactor_uniquename: other_gene_uniquename.clone(),
evidence,
reference_uniquename: maybe_reference_uniquename.clone(),
};
{
let gene_details = self.genes.get_mut(subject_uniquename).unwrap();
if rel_name == "interacts_physically" {
gene_details.physical_interactions.push(interaction_annotation.clone());
} else {
if rel_name == "interacts_genetically" {
gene_details.genetic_interactions.push(interaction_annotation.clone());
} else {
panic!("unknown interaction type: {}", rel_name);
}
};
}
if gene_uniquename != other_gene_uniquename {
let other_gene_details = self.genes.get_mut(object_uniquename).unwrap();
if rel_name == "interacts_physically" {
other_gene_details.physical_interactions.push(interaction_annotation.clone());
} else {
if rel_name == "interacts_genetically" {
other_gene_details.genetic_interactions.push(interaction_annotation.clone());
} else {
panic!("unknown interaction type: {}", rel_name);
}
};
}
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if rel_name == "interacts_physically" {
ref_details.physical_interactions.push(interaction_annotation.clone());
} else {
if rel_name == "interacts_genetically" {
ref_details.genetic_interactions.push(interaction_annotation.clone());
} else {
panic!("unknown interaction type: {}", rel_name);
}
};
}
},
FeatureRelAnnotationType::Ortholog => {
let ortholog_annotation =
OrthologAnnotation {
gene_uniquename: gene_uniquename.clone(),
ortholog_uniquename: other_gene_uniquename.clone(),
ortholog_taxonid: other_gene_organism_taxonid,
evidence,
reference_uniquename: maybe_reference_uniquename.clone(),
};
let gene_details = self.genes.get_mut(subject_uniquename).unwrap();
gene_details.ortholog_annotations.push(ortholog_annotation.clone());
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if self.config.load_organism_taxonid == gene_details.taxonid {
ref_details.ortholog_annotations.push(ortholog_annotation);
}
}
},
FeatureRelAnnotationType::Paralog => {
let paralog_annotation =
ParalogAnnotation {
gene_uniquename: gene_uniquename.clone(),
paralog_uniquename: other_gene_uniquename.clone(),
evidence,
reference_uniquename: maybe_reference_uniquename.clone(),
};
let gene_details = self.genes.get_mut(subject_uniquename).unwrap();
gene_details.paralog_annotations.push(paralog_annotation.clone());
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if self.config.load_organism_taxonid == gene_details.taxonid {
ref_details.paralog_annotations.push(paralog_annotation);
}
}
}
}
// for orthologs and paralogs, store the reverse annotation too
let other_gene_details = self.genes.get_mut(object_uniquename).unwrap();
match rel_config.annotation_type {
FeatureRelAnnotationType::Interaction => {},
FeatureRelAnnotationType::Ortholog => {
let ortholog_annotation =
OrthologAnnotation {
gene_uniquename: other_gene_uniquename.clone(),
ortholog_uniquename: gene_uniquename.clone(),
ortholog_taxonid: gene_organism_taxonid,
evidence: evidence_clone,
reference_uniquename: maybe_reference_uniquename.clone(),
};
other_gene_details.ortholog_annotations.push(ortholog_annotation.clone());
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if self.config.load_organism_taxonid == other_gene_details.taxonid {
ref_details.ortholog_annotations.push(ortholog_annotation);
}
}
},
FeatureRelAnnotationType::Paralog => {
let paralog_annotation =
ParalogAnnotation {
gene_uniquename: other_gene_uniquename.clone(),
paralog_uniquename: gene_uniquename.clone(),
evidence: evidence_clone,
reference_uniquename: maybe_reference_uniquename.clone(),
};
other_gene_details.paralog_annotations.push(paralog_annotation.clone());
if let Some(ref_details) =
if let Some(ref reference_uniquename) = maybe_reference_uniquename {
self.references.get_mut(reference_uniquename)
} else {
None
}
{
if self.config.load_organism_taxonid == other_gene_details.taxonid {
ref_details.paralog_annotations.push(paralog_annotation);
}
}
},
}
}
}
}
for ref_details in self.references.values_mut() {
ref_details.physical_interactions.sort();
ref_details.genetic_interactions.sort();
ref_details.ortholog_annotations.sort();
ref_details.paralog_annotations.sort();
}
for gene_details in self.genes.values_mut() {
gene_details.physical_interactions.sort();
gene_details.genetic_interactions.sort();
gene_details.ortholog_annotations.sort();
gene_details.paralog_annotations.sort();
}
}
// Return the first extension_display_names config entry that applies to the
// given annotation term and relation type name, or None if no entry matches.
// Panics if the annotation term is unknown.
fn matching_ext_config(&self, annotation_termid: &str,
                       rel_type_name: &str) -> Option<ExtensionDisplayNames> {
    let term_details = match self.terms.get(annotation_termid) {
        Some(details) => details,
        None => panic!("can't find details for term: {}\n", annotation_termid),
    };
    for ext_config in &self.config.extension_display_names {
        if ext_config.rel_name != rel_type_name {
            continue;
        }
        match ext_config.if_descendent_of {
            // conditional entry: only applies when the annotation term is a
            // descendent of the configured ancestor term
            Some(ref ancestor_termid) => {
                if term_details.interesting_parents.contains(ancestor_termid) {
                    return Some(ext_config.clone());
                }
            },
            // unconditional entry: always applies for this relation name
            None => return Some(ext_config.clone()),
        }
    }
    None
}
// Create and return any TargetOfAnnotations implied by the extension: for
// each "gene" extension range whose relation is configured with a reciprocal
// display name, emit (target gene uniquename, TargetOfAnnotation) describing
// the reverse direction of the annotation.
fn make_target_of_for_ext(&self, cv_name: &str,
                          genes: &[String],
                          maybe_genotype_uniquename: &Option<String>,
                          reference_uniquename: &Option<String>,
                          annotation_termid: &str,
                          extension: &[ExtPart]) -> Vec<(GeneUniquename, TargetOfAnnotation)> {
    let mut ret_vec = vec![];
    for ext_part in extension {
        // look up the config before checking the range so a missing term
        // panics for every extension part, matching the previous behaviour
        let maybe_ext_config =
            self.matching_ext_config(annotation_termid, &ext_part.rel_type_name);
        if let ExtRange::Gene(ref target_gene_uniquename) = ext_part.ext_range {
            if let Some(ext_config) = maybe_ext_config {
                if let Some(reciprocal_display_name) = ext_config.reciprocal_display {
                    // cloning a None genotype uniquename yields None, so the
                    // old is_some()/else branches (which built identical
                    // tuples) collapse to a single push
                    ret_vec.push(((*target_gene_uniquename).clone(),
                                  TargetOfAnnotation {
                                      ontology_name: cv_name.into(),
                                      ext_rel_display_name: reciprocal_display_name,
                                      genes: genes.to_vec(),
                                      genotype_uniquename: maybe_genotype_uniquename.clone(),
                                      reference_uniquename: reference_uniquename.clone(),
                                  }));
                }
            }
        }
    }
    ret_vec
}
// Collect the TargetOfAnnotations implied by annotation extensions on every
// term annotation, then store them on the target genes.  Collection and
// storage are separate passes because self.genes is read while scanning and
// mutated when storing.
fn add_target_of_annotations(&mut self) {
    let mut target_of_annotations: HashMap<GeneUniquename, HashSet<TargetOfAnnotation>> =
        HashMap::new();
    for term_details in self.terms.values() {
        for term_annotations in term_details.cv_annotations.values() {
            for term_annotation in term_annotations {
                'ANNOTATION: for annotation_id in &term_annotation.annotations {
                    let annotation = self.annotation_details
                        .get(&annotation_id).expect("can't find OntAnnotationDetail");
                    if let Some(ref genotype_uniquename) = annotation.genotype {
                        let genotype = &self.genotypes[genotype_uniquename];
                        if genotype.expressed_alleles.len() > 1 {
                            // multi-allele genotypes don't imply target-of
                            // annotations: skip only this annotation.  The
                            // previous `break 'ANNOTATION` also discarded all
                            // remaining annotations of this term, making the
                            // result depend on iteration order.
                            continue 'ANNOTATION;
                        }
                    }
                    let new_annotations =
                        self.make_target_of_for_ext(&term_details.cv_name,
                                                    &annotation.genes,
                                                    &annotation.genotype,
                                                    &annotation.reference,
                                                    &term_details.termid,
                                                    &annotation.extension);
                    for (target_gene_uniquename, new_annotation) in new_annotations {
                        target_of_annotations
                            .entry(target_gene_uniquename.clone())
                            .or_insert_with(HashSet::new)
                            .insert(new_annotation);
                    }
                }
            }
        }
    }
    // second pass: move the collected annotation sets into the GeneDetails
    for (gene_uniquename, annotations) in target_of_annotations {
        let gene_details = self.genes.get_mut(&gene_uniquename).unwrap();
        gene_details.target_of_annotations = annotations.into_iter().collect();
    }
}
// Set the deletion_viability field of every gene, based on its single-allele
// phenotype annotations of deletion alleles: Viable if only viability
// annotations were found, Inviable if only inviability, DependsOnConditions
// when both occur (under different conditions), Unknown otherwise.
fn set_deletion_viability(&mut self) {
    // new status per gene, applied to the GeneDetails in a second pass below
    let mut gene_statuses = HashMap::new();
    // canonical string form of a condition set: sorted and space-joined so
    // that equal sets of condition termids always produce the same key
    let condition_string =
        |condition_ids: HashSet<String>| {
            let mut ids_vec: Vec<String> = condition_ids.iter().cloned().collect();
            ids_vec.sort();
            ids_vec.join(" ")
        };
    let viable_termid = &self.config.viability_terms.viable;
    let inviable_termid = &self.config.viability_terms.inviable;
    for (gene_uniquename, gene_details) in &mut self.genes {
        let mut new_status = DeletionViability::Unknown;
        if let Some(single_allele_term_annotations) =
            gene_details.cv_annotations.get("single_allele_phenotype") {
                // condition-set string -> the termid that was annotated
                // under those conditions
                let mut viable_conditions: HashMap<String, TermId> = HashMap::new();
                let mut inviable_conditions: HashMap<String, TermId> = HashMap::new();
                for term_annotation in single_allele_term_annotations {
                    'ANNOTATION: for annotation_id in &term_annotation.annotations {
                        let annotation = self.annotation_details
                            .get(&annotation_id).expect("can't find OntAnnotationDetail");
                        // single-allele phenotype annotations always have a
                        // genotype, so unwrap is safe here by construction
                        let genotype_uniquename = annotation.genotype.as_ref().unwrap();
                        let genotype = &self.genotypes[genotype_uniquename];
                        let allele_uniquename =
                            genotype.expressed_alleles[0].allele_uniquename.clone();
                        let allele = &self.alleles[&allele_uniquename];
                        // only deletion alleles contribute to viability status
                        if allele.allele_type != "deletion" {
                            continue 'ANNOTATION;
                        }
                        let term = &self.terms[&term_annotation.term];
                        let interesting_parents = &term.interesting_parents;
                        let conditions_as_string =
                            condition_string(annotation.conditions.clone());
                        // a term counts as (in)viable if it is the configured
                        // term itself or has it as an interesting parent
                        if interesting_parents.contains(viable_termid) ||
                            *viable_termid == term_annotation.term {
                                viable_conditions.insert(conditions_as_string,
                                                         term_annotation.term.clone());
                            } else {
                                if interesting_parents.contains(inviable_termid) ||
                                    *inviable_termid == term_annotation.term {
                                        inviable_conditions.insert(conditions_as_string,
                                                                   term_annotation.term.clone());
                                    }
                            }
                    }
                }
                if viable_conditions.is_empty() {
                    if !inviable_conditions.is_empty() {
                        new_status = DeletionViability::Inviable;
                    }
                } else {
                    if inviable_conditions.is_empty() {
                        new_status = DeletionViability::Viable;
                    } else {
                        new_status = DeletionViability::DependsOnConditions;
                        // diagnostic: warn when the same condition set is
                        // annotated as both viable and inviable
                        let viable_conditions_set: HashSet<String> =
                            viable_conditions.keys().cloned().collect();
                        let inviable_conditions_set: HashSet<String> =
                            inviable_conditions.keys().cloned().collect();
                        let intersecting_conditions =
                            viable_conditions_set.intersection(&inviable_conditions_set);
                        if intersecting_conditions.clone().count() > 0 {
                            println!("{} is viable and inviable with", gene_uniquename);
                            for cond in intersecting_conditions {
                                if cond.is_empty() {
                                    println!("  no conditions");
                                } else {
                                    println!("  conditions: {}", cond);
                                }
                                println!("   viable term: {}",
                                         viable_conditions[cond]);
                                println!("   inviable term: {}",
                                         inviable_conditions[cond]);
                            }
                        }
                    }
                }
            }
        gene_statuses.insert(gene_uniquename.clone(), new_status);
    }
    // second pass: write the computed statuses back into the GeneDetails
    for (gene_uniquename, status) in &gene_statuses {
        if let Some(ref mut gene_details) = self.genes.get_mut(gene_uniquename) {
            gene_details.deletion_viability = status.clone();
        }
    }
}
// Tag the TermDetails of every configured GO slim term with the
// "goslim_pombe" subset name.
fn set_term_details_subsets(&mut self) {
    for go_slim_conf in self.config.go_slim_terms.clone() {
        let slim_termid = &go_slim_conf.termid;
        for term_details in self.terms.values_mut() {
            if term_details.termid == *slim_termid {
                term_details.subsets.push("goslim_pombe".into());
                // this slim term is handled, move on to the next one.  The
                // previous `break 'TERM` broke out of the labelled *outer*
                // loop, so only the first configured slim term was tagged.
                break;
            }
        }
    }
}
// Build a map from gene uniquename to its GeneShort summary for every gene.
fn make_gene_short_map(&self) -> IdGeneShortMap {
    let mut ret_map = HashMap::new();
    for uniquename in self.genes.keys() {
        let gene_short = make_gene_short(&self.genes, uniquename);
        ret_map.insert(uniquename.clone(), gene_short);
    }
    ret_map
}
// Create the CV annotation summaries for all terms, genes, genotypes and
// references.  The two boolean flags control whether the summary includes
// child terms and whether it includes the gene list.
fn make_all_cv_summaries(&mut self) {
    let gene_short_map = self.make_gene_short_map();
    // consistency fix: pass `&self.config` on every call (the first call
    // previously passed `self.config` bare, unlike its three siblings)
    for term_details in self.terms.values_mut() {
        make_cv_summaries(term_details, &self.config, &self.children_by_termid,
                          true, true, &gene_short_map, &self.annotation_details);
    }
    for gene_details in self.genes.values_mut() {
        make_cv_summaries(gene_details, &self.config, &self.children_by_termid,
                          false, true, &gene_short_map, &self.annotation_details);
    }
    for genotype_details in self.genotypes.values_mut() {
        make_cv_summaries(genotype_details, &self.config, &self.children_by_termid,
                          false, false, &gene_short_map, &self.annotation_details);
    }
    for reference_details in self.references.values_mut() {
        make_cv_summaries(reference_details, &self.config, &self.children_by_termid,
                          true, true, &gene_short_map, &self.annotation_details);
    }
}
// Create a TermDetails for every raw cvterm that isn't an annotation
// extension term, and store it in self.terms keyed by termid.
fn process_cvterms(&mut self) {
    for cvterm in &self.raw.cvterms {
        // extension terms are handled separately by process_extension_cvterms()
        if cvterm.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            continue;
        }
        let cv_config = self.config.cv_config_by_name(&cvterm.cv.name);
        let annotation_feature_type = cv_config.feature_type.clone();
        let synonyms: Vec<_> =
            cvterm.cvtermsynonyms.borrow().iter()
            .map(|syn| SynonymDetails {
                synonym_type: syn.synonym_type.name.clone(),
                name: syn.name.clone(),
            })
            .collect();
        let term_details = TermDetails {
            name: cvterm.name.clone(),
            cv_name: cvterm.cv.name.clone(),
            annotation_feature_type,
            interesting_parents: HashSet::new(),
            subsets: vec![],
            termid: cvterm.termid(),
            synonyms,
            definition: cvterm.definition.clone(),
            direct_ancestors: vec![],
            genes_annotated_with: HashSet::new(),
            is_obsolete: cvterm.is_obsolete,
            single_allele_genotype_uniquenames: HashSet::new(),
            cv_annotations: HashMap::new(),
            genes_by_uniquename: HashMap::new(),
            genotypes_by_uniquename: HashMap::new(),
            alleles_by_uniquename: HashMap::new(),
            references_by_uniquename: HashMap::new(),
            terms_by_termid: HashMap::new(),
            annotation_details: HashMap::new(),
            gene_count: 0,
            genotype_count: 0,
        };
        self.terms.insert(cvterm.termid(), term_details);
    }
}
// Return the display name for an extension relation: the configured display
// name when there is a matching extension_display_names entry, otherwise the
// relation name with underscores replaced by spaces.
fn get_ext_rel_display_name(&self, annotation_termid: &str,
                            ext_rel_name: &str) -> String {
    match self.matching_ext_config(annotation_termid, ext_rel_name) {
        // the config is returned by value, so no clone is needed
        Some(ext_conf) => ext_conf.display_name,
        None => ext_rel_name.replace("_", " "),
    }
}
// For each annotation extension cvterm, turn its "annotation_ext_rel-*"
// cvtermprops into ExtParts recorded in self.parts_of_extensions.  The
// prop value becomes the extension range: "SP..." values are genes (or
// promoters when they end in "-promoter"), anything else is Misc.
fn process_extension_cvterms(&mut self) {
    for cvterm in &self.raw.cvterms {
        if cvterm.cv.name != POMBASE_ANN_EXT_TERM_CV_NAME {
            continue;
        }
        for cvtermprop in cvterm.cvtermprops.borrow().iter() {
            let prop_type_name = &cvtermprop.prop_type.name;
            if !prop_type_name.starts_with(ANNOTATION_EXT_REL_PREFIX) {
                continue;
            }
            // the relation name is whatever follows the prefix
            let ext_rel_name =
                String::from(&prop_type_name[ANNOTATION_EXT_REL_PREFIX.len()..]);
            let ext_range_value = cvtermprop.value.clone();
            let range: ExtRange = if ext_range_value.starts_with("SP") {
                if ext_range_value.ends_with("-promoter") {
                    ExtRange::Promoter(ext_range_value)
                } else {
                    ExtRange::Gene(ext_range_value)
                }
            } else {
                ExtRange::Misc(ext_range_value)
            };
            let base_termid =
                match self.base_term_of_extensions.get(&cvterm.termid()) {
                    Some(base_termid) => base_termid,
                    None => panic!("can't find details for term: {}\n", cvterm.termid()),
                };
            let rel_type_display_name =
                self.get_ext_rel_display_name(base_termid, &ext_rel_name);
            self.parts_of_extensions.entry(cvterm.termid())
                .or_insert_with(Vec::new).push(ExtPart {
                    rel_type_name: ext_rel_name,
                    rel_type_display_name,
                    ext_range: range,
                });
        }
    }
}
// Process the raw cvterm relationships in two passes.  Pass one: for
// extension terms, record the is_a base term; for ordinary terms, record the
// direct ancestors.  Pass two (after all base terms are known): turn every
// non-is_a relation of an extension term into an ExtPart.
fn process_cvterm_rels(&mut self) {
    // first pass: base terms of extension terms, direct ancestors of others
    for cvterm_rel in &self.raw.cvterm_relationships {
        let subject_term = &cvterm_rel.subject;
        let object_term = &cvterm_rel.object;
        let rel_type = &cvterm_rel.rel_type;
        if subject_term.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            let subject_termid = subject_term.termid();
            // the is_a parent of an extension term is its base term
            if rel_type.name == "is_a" {
                self.base_term_of_extensions.insert(subject_termid.clone(),
                                                    object_term.termid().clone());
            }
        } else {
            let object_term_short =
                self.make_term_short(&object_term.termid());
            if let Some(ref mut subject_term_details) = self.terms.get_mut(&subject_term.termid()) {
                subject_term_details.direct_ancestors.push(TermAndRelation {
                    termid: object_term_short.termid.clone(),
                    term_name: object_term_short.name.clone(),
                    relation_name: rel_type.name.clone(),
                });
            }
        }
    }
    // second pass: needs base_term_of_extensions fully populated, so it
    // can't be merged with the first pass
    for cvterm_rel in &self.raw.cvterm_relationships {
        let subject_term = &cvterm_rel.subject;
        let object_term = &cvterm_rel.object;
        let rel_type = &cvterm_rel.rel_type;
        if subject_term.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            let subject_termid = subject_term.termid();
            if rel_type.name != "is_a" {
                if let Some(base_termid) =
                    self.base_term_of_extensions.get(&subject_term.termid()) {
                        let rel_type_display_name =
                            self.get_ext_rel_display_name(base_termid, &rel_type.name);
                        self.parts_of_extensions.entry(subject_termid)
                            .or_insert_with(Vec::new).push(ExtPart {
                                rel_type_name: rel_type.name.clone(),
                                rel_type_display_name,
                                ext_range: ExtRange::Term(object_term.termid().clone()),
                            });
                    } else {
                        // NOTE(review): the failed lookup is for the subject
                        // term's base term, but this message prints the
                        // *object* termid — confirm which id was intended
                        panic!("can't find details for {}\n", object_term.termid());
                    }
            }
        }
    }
}
fn process_feature_synonyms(&mut self) {
for feature_synonym in &self.raw.feature_synonyms {
let feature = &feature_synonym.feature;
let synonym = &feature_synonym.synonym;
if let Some(ref mut gene_details) = self.genes.get_mut(&feature.uniquename) {
gene_details.synonyms.push(SynonymDetails {
name: synonym.name.clone(),
synonym_type: synonym.synonym_type.name.clone()
});
}
}
}
// Build a GenotypeShort summary for the given genotype display name.
// Panics if the genotype is unknown.
fn make_genotype_short(&self, genotype_display_name: &str) -> GenotypeShort {
    match self.genotypes.get(genotype_display_name) {
        Some(details) =>
            GenotypeShort {
                display_uniquename: details.display_uniquename.clone(),
                name: details.name.clone(),
                expressed_alleles: details.expressed_alleles.clone(),
            },
        None => panic!("can't find genotype {}", genotype_display_name),
    }
}
// Return a copy of the AlleleShort for the given allele uniquename.
// Panics (via indexing) if the allele is unknown.
fn make_allele_short(&self, allele_uniquename: &str) -> AlleleShort {
    let allele_short = &self.alleles[allele_uniquename];
    allele_short.clone()
}
// process feature properties stored as cvterms,
// eg. characterisation_status and product
fn process_props_from_feature_cvterms(&mut self) {
    for feature_cvterm in &self.raw.feature_cvterms {
        let feature = &feature_cvterm.feature;
        let cvterm = &feature_cvterm.cvterm;
        // for gene product terms, resolve the annotated feature back to the
        // gene(s) it belongs to; all other CVs contribute no genes here
        let gene_uniquenames_vec: Vec<GeneUniquename> =
            if cvterm.cv.name == "PomBase gene products" {
                if feature.feat_type.name == "polypeptide" {
                    // polypeptide -> transcript -> gene
                    self.transcripts_of_polypeptides.get(&feature.uniquename)
                        .and_then(|transcript_uniquename|
                                  self.genes_of_transcripts.get(transcript_uniquename))
                        .map(|gene_uniquename| vec![gene_uniquename.clone()])
                        .unwrap_or_else(Vec::new)
                } else if TRANSCRIPT_FEATURE_TYPES.contains(&feature.feat_type.name.as_str()) {
                    // transcript -> gene
                    self.genes_of_transcripts.get(&feature.uniquename)
                        .map(|gene_uniquename| vec![gene_uniquename.clone()])
                        .unwrap_or_else(Vec::new)
                } else if feature.feat_type.name == "gene" {
                    vec![feature.uniquename.clone()]
                } else {
                    vec![]
                }
            } else {
                vec![]
            };
        for gene_uniquename in &gene_uniquenames_vec {
            self.add_gene_product(gene_uniquename, &cvterm.name);
        }
        // characterisation status and name description apply directly to
        // gene and pseudogene features
        if feature.feat_type.name == "gene" || feature.feat_type.name == "pseudogene" {
            if cvterm.cv.name == "PomBase gene characterisation status" {
                self.add_characterisation_status(&feature.uniquename, &cvterm.name);
            } else if cvterm.cv.name == "name_description" {
                self.add_name_description(&feature.uniquename, &cvterm.name);
            }
        }
    }
}
// Build an "active_form" extension part from a gene product form value:
// "PR:..." identifiers become GeneProduct ranges, anything else is Misc.
fn get_gene_prod_extension(&self, prod_value: &str) -> ExtPart {
    let ext_range = match prod_value.starts_with("PR:") {
        true => ExtRange::GeneProduct(prod_value.to_owned()),
        false => ExtRange::Misc(prod_value.to_owned()),
    };
    ExtPart {
        rel_type_name: "active_form".into(),
        rel_type_display_name: "active form".into(),
        ext_range,
    }
}
// return a fake extension for "with" properties on protein binding annotations
fn get_with_extension(&self, with_value: &str) -> ExtPart {
    // NOTE(review): the "SP%" prefix check is reproduced as-is — confirm the
    // literal '%' is intended rather than a plain "SP" prefix
    let ext_range =
        if with_value.starts_with("SP%") {
            ExtRange::Gene(with_value.to_owned())
        } else if with_value.starts_with("PomBase:SP") {
            // strip the 8-character "PomBase:" prefix to get the uniquename
            let gene_uniquename = String::from(&with_value[8..]);
            ExtRange::Gene(gene_uniquename)
        } else if with_value.to_lowercase().starts_with("pfam:") {
            ExtRange::Domain(with_value.to_owned())
        } else {
            ExtRange::Misc(with_value.to_owned())
        };

    // a with property on a protein binding (GO:0005515) is
    // displayed as a binds extension
    // https://github.com/pombase/website/issues/108
    ExtPart {
        rel_type_name: "binds".into(),
        rel_type_display_name: "binds".into(),
        ext_range,
    }
}
// Classify a with/from value: a known gene (after stripping the database
// prefix) becomes a Gene, a known termid becomes a Term, anything else is
// kept verbatim as an Identifier.
fn make_with_or_from_value(&self, with_or_from_value: String) -> WithFromValue {
    let db_prefix_patt = format!("^{}:", DB_NAME);
    let re = Regex::new(&db_prefix_patt).unwrap();
    let gene_uniquename: String = re.replace_all(&with_or_from_value, "").into();
    if self.genes.contains_key(&gene_uniquename) {
        let gene_short = self.make_gene_short(&gene_uniquename);
        WithFromValue::Gene(gene_short)
    } else if self.terms.contains_key(&with_or_from_value) {
        WithFromValue::Term(self.make_term_short(&with_or_from_value))
    } else {
        WithFromValue::Identifier(with_or_from_value)
    }
}
// add the with value as a fake extension if the cvterm is_a protein binding,
// otherwise return the value
fn make_with_extension(&self, termid: &str, evidence_code: Option<String>,
                       extension: &mut Vec<ExtPart>,
                       with_value: String) -> Option<WithFromValue> {
    // resolve extension terms to their base (non-extension) term
    let base_termid =
        match self.base_term_of_extensions.get(termid) {
            Some(base_termid) => base_termid.clone(),
            None => termid.to_owned(),
        };
    let base_term_short = self.make_term_short(&base_termid);

    // replaces the is_some()+unwrap() anti-pattern with map_or
    let is_ipi = evidence_code.map_or(false, |ev| ev == "IPI");
    // add new IDs to the interesting_parents config
    if is_ipi &&
        (base_term_short.termid == "GO:0005515" ||
         base_term_short.interesting_parents.contains("GO:0005515") ||
         base_term_short.termid == "GO:0003723" ||
         base_term_short.interesting_parents.contains("GO:0003723")) {
        extension.push(self.get_with_extension(&with_value));
        None
    } else {
        Some(self.make_with_or_from_value(with_value))
    }
}
// process annotation
// Build an OntAnnotationDetail from every raw feature_cvterm (except the
// gene-property CVs handled elsewhere) and pass it to add_annotation().
fn process_feature_cvterms(&mut self) {
    for feature_cvterm in &self.raw.feature_cvterms {
        let feature = &feature_cvterm.feature;
        let cvterm = &feature_cvterm.cvterm;
        let mut extension = vec![];
        // these CVs are feature properties, already handled by
        // process_props_from_feature_cvterms()
        if cvterm.cv.name == "PomBase gene characterisation status" ||
            cvterm.cv.name == "PomBase gene products" ||
            cvterm.cv.name == "name_description" {
                continue;
            }
        let publication = &feature_cvterm.publication;
        let mut extra_props: HashMap<String, String> = HashMap::new();
        let mut conditions: HashSet<TermId> = HashSet::new();
        let mut withs: Vec<WithFromValue> = vec![];
        let mut froms: Vec<WithFromValue> = vec![];
        let mut qualifiers: Vec<Qualifier> = vec![];
        let mut evidence: Option<String> = None;
        let mut genotype_background: Option<String> = None;

        // need to get evidence first as it's used later
        // See: https://github.com/pombase/website/issues/455
        for prop in feature_cvterm.feature_cvtermprops.borrow().iter() {
            if &prop.type_name() == "evidence" {
                if let Some(ref evidence_long) = prop.value {
                    // map the long evidence description to its short code;
                    // fall back to storing the long form unchanged
                    for (evidence_code, ev_details) in &self.config.evidence_types {
                        if &ev_details.long == evidence_long {
                            evidence = Some(evidence_code.clone());
                        }
                    }
                    if evidence.is_none() {
                        evidence = Some(evidence_long.clone());
                    }
                }
            }
        }

        // second pass over the props: collect everything else
        for prop in feature_cvterm.feature_cvtermprops.borrow().iter() {
            match &prop.type_name() as &str {
                "residue" | "scale" |
                "quant_gene_ex_copies_per_cell" |
                "quant_gene_ex_avg_copies_per_cell" => {
                    if let Some(value) = prop.value.clone() {
                        extra_props.insert(prop.type_name().clone(), value);
                    }
                },
                "condition" =>
                    if let Some(value) = prop.value.clone() {
                        conditions.insert(value.clone());
                    },
                "qualifier" =>
                    if let Some(value) = prop.value.clone() {
                        qualifiers.push(value);
                    },
                "with" => {
                    if let Some(with_value) = prop.value.clone() {
                        // may be turned into a fake "binds" extension
                        // instead of a with value; see make_with_extension()
                        if let Some(with_gene_short) =
                            self.make_with_extension(&cvterm.termid(), evidence.clone(),
                                                     &mut extension, with_value) {
                                withs.push(with_gene_short);
                            }
                    }
                },
                "from" => {
                    if let Some(value) = prop.value.clone() {
                        froms.push(self.make_with_or_from_value(value));
                    }
                },
                "gene_product_form_id" => {
                    if let Some(value) = prop.value.clone() {
                        extension.push(self.get_gene_prod_extension(&value));
                    }
                },
                _ => ()
            }
        }

        // resolve the annotated feature to the gene(s) it belongs to
        let mut maybe_genotype_uniquename = None;
        let mut gene_uniquenames_vec: Vec<GeneUniquename> =
            match &feature.feat_type.name as &str {
                "polypeptide" => {
                    // polypeptide -> transcript -> gene
                    if let Some(transcript_uniquename) =
                        self.transcripts_of_polypeptides.get(&feature.uniquename) {
                            if let Some(gene_uniquename) =
                                self.genes_of_transcripts.get(transcript_uniquename) {
                                    vec![gene_uniquename.clone()]
                                } else {
                                    vec![]
                                }
                        } else {
                            vec![]
                        }
                },
                "genotype" => {
                    // a genotype annotation contributes the genes of all of
                    // its expressed alleles
                    let expressed_alleles =
                        &self.alleles_of_genotypes[&feature.uniquename];
                    let genotype_display_name =
                        make_genotype_display_name(&expressed_alleles, &self.alleles);
                    maybe_genotype_uniquename = Some(genotype_display_name.clone());
                    genotype_background =
                        self.genotype_backgrounds.get(&feature.uniquename).map(String::clone);
                    expressed_alleles.iter()
                        .map(|expressed_allele| {
                            let allele_short =
                                self.make_allele_short(&expressed_allele.allele_uniquename);
                            allele_short.gene_uniquename.clone()
                        })
                        .collect()
                },
                _ => {
                    if feature.feat_type.name == "gene" || feature.feat_type.name == "pseudogene" {
                        vec![feature.uniquename.clone()]
                    } else {
                        if TRANSCRIPT_FEATURE_TYPES.contains(&feature.feat_type.name.as_str()) {
                            if let Some(gene_uniquename) =
                                self.genes_of_transcripts.get(&feature.uniquename) {
                                    vec![gene_uniquename.clone()]
                                } else {
                                    vec![]
                                }
                        } else {
                            vec![]
                        }
                    }
                }
            };

        gene_uniquenames_vec.dedup();

        // normalise uniquenames via make_gene_short (panics on unknown genes)
        gene_uniquenames_vec =
            gene_uniquenames_vec.iter().map(|gene_uniquename: &String| {
                self.make_gene_short(&gene_uniquename).uniquename
            }).collect();

        // "null" is the placeholder publication for unreferenced annotations
        let reference_uniquename =
            if publication.uniquename == "null" {
                None
            } else {
                Some(publication.uniquename.clone())
            };

        // pull the quantitative gene expression props out of extra_props
        let mut extra_props_clone = extra_props.clone();
        let copies_per_cell = extra_props_clone.remove("quant_gene_ex_copies_per_cell");
        let avg_copies_per_cell = extra_props_clone.remove("quant_gene_ex_avg_copies_per_cell");
        let scale = extra_props_clone.remove("scale");

        let gene_ex_props =
            if copies_per_cell.is_some() || avg_copies_per_cell.is_some() {
                Some(GeneExProps {
                    copies_per_cell,
                    avg_copies_per_cell,
                    scale,
                })
            } else {
                None
            };

        // only genotype annotations may span multiple genes
        if gene_uniquenames_vec.len() > 1 && maybe_genotype_uniquename.is_none() {
            panic!("non-genotype annotation has more than one gene");
        }

        let annotation = OntAnnotationDetail {
            id: feature_cvterm.feature_cvterm_id,
            genes: gene_uniquenames_vec,
            reference: reference_uniquename.clone(),
            genotype: maybe_genotype_uniquename.clone(),
            genotype_background,
            withs: withs.clone(),
            froms: froms.clone(),
            residue: extra_props_clone.remove("residue"),
            gene_ex_props,
            qualifiers: qualifiers.clone(),
            evidence: evidence.clone(),
            conditions: conditions.clone(),
            extension: extension.clone(),
        };

        self.add_annotation(cvterm.borrow(), feature_cvterm.is_not,
                            annotation);
    }
}
// Wrap the given annotation detail ids in OntTermAnnotations, splitting them
// into display CV buckets: gene_ex annotations are split into qualitative vs
// quantitative, fission_yeast_phenotype into single- vs multi-allele, and
// every other CV passes through under its own name.
fn make_term_annotations(&self, termid: &str, detail_ids: &[OntAnnotationId],
                         is_not: bool)
                         -> Vec<(CvName, OntTermAnnotations)> {
    let term_details = &self.terms[termid];

    let cv_name = term_details.cv_name.clone();

    match cv_name.as_ref() {
        "gene_ex" => {
            if is_not {
                panic!("gene_ex annotations can't be NOT annotations");
            }
            let mut qual_annotations =
                OntTermAnnotations {
                    term: termid.to_owned(),
                    is_not: false,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                    summary: None,
                };
            let mut quant_annotations =
                OntTermAnnotations {
                    term: termid.to_owned(),
                    is_not: false,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                    summary: None,
                };
            // annotations with gene_ex_props are quantitative measurements
            for annotation_id in detail_ids {
                let annotation = self.annotation_details.
                    get(&annotation_id).expect("can't find OntAnnotationDetail");

                if annotation.gene_ex_props.is_some() {
                    quant_annotations.annotations.push(*annotation_id)
                } else {
                    qual_annotations.annotations.push(*annotation_id)
                }
            }

            // only emit a bucket when it is non-empty
            let mut return_vec = vec![];

            if !qual_annotations.annotations.is_empty() {
                return_vec.push((String::from("qualitative_gene_expression"),
                                 qual_annotations));
            }

            if !quant_annotations.annotations.is_empty() {
                return_vec.push((String::from("quantitative_gene_expression"),
                                 quant_annotations));
            }

            return_vec
        },
        "fission_yeast_phenotype" => {
            let mut single_allele =
                OntTermAnnotations {
                    term: termid.to_owned(),
                    is_not,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                    summary: None,
                };
            let mut multi_allele =
                OntTermAnnotations {
                    term: termid.to_owned(),
                    is_not,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                    summary: None,
                };

            // split by the number of expressed alleles in the genotype;
            // phenotype annotations always carry a genotype, so unwrap is
            // safe by construction
            for annotation_id in detail_ids {
                let annotation = self.annotation_details.
                    get(&annotation_id).expect("can't find OntAnnotationDetail");

                let genotype_uniquename = annotation.genotype.as_ref().unwrap();

                if let Some(genotype_details) = self.genotypes.get(genotype_uniquename) {
                    if genotype_details.expressed_alleles.len() == 1 {
                        single_allele.annotations.push(*annotation_id);
                    } else {
                        multi_allele.annotations.push(*annotation_id);
                    }
                } else {
                    panic!("can't find genotype details for {}\n", genotype_uniquename);
                }
            }

            let mut return_vec = vec![];

            if !single_allele.annotations.is_empty() {
                return_vec.push((String::from("single_allele_phenotype"),
                                 single_allele));
            }

            if !multi_allele.annotations.is_empty() {
                return_vec.push((String::from("multi_allele_phenotype"),
                                 multi_allele));
            }

            return_vec
        },
        _ => {
            // default: one bucket named after the CV itself
            vec![(cv_name,
                  OntTermAnnotations {
                      term: termid.to_owned(),
                      is_not,
                      rel_names: HashSet::new(),
                      annotations: detail_ids.to_owned(),
                      summary: None,
                  })]
        }
    }
}
// store the OntTermAnnotations in the TermDetails, GeneDetails,
// GenotypeDetails and ReferenceDetails
fn store_ont_annotations(&mut self, is_not: bool) {
let ont_annotations = if is_not {
&self.all_not_ont_annotations
} else {
&self.all_ont_annotations
};
let mut gene_annotation_by_term: HashMap<GeneUniquename, HashMap<TermId, Vec<OntAnnotationId>>> =
HashMap::new();
let mut genotype_annotation_by_term: HashMap<GenotypeUniquename, HashMap<TermId, Vec<OntAnnotationId>>> =
HashMap::new();
let mut ref_annotation_by_term: HashMap<String, HashMap<TermId, Vec<OntAnnotationId>>> =
HashMap::new();
for (termid, annotations) in ont_annotations {
let mut sorted_annotations = annotations.clone();
if !is_not {
let cv_config = {
let term = &self.terms[termid];
&self.config.cv_config_by_name(&term.cv_name)
};
{
let cmp_detail_with_maps =
|id1: &i32, id2: &i32| {
let annotation1 = self.annotation_details.
get(&id1).expect("can't find OntAnnotationDetail");
let annotation2 = self.annotation_details.
get(&id2).expect("can't find OntAnnotationDetail");
let result =
cmp_ont_annotation_detail(cv_config,
annotation1, annotation2, &self.genes,
&self.genotypes,
&self.terms);
result.unwrap_or_else(|err| panic!("error from cmp_ont_annotation_detail: {}", err))
};
sorted_annotations.sort_by(cmp_detail_with_maps);
}
let new_annotations =
self.make_term_annotations(&termid, &sorted_annotations, is_not);
if let Some(ref mut term_details) = self.terms.get_mut(termid) {
for (cv_name, new_annotation) in new_annotations {
term_details.cv_annotations.entry(cv_name.clone())
.or_insert_with(Vec::new)
.push(new_annotation);
}
} else {
panic!("missing termid: {}\n", termid);
}
}
for annotation_id in sorted_annotations {
let annotation = self.annotation_details.
get(&annotation_id).expect("can't find OntAnnotationDetail");
for gene_uniquename in &annotation.genes {
gene_annotation_by_term.entry(gene_uniquename.clone())
.or_insert_with(HashMap::new)
.entry(termid.clone())
.or_insert_with(|| vec![])
.push(annotation_id);
}
if let Some(ref genotype_uniquename) = annotation.genotype {
let existing =
genotype_annotation_by_term.entry(genotype_uniquename.clone())
.or_insert_with(HashMap::new)
.entry(termid.clone())
.or_insert_with(|| vec![]);
if !existing.contains(&annotation_id) {
existing.push(annotation_id);
}
}
if let Some(reference_uniquename) = annotation.reference.clone() {
ref_annotation_by_term.entry(reference_uniquename)
.or_insert_with(HashMap::new)
.entry(termid.clone())
.or_insert_with(|| vec![])
.push(annotation_id);
}
for condition_termid in &annotation.conditions {
let cv_name =
if let Some(ref term_details) = self.terms.get(condition_termid) {
term_details.cv_name.clone()
} else {
panic!("can't find term details for {}", condition_termid);
};
if let Some(ref mut condition_term_details) =
self.terms.get_mut(&condition_termid.clone())
{
condition_term_details.cv_annotations
.entry(cv_name.clone())
.or_insert({
let mut new_vec = Vec::new();
let new_term_annotation =
OntTermAnnotations {
term: condition_termid.clone(),
is_not,
rel_names: HashSet::new(),
annotations: vec![],
summary: None,
};
new_vec.push(new_term_annotation);
new_vec
});
condition_term_details.cv_annotations.get_mut(&cv_name)
.unwrap()[0]
.annotations.push(annotation_id);
}
}
// Add annotations to terms referred to in extensions. They
// are added to fake CV that have a name starting with
// "extension:". The CV name will end with ":genotype" if the
// annotation is a phentoype/genotype, and will end with ":end"
// otherwise. The middle of the fake CV name is the display
// name for the extension relation.
// eg. "extension:directly activates:gene"
for ext_part in &annotation.extension {
if let ExtRange::Term(ref part_termid) = ext_part.ext_range {
let cv_name = "extension:".to_owned() + &ext_part.rel_type_display_name;
if let Some(ref mut part_term_details) =
self.terms.get_mut(part_termid)
{
let extension_cv_name =
if annotation.genotype.is_some() {
cv_name.clone() + ":genotype"
} else {
cv_name.clone() + ":gene"
};
part_term_details.cv_annotations
.entry(extension_cv_name.clone())
.or_insert({
let mut new_vec = Vec::new();
let new_term_annotation =
OntTermAnnotations {
term: part_termid.to_owned(),
is_not,
rel_names: HashSet::new(),
annotations: vec![],
summary: None,
};
new_vec.push(new_term_annotation);
new_vec
});
part_term_details.cv_annotations.get_mut(&extension_cv_name)
.unwrap()[0]
.annotations.push(annotation_id);
}
}
}
}
}
let mut term_names = HashMap::new();
for (termid, term_details) in &self.terms {
term_names.insert(termid.clone(), term_details.name.to_lowercase());
}
let ont_term_cmp = |ont_term_1: &OntTermAnnotations, ont_term_2: &OntTermAnnotations| {
if !ont_term_1.is_not && ont_term_2.is_not {
return Ordering::Less;
}
if ont_term_1.is_not && !ont_term_2.is_not {
return Ordering::Greater;
}
let term1 = &term_names[&ont_term_1.term];
let term2 = &term_names[&ont_term_2.term];
term1.cmp(&term2)
};
for (gene_uniquename, term_annotation_map) in &gene_annotation_by_term {
for (termid, details) in term_annotation_map {
let new_annotations =
self.make_term_annotations(&termid, &details, is_not);
let gene_details = self.genes.get_mut(gene_uniquename).unwrap();
for (cv_name, new_annotation) in new_annotations {
gene_details.cv_annotations.entry(cv_name.clone())
.or_insert_with(Vec::new)
.push(new_annotation);
}
}
let gene_details = self.genes.get_mut(gene_uniquename).unwrap();
for cv_annotations in gene_details.cv_annotations.values_mut() {
cv_annotations.sort_by(&ont_term_cmp)
}
}
for (genotype_uniquename, term_annotation_map) in &genotype_annotation_by_term {
for (termid, details) in term_annotation_map {
let new_annotations =
self.make_term_annotations(&termid, &details, is_not);
let details = self.genotypes.get_mut(genotype_uniquename).unwrap();
for (cv_name, new_annotation) in new_annotations {
details.cv_annotations.entry(cv_name.clone())
.or_insert_with(Vec::new)
.push(new_annotation);
}
}
let details = self.genotypes.get_mut(genotype_uniquename).unwrap();
for cv_annotations in details.cv_annotations.values_mut() {
cv_annotations.sort_by(&ont_term_cmp)
}
}
for (reference_uniquename, ref_annotation_map) in &ref_annotation_by_term {
for (termid, details) in ref_annotation_map {
let new_annotations =
self.make_term_annotations(&termid, &details, is_not);
let ref_details = self.references.get_mut(reference_uniquename).unwrap();
for (cv_name, new_annotation) in new_annotations {
ref_details.cv_annotations.entry(cv_name).or_insert_with(Vec::new)
.push(new_annotation.clone());
}
}
let ref_details = self.references.get_mut(reference_uniquename).unwrap();
for cv_annotations in ref_details.cv_annotations.values_mut() {
cv_annotations.sort_by(&ont_term_cmp)
}
}
}
// return true if the term could or should appear in the interesting_parents
// field of the TermDetails and TermShort structs
fn is_interesting_parent(&self, termid: &str, rel_name: &str) -> bool {
    // Membership test against the pre-computed set of (termid, rel_name)
    // pairs configured as "interesting".
    let candidate = InterestingParent {
        termid: termid.into(),
        rel_name: rel_name.into(),
    };
    self.possible_interesting_parents.contains(&candidate)
}
fn process_cvtermpath(&mut self) {
    // Propagate annotations up the ontology using the cvtermpath closure
    // table: an annotation on a subject (descendant) term is copied to each
    // object (ancestor) term it reaches via an allowed relation.
    //
    // Keyed by (CV name, destination/ancestor termid) ->
    //   source/descendant termid -> annotation detail id -> the relation
    // names seen on the paths that carried the annotation.
    let mut new_annotations: HashMap<(CvName, TermId), HashMap<TermId, HashMap<i32, HashSet<RelName>>>> =
        HashMap::new();

    // ancestor termid -> descendant termids, used for the term page
    // child lists; stored on self at the end.
    let mut children_by_termid: HashMap<TermId, HashSet<TermId>> = HashMap::new();

    for cvtermpath in &self.raw.cvtermpaths {
        let subject_term = &cvtermpath.subject;
        let subject_termid = subject_term.termid();
        let object_term = &cvtermpath.object;
        let object_termid = object_term.termid();

        if let Some(subject_term_details) = self.terms.get(&subject_termid) {
            // A path without a relation type is a data error.
            let rel_termid =
                match cvtermpath.rel_type {
                    Some(ref rel_type) => {
                        rel_type.termid()
                    },
                    None => panic!("no relation type for {} <-> {}\n",
                                   &subject_term.name, &object_term.name)
                };
            let rel_term_name =
                self.make_term_short(&rel_termid).name;

            // has_part is only propagated for the CVs listed in
            // HAS_PART_CV_NAMES; any other relation must be in
            // DESCENDANT_REL_NAMES to be propagated at all.
            if rel_term_name == "has_part" &&
                !HAS_PART_CV_NAMES.contains(&subject_term_details.cv_name.as_str()) {
                continue;
            }

            if !DESCENDANT_REL_NAMES.contains(&rel_term_name.as_str()) {
                continue;
            }

            // Record the child/parent link, but only between two terms
            // that both have at least one annotation.
            if subject_term_details.cv_annotations.keys().len() > 0 {
                if let Some(object_term_details) = self.terms.get(&object_termid) {
                    if object_term_details.cv_annotations.keys().len() > 0 {
                        children_by_termid
                            .entry(object_termid.clone())
                            .or_insert_with(HashSet::new)
                            .insert(subject_termid.clone());
                    }
                }
            }

            // Queue every non-NOT annotation of the subject term for
            // copying onto the object term, remembering which relation(s)
            // justified the copy.
            for (cv_name, term_annotations) in &subject_term_details.cv_annotations {
                for term_annotation in term_annotations {
                    for annotation_id in &term_annotation.annotations {
                        let dest_termid = object_termid.clone();
                        let source_termid = subject_termid.clone();

                        if !term_annotation.is_not {
                            new_annotations.entry((cv_name.clone(), dest_termid))
                                .or_insert_with(HashMap::new)
                                .entry(source_termid)
                                .or_insert_with(HashMap::new)
                                .entry(*annotation_id)
                                .or_insert_with(HashSet::new)
                                .insert(rel_term_name.clone());
                        }
                    }
                }
            }
        } else {
            panic!("TermDetails not found for {}", &subject_termid);
        }
    }

    // Turn the queued annotation ids into OntTermAnnotations on the
    // destination (ancestor) terms.
    for ((dest_cv_name, dest_termid), dest_annotations_map) in new_annotations.drain() {
        for (source_termid, source_annotations_map) in dest_annotations_map {
            let mut new_annotations: Vec<OntAnnotationId> = vec![];
            let mut all_rel_names: HashSet<String> = HashSet::new();

            for (annotation_id, rel_names) in source_annotations_map {
                new_annotations.push(annotation_id);
                for rel_name in rel_names {
                    all_rel_names.insert(rel_name);
                }
            }

            let dest_cv_config = &self.config.cv_config_by_name(&dest_cv_name);

            // Sort the annotation detail ids with the CV-specific
            // comparison before grouping them into term annotations.
            // (Inner scope: the closure borrows self immutably and must
            // end before the get_mut() below.)
            {
                let cmp_detail_with_genotypes =
                    |id1: &i32, id2: &i32| {
                        let annotation1 = self.annotation_details.
                            get(&id1).expect("can't find OntAnnotationDetail");
                        let annotation2 = self.annotation_details.
                            get(&id2).expect("can't find OntAnnotationDetail");
                        let result =
                            cmp_ont_annotation_detail(dest_cv_config,
                                                      annotation1, annotation2, &self.genes,
                                                      &self.genotypes, &self.terms);
                        result.unwrap_or_else(|err| {
                            panic!("error from cmp_ont_annotation_detail: {} with terms: {} and {}",
                                   err, source_termid, dest_termid)
                        })
                    };

                new_annotations.sort_by(cmp_detail_with_genotypes);
            }

            // is_not is always false here: NOT annotations were filtered
            // out before queueing.
            let new_annotations =
                self.make_term_annotations(&source_termid, &new_annotations, false);

            let dest_term_details = {
                self.terms.get_mut(&dest_termid).unwrap()
            };

            // Attach a copy of each new annotation, tagged with all the
            // relation names that carried it.
            for (_, new_annotation) in new_annotations {
                let mut new_annotation_clone = new_annotation.clone();
                new_annotation_clone.rel_names.extend(all_rel_names.clone());

                dest_term_details.cv_annotations
                    .entry(dest_cv_name.clone())
                    .or_insert_with(Vec::new)
                    .push(new_annotation_clone);
            }
        }
    }

    // Finally sort every term's annotation lists: NOT annotations last,
    // then by lower-cased term name.
    let mut term_names = HashMap::new();
    for (termid, term_details) in &self.terms {
        term_names.insert(termid.clone(), term_details.name.to_lowercase());
    }

    let ont_term_cmp = |ont_term_1: &OntTermAnnotations, ont_term_2: &OntTermAnnotations| {
        if !ont_term_1.is_not && ont_term_2.is_not {
            return Ordering::Less;
        }
        if ont_term_1.is_not && !ont_term_2.is_not {
            return Ordering::Greater;
        }
        let term1 = &term_names[&ont_term_1.term];
        let term2 = &term_names[&ont_term_2.term];
        term1.cmp(&term2)
    };

    for term_details in self.terms.values_mut() {
        for term_annotations in term_details.cv_annotations.values_mut() {
            term_annotations.sort_by(&ont_term_cmp);
        }
    }

    self.children_by_termid = children_by_termid;
}
fn make_metadata(&mut self) -> Metadata {
    // Build the export metadata: exporter name/version, the database
    // creation time and the version of each CV that records one.

    // Last matching chadoprop wins, matching the original behaviour.
    let mut db_creation_datetime = None;
    for chadoprop in &self.raw.chadoprops {
        if chadoprop.prop_type.name == "db_creation_datetime" {
            db_creation_datetime = chadoprop.value.clone();
        }
    }

    let mut cv_versions = HashMap::new();
    for cvprop in &self.raw.cvprops {
        if cvprop.prop_type.name == "cv_version" {
            cv_versions.insert(cvprop.cv.name.clone(), cvprop.value.clone());
        }
    }

    const PKG_NAME: &str = env!("CARGO_PKG_NAME");
    const VERSION: &str = env!("CARGO_PKG_VERSION");

    Metadata {
        export_prog_name: String::from(PKG_NAME),
        export_prog_version: String::from(VERSION),
        // Panic with context rather than a bare unwrap() if the
        // db_creation_datetime chadoprop is missing from the database.
        db_creation_datetime: db_creation_datetime
            .expect("no db_creation_datetime chadoprop in the database"),
        gene_count: self.genes.len(),
        term_count: self.terms.len(),
        cv_versions,
    }
}
pub fn get_api_genotype_annotation(&self) -> HashMap<TermId, Vec<APIGenotypeAnnotation>>
{
    // For every term, build an APIGenotypeAnnotation for each
    // genotype-backed annotation of that term, keyed by termid.
    let mut app_genotype_annotation = HashMap::new();
    for term_details in self.terms.values() {
        for annotations_vec in term_details.cv_annotations.values() {
            for ont_term_annotations in annotations_vec {
                for annotation_id in &ont_term_annotations.annotations {
                    let annotation_details = self.annotation_details.
                        get(annotation_id).expect("can't find OntAnnotationDetail");

                    // Only annotations attached to a genotype are relevant;
                    // borrowing here also avoids the clone().unwrap() of
                    // the original code.
                    let genotype_uniquename =
                        match annotation_details.genotype {
                            Some(ref genotype_uniquename) => genotype_uniquename,
                            None => continue,
                        };
                    let genotype =
                        &term_details.genotypes_by_uniquename[genotype_uniquename];
                    let mut api_annotation = APIGenotypeAnnotation {
                        is_multi: genotype.expressed_alleles.len() > 1,
                        alleles: vec![],
                    };

                    // One APIAlleleDetails per expressed allele of the
                    // genotype.
                    for allele in &genotype.expressed_alleles {
                        let allele_uniquename = &allele.allele_uniquename;
                        let allele_short =
                            self.alleles.get(allele_uniquename).expect("Can't find allele");
                        let allele_details = APIAlleleDetails {
                            gene: allele_short.gene_uniquename.clone(),
                            allele_type: allele_short.allele_type.clone(),
                            expression: allele.expression.clone(),
                        };
                        api_annotation.alleles.push(allele_details);
                    }

                    app_genotype_annotation
                        .entry(term_details.termid.clone())
                        .or_insert_with(Vec::new)
                        .push(api_annotation);
                }
            }
        }
    }
    app_genotype_annotation
}
fn make_gene_query_go_component_data(&self, gene_details: &GeneDetails) -> Option<GeneQueryTermData> {
    // Returns the first configured GO component the gene is annotated
    // with (directly or via an interesting parent), GeneQueryTermData::Other
    // if it has component annotation matching none of them, or None if it
    // has no cellular_component annotation at all.
    let go_components_config = &self.config.query_data_config.go_components;

    // `?` replaces the is_none() check + repeated unwrap() of the
    // original code.
    let component_term_annotations =
        gene_details.cv_annotations.get("cellular_component")?;

    // True iff some non-NOT component annotation is on check_termid or
    // has it among its interesting parents.
    let in_component = |check_termid: &str| {
        for term_annotation in component_term_annotations {
            let term_details =
                self.terms.get(&term_annotation.term)
                .unwrap_or_else(|| {
                    panic!("can't find TermDetails for {}", &term_annotation.term)
                });
            let interesting_parents = &term_details.interesting_parents;
            if !term_annotation.is_not &&
                (term_annotation.term == check_termid ||
                 interesting_parents.contains(check_termid))
            {
                return true;
            }
        }
        false
    };

    for go_component_termid in go_components_config {
        if in_component(go_component_termid) {
            return Some(GeneQueryTermData::Term(TermAndName {
                termid: go_component_termid.to_owned(),
                name: self.terms.get(go_component_termid)
                    .unwrap_or_else(|| panic!("can't find TermDetails for {}",
                                              go_component_termid))
                    .name.clone(),
            }));
        }
    }

    Some(GeneQueryTermData::Other)
}
fn get_ortholog_taxonids(&self, gene_details: &GeneDetails)
    -> HashSet<u32>
{
    // Collect the distinct taxon IDs of all orthologs of this gene.
    gene_details.ortholog_annotations
        .iter()
        .map(|ortholog_annotation| ortholog_annotation.ortholog_taxonid)
        .collect()
}
fn make_gene_query_data_map(&self) -> HashMap<GeneUniquename, GeneQueryData> {
    // Build the per-gene record used by the advanced query system:
    // viability, GO component classification and ortholog taxa.
    self.genes.values()
        .map(|gene_details| {
            let gene_query_data = GeneQueryData {
                gene_uniquename: gene_details.uniquename.clone(),
                deletion_viability: gene_details.deletion_viability.clone(),
                go_component: self.make_gene_query_go_component_data(gene_details),
                ortholog_taxonids: self.get_ortholog_taxonids(gene_details),
            };
            (gene_details.uniquename.clone(), gene_query_data)
        })
        .collect()
}
pub fn make_api_maps(mut self) -> APIMaps {
    // Build the lookup maps served by the web API.  Takes self by value:
    // the terms map is drained below, so the builder can't be used
    // afterwards.
    let mut gene_summaries: HashMap<GeneUniquename, APIGeneSummary> = HashMap::new();
    let mut gene_name_gene_map = HashMap::new();
    let mut interactors_of_genes = HashMap::new();

    for (gene_uniquename, gene_details) in &self.genes {
        // Only genes of the organism being loaded get summaries and
        // interactor lists.
        if self.config.load_organism_taxonid == gene_details.taxonid {
            let gene_summary = self.make_api_gene_summary(&gene_uniquename);
            if let Some(ref gene_name) = gene_summary.name {
                gene_name_gene_map.insert(gene_name.clone(), gene_uniquename.clone());
            }
            gene_summaries.insert(gene_uniquename.clone(), gene_summary);

            let mut interactors = vec![];

            // An interaction row names two genes; the interactor is
            // whichever side isn't the current gene.  contains() keeps
            // the list free of duplicates.
            for interaction_annotation in &gene_details.physical_interactions {
                let interactor_uniquename =
                    if gene_uniquename == &interaction_annotation.gene_uniquename {
                        interaction_annotation.interactor_uniquename.clone()
                    } else {
                        interaction_annotation.gene_uniquename.clone()
                    };
                let interactor = APIInteractor {
                    interaction_type: InteractionType::Physical,
                    interactor_uniquename,
                };
                if !interactors.contains(&interactor) {
                    interactors.push(interactor);
                }
            }
            for interaction_annotation in &gene_details.genetic_interactions {
                let interactor_uniquename =
                    if gene_uniquename == &interaction_annotation.gene_uniquename {
                        interaction_annotation.interactor_uniquename.clone()
                    } else {
                        interaction_annotation.gene_uniquename.clone()
                    };
                let interactor = APIInteractor {
                    interaction_type: InteractionType::Genetic,
                    interactor_uniquename,
                };
                if !interactors.contains(&interactor) {
                    interactors.push(interactor);
                }
            }

            interactors_of_genes.insert(gene_uniquename.clone(), interactors);
        }
    }

    let gene_query_data_map = self.make_gene_query_data_map();

    let mut term_summaries: HashSet<TermShort> = HashSet::new();
    let mut termid_genes: HashMap<TermId, HashSet<GeneUniquename>> = HashMap::new();
    let mut terms_for_api: HashMap<TermId, TermDetails> = HashMap::new();

    for termid in self.terms.keys() {
        term_summaries.insert(self.make_term_short(termid));
    }

    let termid_genotype_annotation: HashMap<TermId, Vec<APIGenotypeAnnotation>> =
        self.get_api_genotype_annotation();

    // Drain the terms map: record termid -> annotated genes (only for
    // CVs configured with feature_type "gene") and keep a copy of every
    // TermDetails for the API.
    for (termid, term_details) in self.terms.drain() {
        let cv_config = &self.config.cv_config;
        if let Some(term_config) = cv_config.get(&term_details.cv_name) {
            if term_config.feature_type == "gene" {
                termid_genes.insert(termid.clone(),
                                    term_details.genes_annotated_with.clone());
            }
        }

        terms_for_api.insert(termid.clone(), term_details.clone());
    }

    APIMaps {
        gene_summaries,
        gene_query_data_map,
        termid_genes,
        termid_genotype_annotation,
        term_summaries,
        genes: self.genes.clone(),
        gene_name_gene_map,
        genotypes: self.genotypes.clone(),
        terms: terms_for_api.clone(),
        interactors_of_genes,
        references: self.references.clone(),
        other_features: self.other_features.clone(),
        annotation_details: self.annotation_details.clone(),
    }
}
fn add_cv_annotations_to_maps(&self,
                              identifier: &str,
                              cv_annotations: &OntAnnotationMap,
                              seen_references: &mut HashMap<String, ReferenceShortOptionMap>,
                              seen_genes: &mut HashMap<String, GeneShortOptionMap>,
                              seen_genotypes: &mut HashMap<String, GenotypeShortMap>,
                              seen_alleles: &mut HashMap<String, AlleleShortMap>,
                              seen_terms: &mut HashMap<String, TermShortOptionMap>) {
    // Walk every annotation reachable from cv_annotations and record,
    // under `identifier`, the terms, references, genes, genotypes and
    // alleles it mentions.
    for annotations_of_cv in cv_annotations.values() {
        for term_annotation in annotations_of_cv {
            self.add_term_to_hash(seen_terms, identifier,
                                  &term_annotation.term);

            for detail_id in &term_annotation.annotations {
                let detail = self.annotation_details
                    .get(detail_id).expect("can't find OntAnnotationDetail");

                self.add_ref_to_hash(seen_references, identifier,
                                     &detail.reference);

                for condition_termid in &detail.conditions {
                    self.add_term_to_hash(seen_terms, identifier,
                                          condition_termid);
                }

                // Extension ranges may name a term or a gene; other
                // range kinds are ignored here.
                for ext_part in &detail.extension {
                    if let ExtRange::Term(ref range_termid) = ext_part.ext_range {
                        self.add_term_to_hash(seen_terms, identifier,
                                              range_termid);
                    } else if let ExtRange::Gene(ref range_gene_uniquename) = ext_part.ext_range {
                        self.add_gene_to_hash(seen_genes, identifier,
                                              range_gene_uniquename);
                    }
                }

                if let Some(ref genotype_uniquename) = detail.genotype {
                    self.add_genotype_to_hash(seen_genotypes, seen_alleles, seen_genes,
                                              identifier,
                                              genotype_uniquename);
                }
            }
        }
    }
}
fn set_term_details_maps(&mut self) {
    // Collect, per term, the short forms of everything its annotations
    // mention (genes, genotypes, alleles, references, other terms), then
    // move the collected maps onto each TermDetails.
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();

    // termid -> genes with a direct (non-extension) annotation; used for
    // the query builder's gene lists.
    let mut genes_annotated_with_map: HashMap<TermId, HashSet<GeneUniquename>> =
        HashMap::new();

    for (termid, term_details) in &self.terms {
        for (cv_name, term_annotations) in &term_details.cv_annotations {
            for term_annotation in term_annotations {
                self.add_term_to_hash(&mut seen_terms, termid,
                                      &term_annotation.term);
                for annotation_detail_id in &term_annotation.annotations {
                    let annotation_detail = self.annotation_details
                        .get(&annotation_detail_id).expect("can't find OntAnnotationDetail");
                    for gene_uniquename in &annotation_detail.genes {
                        self.add_gene_to_hash(&mut seen_genes, termid,
                                              gene_uniquename);
                        if !cv_name.starts_with("extension:") {
                            // prevent extension annotations from appearing
                            // in the normal query builder searches
                            genes_annotated_with_map
                                .entry(termid.clone()).or_insert_with(HashSet::new)
                                .insert(gene_uniquename.clone());
                        }
                    }
                    self.add_ref_to_hash(&mut seen_references, termid,
                                         &annotation_detail.reference);
                    for condition_termid in &annotation_detail.conditions {
                        self.add_term_to_hash(&mut seen_terms, termid,
                                              condition_termid);
                    }
                    // Extension ranges may name a term or a gene; other
                    // range kinds are ignored here.
                    for ext_part in &annotation_detail.extension {
                        match ext_part.ext_range {
                            ExtRange::Term(ref range_termid) =>
                                self.add_term_to_hash(&mut seen_terms, termid,
                                                      range_termid),
                            ExtRange::Gene(ref ext_gene_uniquename) =>
                                self.add_gene_to_hash(&mut seen_genes, termid,
                                                      ext_gene_uniquename),
                            _ => {},
                        }
                    }
                    if let Some(ref genotype_uniquename) = annotation_detail.genotype {
                        self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles,
                                                  &mut seen_genes, termid,
                                                  &genotype_uniquename);
                    }
                }
            }
        }
    }

    // Move the collected maps onto each TermDetails.
    for (termid, term_details) in &mut self.terms {
        if let Some(genes) = seen_genes.remove(termid) {
            term_details.genes_by_uniquename = genes;
        }
        if let Some(genotypes) = seen_genotypes.remove(termid) {
            term_details.genotypes_by_uniquename = genotypes;
        }
        if let Some(alleles) = seen_alleles.remove(termid) {
            term_details.alleles_by_uniquename = alleles;
        }
        if let Some(references) = seen_references.remove(termid) {
            term_details.references_by_uniquename = references;
        }
        if let Some(terms) = seen_terms.remove(termid) {
            term_details.terms_by_termid = terms;
        }
        if let Some(gene_uniquename_set) = genes_annotated_with_map.remove(termid) {
            term_details.genes_annotated_with = gene_uniquename_set;
        }
    }
}
fn set_gene_details_maps(&mut self) {
    // Collect, per gene, the short forms of everything mentioned by its
    // annotations, interactions, ortholog/paralog annotations and
    // target_of annotations, then move the collected maps onto each
    // GeneDetails.
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();

    {
        for (gene_uniquename, gene_details) in &self.genes {
            self.add_cv_annotations_to_maps(&gene_uniquename,
                                            &gene_details.cv_annotations,
                                            &mut seen_references,
                                            &mut seen_genes,
                                            &mut seen_genotypes,
                                            &mut seen_alleles,
                                            &mut seen_terms);

            // Both genes of every interaction are recorded, whichever
            // side the current gene is on.
            let interaction_iter =
                gene_details.physical_interactions.iter().chain(&gene_details.genetic_interactions);
            for interaction in interaction_iter {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename,
                                     &interaction.reference_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &interaction.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &interaction.interactor_uniquename);
            }

            for ortholog_annotation in &gene_details.ortholog_annotations {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename,
                                     &ortholog_annotation.reference_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &ortholog_annotation.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &ortholog_annotation.ortholog_uniquename);
            }
            for paralog_annotation in &gene_details.paralog_annotations {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename,
                                     &paralog_annotation.reference_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &paralog_annotation.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                      &paralog_annotation.paralog_uniquename);
            }
            for target_of_annotation in &gene_details.target_of_annotations {
                for annotation_gene_uniquename in &target_of_annotation.genes {
                    self.add_gene_to_hash(&mut seen_genes, gene_uniquename,
                                          annotation_gene_uniquename);
                }
                if let Some(ref annotation_genotype_uniquename) = target_of_annotation.genotype_uniquename {
                    self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles, &mut seen_genes,
                                              gene_uniquename,
                                              &annotation_genotype_uniquename)
                }
                self.add_ref_to_hash(&mut seen_references, gene_uniquename,
                                     &target_of_annotation.reference_uniquename);
            }
        }
    }

    // Move the per-gene maps onto the GeneDetails structs.
    for (gene_uniquename, gene_details) in &mut self.genes {
        if let Some(references) = seen_references.remove(gene_uniquename) {
            gene_details.references_by_uniquename = references;
        }
        if let Some(alleles) = seen_alleles.remove(gene_uniquename) {
            gene_details.alleles_by_uniquename = alleles;
        }
        if let Some(genes) = seen_genes.remove(gene_uniquename) {
            gene_details.genes_by_uniquename = genes;
        }
        if let Some(genotypes) = seen_genotypes.remove(gene_uniquename) {
            gene_details.genotypes_by_uniquename = genotypes;
        }
        if let Some(terms) = seen_terms.remove(gene_uniquename) {
            gene_details.terms_by_termid = terms;
        }
    }
}
fn set_genotype_details_maps(&mut self) {
    // Collect, per genotype, the references, genes, alleles and terms
    // its annotations mention, then move the collected maps onto each
    // GenotypeDetails.
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();

    for (genotype_uniquename, genotype_details) in &self.genotypes {
        self.add_cv_annotations_to_maps(&genotype_uniquename,
                                        &genotype_details.cv_annotations,
                                        &mut seen_references,
                                        &mut seen_genes,
                                        &mut seen_genotypes,
                                        &mut seen_alleles,
                                        &mut seen_terms);
    }

    // NOTE(review): seen_genotypes is filled above but never stored on
    // GenotypeDetails here — confirm that's intentional.
    for (genotype_uniquename, genotype_details) in &mut self.genotypes {
        if let Some(references) = seen_references.remove(genotype_uniquename) {
            genotype_details.references_by_uniquename = references;
        }
        if let Some(alleles) = seen_alleles.remove(genotype_uniquename) {
            genotype_details.alleles_by_uniquename = alleles;
        }
        // (Renamed a misleading binding: this map holds genes, not
        // genotypes.)
        if let Some(genes) = seen_genes.remove(genotype_uniquename) {
            genotype_details.genes_by_uniquename = genes;
        }
        if let Some(terms) = seen_terms.remove(genotype_uniquename) {
            genotype_details.terms_by_termid = terms;
        }
    }
}
fn set_reference_details_maps(&mut self) {
    // Collect, per reference, the short forms of the genes, genotypes,
    // alleles and terms its annotations mention, then move the collected
    // maps onto each ReferenceDetails.
    let mut seen_genes: HashMap<String, GeneShortOptionMap> = HashMap::new();

    // local type aliases for the per-reference short maps
    type GenotypeShortMap = HashMap<GenotypeUniquename, GenotypeShort>;
    let mut seen_genotypes: HashMap<ReferenceUniquename, GenotypeShortMap> = HashMap::new();

    type AlleleShortMap = HashMap<AlleleUniquename, AlleleShort>;
    let mut seen_alleles: HashMap<TermId, AlleleShortMap> = HashMap::new();

    let mut seen_terms: HashMap<GeneUniquename, TermShortOptionMap> = HashMap::new();

    {
        for (reference_uniquename, reference_details) in &self.references {
            for feat_annotations in reference_details.cv_annotations.values() {
                for feat_annotation in feat_annotations.iter() {
                    self.add_term_to_hash(&mut seen_terms, reference_uniquename,
                                          &feat_annotation.term);

                    for annotation_detail_id in &feat_annotation.annotations {
                        let annotation_detail = self.annotation_details
                            .get(&annotation_detail_id).expect("can't find OntAnnotationDetail");

                        for gene_uniquename in &annotation_detail.genes {
                            self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
                                                  gene_uniquename)
                        }
                        for condition_termid in &annotation_detail.conditions {
                            self.add_term_to_hash(&mut seen_terms, reference_uniquename,
                                                  condition_termid);
                        }
                        // Extension ranges may name a term or a gene;
                        // other range kinds are ignored here.
                        for ext_part in &annotation_detail.extension {
                            match ext_part.ext_range {
                                ExtRange::Term(ref range_termid) =>
                                    self.add_term_to_hash(&mut seen_terms,
                                                          reference_uniquename,
                                                          range_termid),
                                ExtRange::Gene(ref allele_gene_uniquename) =>
                                    self.add_gene_to_hash(&mut seen_genes,
                                                          reference_uniquename,
                                                          allele_gene_uniquename),
                                _ => {},
                            }
                        }
                        if let Some(ref genotype_uniquename) = annotation_detail.genotype {
                            let genotype = self.make_genotype_short(genotype_uniquename);
                            // NOTE(review): keyed by display_uniquename
                            // here, unlike the other set_*_details_maps
                            // methods which pass the raw genotype
                            // uniquename — confirm this is intended.
                            self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles, &mut seen_genes,
                                                      reference_uniquename,
                                                      &genotype.display_uniquename);
                        }
                    }
                }
            }

            // Genes from interactions and ortholog/paralog annotations
            // are recorded too.
            let interaction_iter =
                reference_details.physical_interactions.iter()
                .chain(&reference_details.genetic_interactions);
            for interaction in interaction_iter {
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
                                      &interaction.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
                                      &interaction.interactor_uniquename);
            }

            for ortholog_annotation in &reference_details.ortholog_annotations {
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
                                      &ortholog_annotation.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
                                      &ortholog_annotation.ortholog_uniquename);
            }
            for paralog_annotation in &reference_details.paralog_annotations {
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
                                      &paralog_annotation.gene_uniquename);
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename,
                                      &paralog_annotation.paralog_uniquename);
            }
        }
    }

    // Move the per-reference maps onto the ReferenceDetails structs.
    for (reference_uniquename, reference_details) in &mut self.references {
        if let Some(genes) = seen_genes.remove(reference_uniquename) {
            reference_details.genes_by_uniquename = genes;
        }
        if let Some(genotypes) = seen_genotypes.remove(reference_uniquename) {
            reference_details.genotypes_by_uniquename = genotypes;
        }
        if let Some(alleles) = seen_alleles.remove(reference_uniquename) {
            reference_details.alleles_by_uniquename = alleles;
        }
        if let Some(terms) = seen_terms.remove(reference_uniquename) {
            reference_details.terms_by_termid = terms;
        }
    }
}
pub fn set_counts(&mut self) {
    // Compute per-term gene/genotype counts and per-reference gene sets,
    // then store the counts on each TermDetails.
    let mut term_seen_genes: HashMap<TermId, HashSet<GeneUniquename>> = HashMap::new();
    let mut term_seen_genotypes: HashMap<TermId, HashSet<GenotypeUniquename>> = HashMap::new();
    let mut term_seen_single_allele_genotypes: HashMap<TermId, HashSet<GenotypeUniquename>> = HashMap::new();
    let mut ref_seen_genes: HashMap<ReferenceUniquename, HashSet<GeneUniquename>> = HashMap::new();

    for (termid, term_details) in &self.terms {
        let mut seen_genes: HashSet<GeneUniquename> = HashSet::new();
        let mut seen_genotypes: HashSet<GenotypeUniquename> = HashSet::new();
        let mut seen_single_allele_genotypes: HashSet<GenotypeUniquename> = HashSet::new();
        // NOTE(review): unlike the reference loop below, NOT annotations
        // are counted here too — confirm that's intended.
        for term_annotations in term_details.cv_annotations.values() {
            for term_annotation in term_annotations {
                for annotation_detail_id in &term_annotation.annotations {
                    let annotation_detail = self.annotation_details
                        .get(&annotation_detail_id).expect("can't find OntAnnotationDetail");
                    for gene_uniquename in &annotation_detail.genes {
                        seen_genes.insert(gene_uniquename.clone());
                    }
                    if let Some(ref genotype_uniquename) = annotation_detail.genotype {
                        seen_genotypes.insert(genotype_uniquename.clone());
                        // single-locus genotypes are tracked separately
                        let genotype = &self.genotypes[genotype_uniquename];
                        if genotype.expressed_alleles.len() == 1 {
                            seen_single_allele_genotypes.insert(genotype_uniquename.clone());
                        }
                    }
                }
            }
        }
        term_seen_genes.insert(termid.clone(), seen_genes);
        term_seen_genotypes.insert(termid.clone(), seen_genotypes);
        term_seen_single_allele_genotypes.insert(termid.clone(), seen_single_allele_genotypes);
    }

    for (reference_uniquename, reference_details) in &self.references {
        let mut seen_genes: HashSet<GeneUniquename> = HashSet::new();
        for rel_annotations in reference_details.cv_annotations.values() {
            for rel_annotation in rel_annotations {
                for annotation_detail_id in &rel_annotation.annotations {
                    let annotation_detail = self.annotation_details
                        .get(&annotation_detail_id).expect("can't find OntAnnotationDetail");
                    // only count genes from positive annotations
                    if !rel_annotation.is_not {
                        for gene_uniquename in &annotation_detail.genes {
                            seen_genes.insert(gene_uniquename.clone());
                        }
                    }
                }
            }
        }
        // genes from interactions and ortholog annotations count towards
        // a reference's gene set too
        let interaction_iter =
            reference_details.physical_interactions.iter().chain(&reference_details.genetic_interactions);
        for interaction in interaction_iter {
            seen_genes.insert(interaction.gene_uniquename.clone());
            seen_genes.insert(interaction.interactor_uniquename.clone());
        }
        for ortholog_annotation in &reference_details.ortholog_annotations {
            seen_genes.insert(ortholog_annotation.gene_uniquename.clone());
        }
        // NOTE(review): ref_seen_genes is built but not consumed in this
        // method — presumably used by code outside this view; confirm.
        ref_seen_genes.insert(reference_uniquename.clone(), seen_genes);
    }

    // Store the collected counts/sets on each TermDetails.
    for term_details in self.terms.values_mut() {
        term_details.single_allele_genotype_uniquenames =
            term_seen_single_allele_genotypes.remove(&term_details.termid).unwrap();
        term_details.gene_count =
            term_seen_genes[&term_details.termid].len();
        term_details.genotype_count =
            term_seen_genotypes[&term_details.termid].len();
    }
}
fn make_non_bp_slim_gene_subset(&self, go_slim_subset: &TermSubsetDetails)
    -> IdGeneSubsetMap
{
    // Partition the coding genes that are NOT covered by the GO slim
    // into those with and without any biological process annotation,
    // returning the two subsets.
    let slim_termid_set: HashSet<String> =
        go_slim_subset.elements
        .iter().map(|element| element.termid.clone()).collect();

    let mut non_slim_with_bp_annotation = HashSet::new();
    let mut non_slim_without_bp_annotation = HashSet::new();

    // True iff some non-NOT annotation is on a slim term or has a slim
    // term among its interesting parents.  Takes a slice rather than
    // &Vec (clippy ptr_arg).
    let has_parent_in_slim = |term_annotations: &[OntTermAnnotations]| {
        for term_annotation in term_annotations {
            let interesting_parents =
                &self.terms[&term_annotation.term].interesting_parents;
            if !term_annotation.is_not &&
                (slim_termid_set.contains(&term_annotation.term) ||
                 interesting_parents.intersection(&slim_termid_set).count() > 0)
            {
                return true;
            }
        }
        false
    };

    for gene_details in self.genes.values() {
        // Restrict to characterised coding genes of the loaded organism.
        if self.config.load_organism_taxonid != gene_details.taxonid {
            continue;
        }
        if gene_details.feature_type != "mRNA gene" {
            continue;
        }
        if gene_details.characterisation_status == Some("transposon".into()) ||
            gene_details.characterisation_status == Some("dubious".into())
        {
            continue;
        }

        let mut bp_count = 0;
        if let Some(annotations) =
            gene_details.cv_annotations.get("biological_process") {
            // Genes covered by the slim are excluded from both subsets.
            if has_parent_in_slim(annotations) {
                continue
            }
            bp_count = annotations.len();
        }

        if bp_count == 0 {
            non_slim_without_bp_annotation.insert(gene_details.uniquename.clone());
        } else {
            non_slim_with_bp_annotation.insert(gene_details.uniquename.clone());
        }
    }

    let mut return_map = HashMap::new();
    return_map.insert("non_go_slim_with_bp_annotation".into(),
                      GeneSubsetDetails {
                          name: "non_go_slim_with_bp_annotation".into(),
                          display_name: String::from("Proteins with biological process ") +
                              "annotation that are not in a slim category",
                          elements: non_slim_with_bp_annotation,
                      });
    return_map.insert("non_go_slim_without_bp_annotation".into(),
                      GeneSubsetDetails {
                          name: "non_go_slim_without_bp_annotation".into(),
                          display_name: String::from("Proteins with no biological process ") +
                              "annotation and are not in a slim category",
                          elements: non_slim_without_bp_annotation,
                      });
    return_map
}
fn make_bp_go_slim_subset(&self) -> TermSubsetDetails {
    // Build the GO slim subset from the configured slim terms, counting
    // the distinct genes annotated with any slim term.
    let mut all_genes = HashSet::new();
    let mut go_slim_subset: HashSet<TermSubsetElement> = HashSet::new();

    // Iterate the config by reference — cloning the whole term list on
    // every call was a needless allocation.
    for go_slim_conf in &self.config.go_slim_terms {
        let slim_termid = &go_slim_conf.termid;
        let term_details = self.terms.get(slim_termid)
            .unwrap_or_else(|| panic!("can't find TermDetails for {}", slim_termid));
        let subset_element = TermSubsetElement {
            name: term_details.name.clone(),
            termid: slim_termid.clone(),
            gene_count: term_details.genes_annotated_with.len(),
        };

        for gene in &term_details.genes_annotated_with {
            all_genes.insert(gene);
        }

        go_slim_subset.insert(subset_element);
    }

    TermSubsetDetails {
        name: "goslim_pombe".into(),
        total_gene_count: all_genes.len(),
        elements: go_slim_subset,
    }
}
fn make_feature_type_subsets(&self, subsets: &mut IdGeneSubsetMap) {
    // Group the loaded organism's genes into one subset per feature
    // type, keyed eg. "feature_type:mRNA_gene".
    //
    // Compile the separator-normalising regex once — recompiling it for
    // every gene was needlessly expensive.
    let re = Regex::new(r"[\s,:]+").unwrap();

    for gene_details in self.genes.values() {
        if self.config.load_organism_taxonid != gene_details.taxonid {
            continue;
        }
        let subset_name =
            String::from("feature_type:") + &gene_details.feature_type;
        // subset key: display name with whitespace/commas/colons
        // collapsed to "_"
        let subset_name_no_spaces: String = re.replace_all(&subset_name, "_").into();
        subsets.entry(subset_name_no_spaces.clone())
            .or_insert(GeneSubsetDetails {
                name: subset_name_no_spaces,
                display_name: subset_name,
                elements: HashSet::new()
            })
            .elements.insert(gene_details.uniquename.clone());
    }
}
// make subsets using the characterisation_status field of GeneDetails
fn make_characterisation_status_subsets(&self, subsets: &mut IdGeneSubsetMap) {
    // One subset per characterisation status, keyed eg.
    // "characterisation_status:published".  Only coding genes of the
    // loaded organism are considered.
    //
    // Compile the separator-normalising regex once — recompiling it for
    // every gene was needlessly expensive.
    let re = Regex::new(r"[\s,:]+").unwrap();

    for gene_details in self.genes.values() {
        if self.config.load_organism_taxonid != gene_details.taxonid {
            continue;
        }
        if gene_details.feature_type != "mRNA gene" {
            continue;
        }

        if let Some(ref characterisation_status) = gene_details.characterisation_status {
            let subset_name =
                String::from("characterisation_status:") + &characterisation_status;
            // subset key: display name with whitespace/commas/colons
            // collapsed to "_"
            let subset_name_no_spaces: String = re.replace_all(&subset_name, "_").into();
            subsets.entry(subset_name_no_spaces.clone())
                .or_insert(GeneSubsetDetails {
                    name: subset_name_no_spaces,
                    display_name: subset_name,
                    elements: HashSet::new()
                })
                .elements.insert(gene_details.uniquename.clone());
        }
    }
}
// make InterPro subsets using the interpro_matches field of GeneDetails
fn make_interpro_subsets(&mut self, subsets: &mut IdGeneSubsetMap) {
    // For every InterPro match of every gene, add the gene to a subset
    // keyed by the InterPro ID (when present) and to a subset keyed by
    // the member database and its own ID.
    for (gene_uniquename, gene_details) in &self.genes {
        for interpro_match in &gene_details.interpro_matches {
            let mut new_subset_names = vec![];

            if !interpro_match.interpro_id.is_empty() {
                let subset_name =
                    String::from("interpro:") + &interpro_match.interpro_id;
                new_subset_names.push((subset_name,
                                       interpro_match.interpro_name.clone()));
            }

            // (Removed a needless clone: borrowing dbname is enough to
            // build the concatenated string.)
            let subset_name = String::from("interpro:") +
                &interpro_match.dbname + ":" + &interpro_match.id;
            new_subset_names.push((subset_name, interpro_match.name.clone()));

            for (subset_name, display_name) in new_subset_names {
                subsets.entry(subset_name.clone())
                    .or_insert(GeneSubsetDetails {
                        name: subset_name,
                        display_name,
                        elements: HashSet::new(),
                    })
                    .elements.insert(gene_uniquename.clone());
            }
        }
    }
}
    // populate the term and gene subset maps
    fn make_subsets(&mut self) {
        // The BP GO slim term subset is built first so the "non BP slim"
        // gene subset can be derived from it before it is stored.
        let bp_go_slim_subset = self.make_bp_go_slim_subset();
        let mut gene_subsets =
            self.make_non_bp_slim_gene_subset(&bp_go_slim_subset);
        self.term_subsets.insert("bp_goslim_pombe".into(), bp_go_slim_subset);
        // the remaining subset builders all append to the same map
        self.make_feature_type_subsets(&mut gene_subsets);
        self.make_characterisation_status_subsets(&mut gene_subsets);
        self.make_interpro_subsets(&mut gene_subsets);
        self.gene_subsets = gene_subsets;
    }
    // sort the list of genes in the ChromosomeDetails by start_pos
    pub fn sort_chromosome_genes(&mut self) {
        let mut genes_to_sort: HashMap<ChromosomeName, Vec<GeneUniquename>> =
            HashMap::new();
        // The inner scope limits the borrow of self.genes (captured by the
        // sorter closure) so self.chromosomes can be mutated afterwards.
        {
            // Order: by start_pos when both genes have a location; ties and
            // location-less genes fall back to name, with named genes before
            // unnamed ones and uniquename as the final tie-breaker.
            let sorter = |uniquename1: &GeneUniquename, uniquename2: &GeneUniquename| {
                let gene1 = &self.genes[uniquename1];
                let gene2 = &self.genes[uniquename2];
                if let Some(ref gene1_loc) = gene1.location {
                    if let Some(ref gene2_loc) = gene2.location {
                        let cmp = gene1_loc.start_pos.cmp(&gene2_loc.start_pos);
                        if cmp != Ordering::Equal {
                            return cmp;
                        }
                    }
                }
                if gene1.name.is_some() {
                    if gene2.name.is_some() {
                        gene1.name.cmp(&gene2.name)
                    } else {
                        Ordering::Less
                    }
                } else {
                    if gene2.name.is_some() {
                        Ordering::Greater
                    } else {
                        gene1.uniquename.cmp(&gene2.uniquename)
                    }
                }
            };
            // sort copies of the per-chromosome gene lists ...
            for (chr_uniquename, chr_details) in &self.chromosomes {
                genes_to_sort.insert(chr_uniquename.clone(),
                                     chr_details.gene_uniquenames.clone());
            }
            for gene_uniquenames in genes_to_sort.values_mut() {
                gene_uniquenames.sort_by(&sorter);
            }
        }
        // ... then write the sorted lists back into the chromosomes
        for (chr_uniquename, gene_uniquenames) in genes_to_sort {
            self.chromosomes.get_mut(&chr_uniquename).unwrap().gene_uniquenames =
                gene_uniquenames;
        }
    }
// remove some of the refs that have no annotations.
// See: https://github.com/pombase/website/issues/628
fn remove_non_curatable_refs(&mut self) {
let filtered_refs = self.references.drain()
.filter(|&(_, ref reference_details)| {
if reference_has_annotation(reference_details) {
return true;
}
if let Some(ref canto_triage_status) = reference_details.canto_triage_status {
if canto_triage_status == "New" {
return false;
}
} else {
if reference_details.uniquename.starts_with("PMID:") {
print!("reference {} has no canto_triage_status\n", reference_details.uniquename);
}
}
if let Some (ref triage_status) = reference_details.canto_triage_status {
return triage_status != "Wrong organism" && triage_status != "Loaded in error";
}
// default to true because there are references that
// haven't or shouldn't be triaged, eg. GO_REF:...
true
})
.into_iter().collect();
self.references = filtered_refs;
}
    // Build one Solr search document per term that has at least one
    // annotation in a non-extension CV.
    fn make_solr_term_summaries(&mut self) -> Vec<SolrTermSummary> {
        let mut return_summaries = vec![];
        let term_name_split_re = Regex::new(r"\W+").unwrap();
        for (termid, term_details) in &self.terms {
            // skip terms whose only annotations are in "extension:" CVs
            if term_details.cv_annotations.keys()
                .filter(|cv_name| !cv_name.starts_with("extension:"))
                .next().is_none() {
                continue;
            }
            // characters stripped from the edges of every indexed word
            let trimmable_p = |c: char| {
                c.is_whitespace() || c == ',' || c == ':'
                    || c == ';' || c == '.' || c == '\''
            };
            let term_name_words =
                term_name_split_re.split(&term_details.name)
                .map(|s: &str| {
                    s.trim_matches(&trimmable_p).to_owned()
                }).collect::<Vec<String>>();
            let mut close_synonyms = vec![];
            let mut close_synonym_words_vec: Vec<String> = vec![];
            let mut distant_synonyms = vec![];
            let mut distant_synonym_words_vec: Vec<String> = vec![];
            // add each synonym word unless it duplicates one already in the
            // word list or in the term name itself
            let add_to_words_vec = |synonym: &str, words_vec: &mut Vec<String>| {
                let synonym_words = term_name_split_re.split(&synonym);
                for word in synonym_words {
                    let word_string = word.trim_matches(&trimmable_p).to_owned();
                    if !words_vec.contains(&word_string) &&
                        !term_name_words.contains(&word_string) {
                        words_vec.push(word_string);
                    }
                }
            };
            // "exact" and "narrow" synonyms rank higher ("close") in search
            for synonym in &term_details.synonyms {
                if synonym.synonym_type == "exact" || synonym.synonym_type == "narrow" {
                    add_to_words_vec(&synonym.name, &mut close_synonym_words_vec);
                    close_synonyms.push(synonym.name.clone());
                } else {
                    add_to_words_vec(&synonym.name, &mut distant_synonym_words_vec);
                    distant_synonyms.push(synonym.name.clone());
                }
            }
            // a synonym that is both close and distant counts only as close
            distant_synonyms = distant_synonyms.into_iter()
                .filter(|synonym| {
                    !close_synonyms.contains(&synonym)
                })
                .collect::<Vec<_>>();
            let interesting_parents_for_solr =
                term_details.interesting_parents.clone();
            let term_summ = SolrTermSummary {
                id: termid.clone(),
                cv_name: term_details.cv_name.clone(),
                name: term_details.name.clone(),
                definition: term_details.definition.clone(),
                close_synonyms,
                close_synonym_words: close_synonym_words_vec.join(" "),
                distant_synonyms,
                distant_synonym_words: distant_synonym_words_vec.join(" "),
                interesting_parents: interesting_parents_for_solr,
            };
            return_summaries.push(term_summ);
        }
        return_summaries
    }
fn make_solr_reference_summaries(&mut self) -> Vec<SolrReferenceSummary> {
let mut return_summaries = vec![];
for reference_details in self.references.values() {
return_summaries.push(SolrReferenceSummary::from_reference_details(reference_details));
}
return_summaries
}
    // Run the full build pipeline and return the assembled WebData.
    // The order of the passes matters: each step fills maps that later
    // steps read (features before annotations, annotations before
    // summaries and counts, counts before subsets).
    pub fn get_web_data(mut self) -> WebData {
        self.process_dbxrefs();
        self.process_references();
        self.process_chromosome_features();
        self.make_feature_rel_maps();
        self.process_features();
        self.add_gene_neighbourhoods();
        self.process_props_from_feature_cvterms();
        // alleles depend on genes_of_alleles; genotypes depend on alleles
        self.process_allele_features();
        self.process_genotype_features();
        self.process_cvterms();
        self.add_interesting_parents();
        self.process_cvterm_rels();
        self.process_extension_cvterms();
        self.process_feature_synonyms();
        self.process_feature_cvterms();
        // store positive annotations first, then the NOT annotations
        self.store_ont_annotations(false);
        self.store_ont_annotations(true);
        self.process_cvtermpath();
        self.process_annotation_feature_rels();
        self.add_target_of_annotations();
        self.set_deletion_viability();
        self.set_term_details_subsets();
        self.make_all_cv_summaries();
        self.remove_non_curatable_refs();
        self.set_term_details_maps();
        self.set_gene_details_maps();
        self.set_genotype_details_maps();
        self.set_reference_details_maps();
        self.set_counts();
        self.make_subsets();
        self.sort_chromosome_genes();
        let metadata = self.make_metadata();
        // gene summaries only cover the organism this site is loaded for
        let mut gene_summaries: Vec<GeneSummary> = vec![];
        for (gene_uniquename, gene_details) in &self.genes {
            if self.config.load_organism_taxonid == gene_details.taxonid {
                gene_summaries.push(self.make_gene_summary(&gene_uniquename));
            }
        }
        let solr_term_summaries = self.make_solr_term_summaries();
        let solr_reference_summaries = self.make_solr_reference_summaries();
        let solr_data = SolrData {
            term_summaries: solr_term_summaries,
            gene_summaries: gene_summaries.clone(),
            reference_summaries: solr_reference_summaries,
        };
        let chromosomes = self.chromosomes.clone();
        let mut chromosome_summaries = vec![];
        for chr_details in self.chromosomes.values() {
            chromosome_summaries.push(chr_details.make_chromosome_short());
        }
        // clone the pieces that make_api_maps() below still needs from self
        let term_subsets = self.term_subsets.clone();
        let gene_subsets = self.gene_subsets.clone();
        let recent_references = self.recent_references.clone();
        let all_community_curated = self.all_community_curated.clone();
        WebData {
            metadata,
            chromosomes,
            chromosome_summaries,
            recent_references,
            all_community_curated,
            api_maps: self.make_api_maps(),
            search_gene_summaries: gene_summaries,
            term_subsets,
            gene_subsets,
            solr_data,
        }
    }
}
|
use std::rc::Rc;
use std::collections::hash_map::HashMap;
use std::collections::HashSet;
use std::borrow::Borrow;
use std::cmp::Ordering;
use regex::Regex;
use db::*;
use web::data::*;
use web::config::*;
use web::vec_set::*;
include!(concat!(env!("OUT_DIR"), "/config_serde.rs"));
// Reduce a full Organism to the genus + species pair used by the web code.
fn make_organism_short(rc_organism: &Rc<Organism>) -> OrganismShort {
    let genus = rc_organism.genus.clone();
    let species = rc_organism.species.clone();
    OrganismShort { genus, species }
}
// An allele of a genotype together with its expression level (taken from
// the "expression" feature_relationship prop, when present).
#[derive(Clone)]
pub struct AlleleAndExpression {
    // uniquename of the allele feature
    allele_uniquename: String,
    // eg. "Overexpression"; None when no expression prop was found
    expression: Option<String>,
}
// Builder that turns raw chado rows into the WebData structure; the maps
// below are filled incrementally by the passes run from get_web_data().
pub struct WebDataBuild<'a> {
    // raw rows read from the chado database
    raw: &'a Raw,
    config: &'a Config,
    // details maps keyed by uniquename / ID
    genes: UniquenameGeneMap,
    transcripts: UniquenameTranscriptMap,
    genotypes: UniquenameGenotypeMap,
    alleles: UniquenameAlleleMap,
    terms: HashMap<TermId, TermDetails>,
    references: IdReferenceMap,
    // annotations grouped by term; NOT-annotations are kept separately
    all_ont_annotations: HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>,
    all_not_ont_annotations: HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>,
    // feature relationship lookup tables, filled by make_feature_rel_maps()
    genes_of_transcripts: HashMap<String, String>,
    transcripts_of_polypeptides: HashMap<String, String>,
    genes_of_alleles: HashMap<String, String>,
    alleles_of_genotypes: HashMap<String, Vec<AlleleAndExpression>>,
    // gene_uniquename vs transcript_type_name:
    transcript_type_of_genes: HashMap<String, String>,
    // a map from IDs of terms from the "PomBase annotation extension terms" cv
    // to a Vec of the details of each of the extension
    parts_of_extensions: HashMap<TermId, Vec<ExtPart>>,
    base_term_of_extensions: HashMap<TermId, TermId>,
}
// Convenience constructor for the five empty "seen" maps that are threaded
// through the detail-building methods.
fn get_maps() ->
    (HashMap<String, ReferenceShortMap>,
     HashMap<String, GeneShortMap>,
     HashMap<String, GenotypeShortMap>,
     HashMap<String, AlleleShortMap>,
     HashMap<GeneUniquename, TermShortMap>)
{
    Default::default()
}
// Return the value of the first "expression" prop on a feature_relationship,
// or None when no such prop exists.
fn get_feat_rel_expression(feature_relationship: &FeatureRelationship) -> Option<String> {
    feature_relationship.feature_relationshipprops.borrow().iter()
        .find(|prop| prop.prop_type.name == "expression")
        .and_then(|prop| prop.value.clone())
}
// True for the feature types that represent genes ("gene" and "pseudogene").
fn is_gene_type(feature_type_name: &str) -> bool {
    match feature_type_name {
        "gene" | "pseudogene" => true,
        _ => false,
    }
}
// Remove and return the first element of `vec` that matches `predicate`,
// or None when nothing matches.
pub fn remove_first<T, P>(vec: &mut Vec<T>, predicate: P) -> Option<T>
    where P: FnMut(&T) -> bool {
    let maybe_pos = vec.iter().position(predicate);
    if let Some(pos) = maybe_pos {
        Some(vec.remove(pos))
    } else {
        None
    }
}
// Merge two extension parts that have the same relation and gene ranges:
// the result keeps ext_part1's relation with the union (sorted, deduped)
// of both gene lists.  Panics if the relations differ or either range is
// not a gene range — callers must pre-filter with a gene-range predicate.
pub fn merge_gene_ext_parts(ext_part1: &ExtPart, ext_part2: &ExtPart) -> ExtPart {
    if ext_part1.rel_type_name == ext_part2.rel_type_name {
        if let ExtRange::SummaryGenes(ref part1_summ_genes) = ext_part1.ext_range {
            if let ExtRange::SummaryGenes(ref part2_summ_genes) = ext_part2.ext_range {
                let mut ret_ext_part = ext_part1.clone();
                // union of the two gene lists, sorted and deduplicated
                let mut new_genes = [part1_summ_genes.clone(), part2_summ_genes.clone()].concat();
                new_genes.sort();
                new_genes.dedup();
                ret_ext_part.ext_range = ExtRange::SummaryGenes(new_genes);
                return ret_ext_part
            }
        }
        panic!("passed ExtPart objects that have non-gene ranges to merge_gene_ext_parts():
  {:?} {:?}", ext_part1, ext_part2);
    } else {
        panic!("passed ExtPart objects with mismatched relations to merge_gene_ext_parts():
  {} {}\n", ext_part1.rel_type_name, ext_part2.rel_type_name);
    }
}
// Merge adjacent summary rows that differ only in the gene of one
// configured gene-range extension part: the matching parts are combined
// into a single part listing all the genes.  Rows must already be sorted
// so that mergeable rows are adjacent.
pub fn collect_ext_summary_genes(cv_config: &CvConfig, rows: Vec<TermSummaryRow>)
                                 -> Vec<TermSummaryRow> {
    // only relations listed in the CV config are eligible for merging
    let conf_gene_rels = &cv_config.summary_gene_relations_to_collect;
    let gene_range_rel_p =
        |ext_part: &ExtPart| {
            if let ExtRange::SummaryGenes(_) = ext_part.ext_range {
                conf_gene_rels.contains(&ext_part.rel_type_name)
            } else {
                false
            }
        };
    let mut ret_rows = vec![];
    let mut row_iter = rows.iter().cloned();
    if let Some(mut prev_row) = row_iter.next() {
        for current_row in row_iter {
            // rows for different genes/genotypes can never merge
            if prev_row.gene_uniquenames != current_row.gene_uniquenames ||
                prev_row.genotype_uniquename != current_row.genotype_uniquename {
                    ret_rows.push(prev_row);
                    prev_row = current_row;
                    continue;
                }
            // pull the first mergeable gene-range part out of each row
            let mut prev_row_extension = prev_row.extension.clone();
            let prev_matching_gene_ext_part =
                remove_first(&mut prev_row_extension, &gene_range_rel_p);
            let mut current_row_extension = current_row.extension.clone();
            let current_matching_gene_ext_part =
                remove_first(&mut current_row_extension, &gene_range_rel_p);
            if let (Some(prev_gene_ext_part), Some(current_gene_ext_part)) =
                (prev_matching_gene_ext_part, current_matching_gene_ext_part) {
                    // merge only when the rest of the extension is identical
                    // and both parts use the same relation
                    if current_row_extension == prev_row_extension &&
                        prev_gene_ext_part.rel_type_name == current_gene_ext_part.rel_type_name {
                            let merged_gene_ext_parts =
                                merge_gene_ext_parts(&prev_gene_ext_part,
                                                     &current_gene_ext_part);
                            let mut new_ext = vec![merged_gene_ext_parts];
                            new_ext.extend_from_slice(&prev_row_extension);
                            // keep accumulating into prev_row; it may merge
                            // with the next row too
                            prev_row.extension = new_ext;
                        } else {
                            ret_rows.push(prev_row);
                            prev_row = current_row;
                        }
                } else {
                    ret_rows.push(prev_row);
                    prev_row = current_row
                }
        }
        ret_rows.push(prev_row);
    }
    ret_rows
}
// combine rows that have a gene and no extension into one row
pub fn collect_summary_rows(rows: &mut Vec<TermSummaryRow>) {
    let mut no_ext_rows = vec![];
    let mut other_rows = vec![];
    for row in rows.drain(0..) {
        if !row.gene_uniquenames.is_empty() && row.extension.is_empty() {
            if row.gene_uniquenames.len() > 1 {
                panic!("row has more than one gene\n");
            }
            no_ext_rows.push(row);
        } else {
            other_rows.push(row);
        }
    }
    // single gene per row is guaranteed by the panic above
    let gene_uniquenames: Vec<String> =
        no_ext_rows.iter().map(|row| row.gene_uniquenames.get(0).unwrap().clone())
        .collect();
    rows.clear();
    // only add the combined row when there was something to combine;
    // previously an empty placeholder row was pushed even when there
    // were no extension-less gene rows
    if !gene_uniquenames.is_empty() {
        let genes_row = TermSummaryRow {
            gene_uniquenames: gene_uniquenames,
            genotype_uniquename: None,
            extension: vec![],
        };
        rows.push(genes_row);
    }
    rows.append(&mut other_rows);
}
// Remove annotations from the summary where there is another more
// specific annotation. ie. the same annotation but with extra part(s) in the
// extension.
// See: https://github.com/pombase/website/issues/185
pub fn remove_redundant_summary_rows(rows: &mut Vec<TermSummaryRow>) {
    // guard the remove(0) below — previously this panicked on an empty Vec
    if rows.is_empty() {
        return;
    }
    let mut results = vec![];
    // reverse sort so that more specific (longer-extension) rows come first
    rows.sort();
    rows.reverse();
    let mut vec_set = VecSet::new();
    let mut prev = rows.remove(0);
    results.push(prev.clone());
    if prev.gene_uniquenames.len() > 1 {
        panic!("remove_redundant_summary_rows() failed: num genes > 1\n");
    }
    vec_set.insert(&prev.extension);
    for current in rows.drain(0..) {
        if current.gene_uniquenames.len() > 1 {
            panic!("remove_redundant_summary_rows() failed: num genes > 1\n");
        }
        // NOTE(review): get(1) is always None here because rows with more
        // than one gene panic above, so that comparison is always true and
        // the condition reduces to the superset check — get(0) looks like
        // the intended index; confirm before changing behaviour.
        if prev.gene_uniquenames.len() == 0 &&
            current.gene_uniquenames.len() == 0 ||
            prev.gene_uniquenames.get(1) == current.gene_uniquenames.get(1) &&
            !vec_set.contains_superset(&current.extension) {
                results.push(current.clone());
                vec_set.insert(&current.extension);
            }
        prev = current;
    }
    results.sort();
    *rows = results;
}
// Build a summary (deduplicated, collected rows) for each term's
// annotations.  include_gene/include_genotype control whether the gene or
// genotype column appears, depending on the CV's configured feature type.
// NOTE(review): the cvtermpath parameter is not used in this body.
fn make_cv_summaries(config: &Config, cvtermpath: &Vec<Rc<Cvtermpath>>,
                     include_gene: bool, include_genotype: bool,
                     term_and_annotations_vec: &Vec<OntTermAnnotations>) -> Vec<OntTermSummary> {
    let mut result = vec![];
    for ref term_and_annotations in term_and_annotations_vec {
        let term = &term_and_annotations.term;
        let cv_config = config.cv_config_by_name(&term.cv_name);
        let mut rows = vec![];
        for annotation in &term_and_annotations.annotations {
            // skip annotations that would produce a completely empty row
            if annotation.gene_uniquename.is_none() &&
                annotation.genotype_uniquename.is_none() &&
                annotation.extension.len() == 0 {
                    continue;
                }
            // drop hidden relations and convert single-gene ranges to the
            // SummaryGenes form used by the merging helpers
            let mut summary_extension = annotation.extension.iter().cloned()
                .filter(|ext_part|
                        !cv_config.summary_relations_to_hide.contains(&ext_part.rel_type_name))
                .map(move |mut ext_part| {
                    if let ExtRange::Gene(gene_uniquename) = ext_part.ext_range.clone() {
                        let summ_genes = vec![gene_uniquename];
                        ext_part.ext_range = ExtRange::SummaryGenes(vec![summ_genes]);
                    }
                    ext_part })
                .collect::<Vec<ExtPart>>();
            summary_extension.sort();
            collect_duplicated_relations(&mut summary_extension);
            let gene_uniquenames =
                if include_gene && cv_config.feature_type == "gene" {
                    if let Some(ref gene_uniquename) = annotation.gene_uniquename {
                        vec![gene_uniquename.clone()]
                    } else {
                        vec![]
                    }
                } else {
                    vec![]
                };
            let maybe_genotype_uniquename =
                if include_genotype && cv_config.feature_type == "genotype" {
                    annotation.genotype_uniquename.clone()
                } else {
                    None
                };
            let row = TermSummaryRow {
                gene_uniquenames: gene_uniquenames,
                genotype_uniquename: maybe_genotype_uniquename,
                extension: summary_extension,
            };
            rows.push(row);
        }
        // dedupe, combine extension-less gene rows, then merge gene ranges
        remove_redundant_summary_rows(&mut rows);
        collect_summary_rows(&mut rows);
        let summary = OntTermSummary {
            term: term_and_annotations.term.clone(),
            is_not: term_and_annotations.is_not,
            rel_names: term_and_annotations.rel_names.clone(),
            rows: collect_ext_summary_genes(&cv_config, rows),
        };
        result.push(summary);
    }
    result
}
// turns binds([[gene1]]),binds([[gene2]]),other_rel(...) into:
// binds([[gene1, gene2]]),other_rel(...)
// Assumes `ext` is sorted so parts with the same relation are adjacent.
pub fn collect_duplicated_relations(ext: &mut Vec<ExtPart>) {
    let mut result: Vec<ExtPart> = vec![];
    {
        let mut iter = ext.iter().cloned();
        if let Some(mut prev) = iter.next() {
            for current in iter {
                if prev.rel_type_name != current.rel_type_name {
                    result.push(prev);
                    prev = current;
                    continue;
                }
                // same relation: fold current's first gene list into prev's
                // and drop current (the `continue` skips the push below)
                if let ExtRange::SummaryGenes(ref current_summ_genes) = current.ext_range {
                    if let ExtRange::SummaryGenes(ref mut prev_summ_genes) = prev.ext_range {
                        let mut current_genes = current_summ_genes.get(0).unwrap().clone();
                        prev_summ_genes.get_mut(0).unwrap().append(& mut current_genes);
                        continue;
                    }
                }
                // same relation but not both gene ranges: keep both parts
                result.push(prev);
                prev = current;
            }
            result.push(prev);
        }
    }
    // replace the contents of ext with the merged list
    ext.clear();
    ext.append(&mut result);
}
impl <'a> WebDataBuild<'a> {
    // Create a builder over the raw chado rows and site configuration.
    // All maps start empty; they are filled by the process_*()/make_*()
    // passes driven from get_web_data().
    pub fn new(raw: &'a Raw, config: &'a Config) -> WebDataBuild<'a> {
        WebDataBuild {
            raw: raw,
            config: config,
            genes: HashMap::new(),
            transcripts: HashMap::new(),
            genotypes: HashMap::new(),
            alleles: HashMap::new(),
            terms: HashMap::new(),
            references: HashMap::new(),
            all_ont_annotations: HashMap::new(),
            all_not_ont_annotations: HashMap::new(),
            genes_of_transcripts: HashMap::new(),
            transcripts_of_polypeptides: HashMap::new(),
            genes_of_alleles: HashMap::new(),
            alleles_of_genotypes: HashMap::new(),
            transcript_type_of_genes: HashMap::new(),
            parts_of_extensions: HashMap::new(),
            base_term_of_extensions: HashMap::new(),
        }
    }
fn add_ref_to_hash(&self,
seen_references: &mut HashMap<String, ReferenceShortMap>,
identifier: String,
maybe_reference_uniquename: Option<ReferenceUniquename>) {
if let Some(reference_uniquename) = maybe_reference_uniquename {
if let Some(reference_short) = self.make_reference_short(&reference_uniquename) {
seen_references
.entry(identifier.clone())
.or_insert(HashMap::new())
.insert(reference_uniquename.clone(),
reference_short);
}
}
}
fn add_gene_to_hash(&self,
seen_genes: &mut HashMap<String, GeneShortMap>,
identifier: String,
other_gene_uniquename: GeneUniquename) {
seen_genes
.entry(identifier)
.or_insert(HashMap::new())
.insert(other_gene_uniquename.clone(),
self.make_gene_short(&other_gene_uniquename));
}
fn add_genotype_to_hash(&self,
seen_genotypes: &mut HashMap<String, GenotypeShortMap>,
seen_alleles: &mut HashMap<String, AlleleShortMap>,
seen_genes: &mut HashMap<String, GeneShortMap>,
identifier: String,
genotype_uniquename: &GenotypeUniquename) {
let genotype = self.make_genotype_short(genotype_uniquename);
for expressed_allele in &genotype.expressed_alleles {
self.add_allele_to_hash(seen_alleles, seen_genes, identifier.clone(),
expressed_allele.allele_uniquename.clone());
}
seen_genotypes
.entry(identifier)
.or_insert(HashMap::new())
.insert(genotype_uniquename.clone(),
self.make_genotype_short(genotype_uniquename));
}
fn add_allele_to_hash(&self,
seen_alleles: &mut HashMap<String, AlleleShortMap>,
seen_genes: &mut HashMap<String, GeneShortMap>,
identifier: String,
allele_uniquename: AlleleUniquename) -> AlleleShort {
let allele_short = self.make_allele_short(&allele_uniquename);
let allele_gene_uniquename =
allele_short.gene_uniquename.clone();
self.add_gene_to_hash(seen_genes, identifier.clone(), allele_gene_uniquename);
seen_alleles
.entry(identifier)
.or_insert(HashMap::new())
.insert(allele_uniquename, allele_short.clone());
allele_short
}
fn add_term_to_hash(&self,
seen_terms: &mut HashMap<TermId, TermShortMap>,
identifier: String,
other_termid: TermId) {
seen_terms
.entry(identifier)
.or_insert(HashMap::new())
.insert(other_termid.clone(),
self.make_term_short(&other_termid));
}
fn get_gene<'b>(&'b self, gene_uniquename: &'b str) -> &'b GeneDetails {
if let Some(gene_details) = self.genes.get(gene_uniquename) {
gene_details
} else {
panic!("can't find GeneDetails for gene uniquename {}", gene_uniquename)
}
}
fn get_gene_mut<'b>(&'b mut self, gene_uniquename: &'b str) -> &'b mut GeneDetails {
if let Some(gene_details) = self.genes.get_mut(gene_uniquename) {
gene_details
} else {
panic!("can't find GeneDetails for gene uniquename {}", gene_uniquename)
}
}
fn make_gene_short(&self, gene_uniquename: &str) -> GeneShort {
let gene_details = self.get_gene(&gene_uniquename);
GeneShort {
uniquename: gene_details.uniquename.clone(),
name: gene_details.name.clone(),
product: gene_details.product.clone(),
}
}
fn make_gene_summary(&self, gene_uniquename: &str) -> GeneSummary {
let gene_details = self.get_gene(&gene_uniquename);
GeneSummary {
uniquename: gene_details.uniquename.clone(),
name: gene_details.name.clone(),
product: gene_details.product.clone(),
synonyms: gene_details.synonyms.clone(),
feature_type: gene_details.feature_type.clone(),
organism: gene_details.organism.clone(),
location: gene_details.location.clone(),
}
}
fn make_reference_short(&self, reference_uniquename: &str) -> Option<ReferenceShort> {
if reference_uniquename == "null" {
None
} else {
let reference_details = self.references.get(reference_uniquename).unwrap();
let reference_short =
ReferenceShort {
uniquename: String::from(reference_uniquename),
title: reference_details.title.clone(),
citation: reference_details.citation.clone(),
publication_year: reference_details.publication_year.clone(),
authors: reference_details.authors.clone(),
authors_abbrev: reference_details.authors_abbrev.clone(),
gene_count: reference_details.genes_by_uniquename.keys().len(),
genotype_count: reference_details.genotypes_by_uniquename.keys().len(),
};
Some(reference_short)
}
}
fn make_term_short(&self, termid: &str) -> TermShort {
if let Some(term_details) = self.terms.get(termid) {
TermShort {
name: term_details.name.clone(),
cv_name: term_details.cv_name.clone(),
interesting_parents: term_details.interesting_parents.clone(),
termid: term_details.termid.clone(),
is_obsolete: term_details.is_obsolete,
gene_count: term_details.genes_by_uniquename.keys().len(),
genotype_count: term_details.genotypes_by_uniquename.keys().len(),
}
} else {
panic!("can't find TermDetails for termid: {}", termid)
}
}
fn add_characterisation_status(&mut self, gene_uniquename: &String, cvterm_name: &String) {
let mut gene_details = self.genes.get_mut(gene_uniquename).unwrap();
gene_details.characterisation_status = Some(cvterm_name.clone());
}
fn add_gene_product(&mut self, gene_uniquename: &String, product: &String) {
let mut gene_details = self.get_gene_mut(gene_uniquename);
gene_details.product = Some(product.clone());
}
fn add_name_description(&mut self, gene_uniquename: &str, name_description: &str) {
let mut gene_details = self.get_gene_mut(gene_uniquename);
gene_details.name_descriptions.push(name_description.into());
}
fn add_annotation(&mut self, cvterm: &Cvterm, is_not: bool,
annotation_template: OntAnnotationDetail) {
let termid =
match self.base_term_of_extensions.get(&cvterm.termid()) {
Some(base_termid) => base_termid.clone(),
None => cvterm.termid(),
};
let extension_parts =
match self.parts_of_extensions.get(&cvterm.termid()) {
Some(parts) => parts.clone(),
None => vec![],
};
let mut new_extension = extension_parts.clone();
let mut existing_extensions = annotation_template.extension.clone();
new_extension.append(&mut existing_extensions);
let ont_annotation_detail =
OntAnnotationDetail {
extension: new_extension,
.. annotation_template
};
let annotation_map = if is_not {
&mut self.all_not_ont_annotations
} else {
&mut self.all_ont_annotations
};
let entry = annotation_map.entry(termid.clone());
entry.or_insert(
vec![]
).push(Rc::new(ont_annotation_detail));
}
fn process_references(&mut self) {
for rc_publication in &self.raw.publications {
let reference_uniquename = &rc_publication.uniquename;
let mut pubmed_authors: Option<String> = None;
let mut pubmed_publication_date: Option<String> = None;
let mut pubmed_abstract: Option<String> = None;
for prop in rc_publication.publicationprops.borrow().iter() {
match &prop.prop_type.name as &str {
"pubmed_publication_date" =>
pubmed_publication_date = Some(prop.value.clone()),
"pubmed_authors" =>
pubmed_authors = Some(prop.value.clone()),
"pubmed_abstract" =>
pubmed_abstract = Some(prop.value.clone()),
_ => ()
}
}
let mut authors_abbrev = None;
let mut publication_year = None;
if let Some(authors) = pubmed_authors.clone() {
if authors.contains(",") {
let author_re = Regex::new(r"^(?P<f>[^,]+),.*$").unwrap();
authors_abbrev = Some(author_re.replace_all(&authors, "$f et al."));
} else {
authors_abbrev = Some(authors.clone());
}
}
if let Some(publication_date) = pubmed_publication_date.clone() {
let date_re = Regex::new(r"^(.* )?(?P<y>\d\d\d\d)$").unwrap();
publication_year = Some(date_re.replace_all(&publication_date, "$y"));
}
self.references.insert(reference_uniquename.clone(),
ReferenceDetails {
uniquename: reference_uniquename.clone(),
title: rc_publication.title.clone(),
citation: rc_publication.miniref.clone(),
pubmed_abstract: pubmed_abstract.clone(),
authors: pubmed_authors.clone(),
authors_abbrev: authors_abbrev,
pubmed_publication_date: pubmed_publication_date.clone(),
publication_year: publication_year,
cv_annotations: HashMap::new(),
cv_summaries: HashMap::new(),
physical_interactions: vec![],
genetic_interactions: vec![],
ortholog_annotations: vec![],
paralog_annotations: vec![],
genes_by_uniquename: HashMap::new(),
genotypes_by_uniquename: HashMap::new(),
alleles_by_uniquename: HashMap::new(),
terms_by_termid: HashMap::new(),
});
}
}
    // Scan all feature_relationships once and fill the lookup tables used
    // by the later passes: transcript->gene, polypeptide->transcript,
    // allele->gene and genotype->alleles (with expression).
    fn make_feature_rel_maps(&mut self) {
        for feature_rel in self.raw.feature_relationships.iter() {
            let subject_type_name = &feature_rel.subject.feat_type.name;
            let rel_name = &feature_rel.rel_type.name;
            let object_type_name = &feature_rel.object.feat_type.name;
            let subject_uniquename = &feature_rel.subject.uniquename;
            let object_uniquename = &feature_rel.object.uniquename;
            // transcript part_of gene (also records the gene's transcript type)
            if TRANSCRIPT_FEATURE_TYPES.contains(&subject_type_name.as_str()) &&
                rel_name == "part_of" &&
                (object_type_name == "gene" || object_type_name == "pseudogene") {
                    self.genes_of_transcripts.insert(subject_uniquename.clone(),
                                                     object_uniquename.clone());
                    self.transcript_type_of_genes.insert(object_uniquename.clone(),
                                                         subject_type_name.clone());
                    continue;
                }
            // polypeptide derives_from mRNA
            if subject_type_name == "polypeptide" &&
                rel_name == "derives_from" &&
                object_type_name == "mRNA" {
                    self.transcripts_of_polypeptides.insert(subject_uniquename.clone(),
                                                            object_uniquename.clone());
                    continue;
                }
            if subject_type_name == "allele" {
                // allele instance_of gene
                if feature_rel.rel_type.name == "instance_of" &&
                    (object_type_name == "gene" || object_type_name == "pseudogene") {
                        self.genes_of_alleles.insert(subject_uniquename.clone(),
                                                     object_uniquename.clone());
                        continue;
                    }
                // allele part_of genotype, keeping the expression prop
                if feature_rel.rel_type.name == "part_of" &&
                    object_type_name == "genotype" {
                        let allele_and_expression =
                            AlleleAndExpression {
                                allele_uniquename: subject_uniquename.clone(),
                                expression: get_feat_rel_expression(feature_rel),
                            };
                        let entry = self.alleles_of_genotypes.entry(object_uniquename.clone());
                        entry.or_insert(Vec::new()).push(allele_and_expression);
                        continue;
                    }
            }
        }
    }
fn make_location(&self, feat: &Feature) -> Option<ChromosomeLocation> {
let feature_locs = feat.featurelocs.borrow();
match feature_locs.get(0) {
Some(feature_loc) => {
let start_pos =
if feature_loc.fmin + 1 >= 1 {
(feature_loc.fmin + 1) as u32
} else {
panic!("start_pos less than 1");
};
let end_pos =
if feature_loc.fmax >= 1 {
feature_loc.fmax as u32
} else {
panic!("start_end less than 1");
};
Some(ChromosomeLocation {
chromosome_name: feature_loc.srcfeature.uniquename.clone(),
start_pos: start_pos,
end_pos: end_pos,
strand: match feature_loc.strand {
1 => Strand::Forward,
-1 => Strand::Reverse,
_ => panic!(),
},
})
},
None => None,
}
}
    // Create the (mostly empty) GeneDetails for a gene/pseudogene feature;
    // annotations, interactions etc. are filled in by later passes.
    fn store_gene_details(&mut self, feat: &Feature) {
        let location = self.make_location(&feat);
        let organism = make_organism_short(&feat.organism);
        // eg. "mRNA gene" — the transcript type is known only when
        // make_feature_rel_maps() saw a transcript for this gene
        let feature_type =
            if let Some(transcript_type) =
            self.transcript_type_of_genes.get(&feat.uniquename) {
                transcript_type.clone() + " " + &feat.feat_type.name
            } else {
                feat.feat_type.name.clone()
            };
        let gene_feature = GeneDetails {
            uniquename: feat.uniquename.clone(),
            name: feat.name.clone(),
            organism: organism,
            product: None,
            name_descriptions: vec![],
            synonyms: vec![],
            feature_type: feature_type,
            characterisation_status: None,
            location: location,
            gene_neighbourhood: vec![],
            cds_location: None,
            cv_annotations: HashMap::new(),
            cv_summaries: HashMap::new(),
            physical_interactions: vec![],
            genetic_interactions: vec![],
            ortholog_annotations: vec![],
            paralog_annotations: vec![],
            target_of_annotations: vec![],
            transcripts: vec![],
            genes_by_uniquename: HashMap::new(),
            genotypes_by_uniquename: HashMap::new(),
            alleles_by_uniquename: HashMap::new(),
            references_by_uniquename: HashMap::new(),
            terms_by_termid: HashMap::new(),
        };
        self.genes.insert(feat.uniquename.clone(), gene_feature);
    }
fn store_genotype_details(&mut self, feat: &Feature) {
let mut background = None;
for prop in feat.featureprops.borrow().iter() {
if prop.prop_type.name == "genotype_background" {
background = prop.value.clone()
}
}
self.genotypes.insert(feat.uniquename.clone(),
GenotypeDetails {
uniquename: feat.uniquename.clone(),
name: feat.name.clone(),
background: background,
expressed_alleles: vec![],
cv_annotations: HashMap::new(),
cv_summaries: HashMap::new(),
genes_by_uniquename: HashMap::new(),
alleles_by_uniquename: HashMap::new(),
references_by_uniquename: HashMap::new(),
terms_by_termid: HashMap::new(),
});
}
    // Create the AlleleShort for an allele feature from its featureprops.
    // Panics when the allele has no "allele_type" prop or when
    // genes_of_alleles has no gene for it (both indicate bad input data).
    fn store_allele_details(&mut self, feat: &Feature) {
        let mut allele_type = None;
        let mut description = None;
        for prop in feat.featureprops.borrow().iter() {
            match &prop.prop_type.name as &str {
                "allele_type" =>
                    allele_type = prop.value.clone(),
                "description" =>
                    description = prop.value.clone(),
                _ => ()
            }
        }
        if allele_type.is_none() {
            panic!("no allele_type cvtermprop for {}", &feat.uniquename);
        }
        // filled by make_feature_rel_maps(), which must run first
        let gene_uniquename =
            self.genes_of_alleles.get(&feat.uniquename).unwrap();
        let allele_details = AlleleShort {
            uniquename: feat.uniquename.clone(),
            name: feat.name.clone(),
            gene_uniquename: gene_uniquename.clone(),
            allele_type: allele_type.unwrap(),
            description: description,
        };
        self.alleles.insert(feat.uniquename.clone(), allele_details);
    }
fn process_feature(&mut self, feat: &Feature) {
match &feat.feat_type.name as &str {
"gene" | "pseudogene" =>
self.store_gene_details(feat),
_ => {
if TRANSCRIPT_FEATURE_TYPES.contains(&feat.feat_type.name.as_str()) {
self.transcripts.insert(feat.uniquename.clone(),
TranscriptDetails {
uniquename: feat.uniquename.clone(),
name: feat.name.clone(),
});
}
}
}
}
fn process_features(&mut self) {
for feat in &self.raw.features {
if feat.feat_type.name != "genotype" && feat.feat_type.name != "allele" {
self.process_feature(&feat);
}
}
}
fn add_interesting_parents(&mut self) {
let mut interesting_parents_by_termid: HashMap<String, HashSet<String>> =
HashMap::new();
for cvtermpath in &self.raw.cvtermpaths {
let subject_term = &cvtermpath.subject;
let subject_termid = subject_term.termid();
let object_term = &cvtermpath.object;
let object_termid = object_term.termid();
let rel_termid =
match cvtermpath.rel_type {
Some(ref rel_type) => {
rel_type.termid()
},
None => panic!("no relation type for {} <-> {}\n",
&subject_term.name, &object_term.name)
};
let rel_term_name =
self.make_term_short(&rel_termid).name;
if self.is_interesting_parent(&object_termid, &rel_term_name) {
interesting_parents_by_termid
.entry(subject_termid.clone())
.or_insert(HashSet::new())
.insert(object_termid.into());
};
}
for (termid, interesting_parents) in interesting_parents_by_termid {
let mut term_details = self.terms.get_mut(&termid).unwrap();
term_details.interesting_parents = interesting_parents;
}
}
fn process_allele_features(&mut self) {
for feat in &self.raw.features {
if feat.feat_type.name == "allele" {
self.store_allele_details(&feat);
}
}
}
fn process_genotype_features(&mut self) {
for feat in &self.raw.features {
if feat.feat_type.name == "genotype" {
self.store_genotype_details(&feat);
}
}
}
    // For each located gene, record the window of up to
    // GENE_NEIGHBOURHOOD_DISTANCE genes on either side (same chromosome,
    // ordered by start position, including the gene itself).
    fn add_gene_neighbourhoods(&mut self) {
        struct GeneAndLoc {
            gene_uniquename: String,
            loc: ChromosomeLocation,
        };
        let mut genes_and_locs: Vec<GeneAndLoc> = vec![];
        for gene_details in self.genes.values() {
            // genes without a location have no neighbourhood
            if let Some(ref location) = gene_details.location {
                genes_and_locs.push(GeneAndLoc {
                    gene_uniquename: gene_details.uniquename.clone(),
                    loc: location.clone(),
                });
            }
        }
        // sort by chromosome then start position so neighbours are adjacent
        let cmp = |a: &GeneAndLoc, b: &GeneAndLoc| {
            let order = a.loc.chromosome_name.cmp(&b.loc.chromosome_name);
            if order == Ordering::Equal {
                a.loc.start_pos.cmp(&b.loc.start_pos)
            } else {
                order
            }
        };
        genes_and_locs.sort_by(cmp);
        for (i, this_gene_and_loc) in genes_and_locs.iter().enumerate() {
            let mut nearby_genes: Vec<GeneShort> = vec![];
            if i > 0 {
                // walk backwards, stopping at the window edge or a
                // chromosome boundary; insert at 0 to keep genomic order
                let start_index =
                    if i > GENE_NEIGHBOURHOOD_DISTANCE {
                        i - GENE_NEIGHBOURHOOD_DISTANCE
                    } else {
                        0
                    };
                for back_index in (start_index..i).rev() {
                    let back_gene_and_loc = &genes_and_locs[back_index];
                    if back_gene_and_loc.loc.chromosome_name !=
                        this_gene_and_loc.loc.chromosome_name {
                            break;
                        }
                    let back_gene_short = self.make_gene_short(&back_gene_and_loc.gene_uniquename);
                    nearby_genes.insert(0, back_gene_short);
                }
            }
            // the gene itself sits in the middle of its neighbourhood
            let gene_short = self.make_gene_short(&this_gene_and_loc.gene_uniquename);
            nearby_genes.push(gene_short);
            if i < genes_and_locs.len() - 1 {
                // walk forwards, again stopping at the window edge or a
                // chromosome boundary
                let end_index =
                    if i + GENE_NEIGHBOURHOOD_DISTANCE >= genes_and_locs.len() {
                        genes_and_locs.len()
                    } else {
                        i + GENE_NEIGHBOURHOOD_DISTANCE + 1
                    };
                for forward_index in i+1..end_index {
                    let forward_gene_and_loc = &genes_and_locs[forward_index];
                    if forward_gene_and_loc.loc.chromosome_name !=
                        this_gene_and_loc.loc.chromosome_name {
                            break;
                        }
                    let forward_gene_short = self.make_gene_short(&forward_gene_and_loc.gene_uniquename);
                    nearby_genes.push(forward_gene_short);
                }
            }
            let mut this_gene_details =
                self.genes.get_mut(&this_gene_and_loc.gene_uniquename).unwrap();
            this_gene_details.gene_neighbourhood.append(&mut nearby_genes);
        }
    }
/// Copy the alleles collected in `alleles_of_genotypes` into each
/// GenotypeDetails::expressed_alleles.
///
/// Two passes are needed because `self.genotypes` can't be iterated
/// and mutated at the same time: first build a map of the
/// ExpressedAllele vectors, then move them into the genotypes.
///
/// Panics (with the genotype named) if a genotype has no allele entry.
fn add_alleles_to_genotypes(&mut self) {
    let mut alleles_to_add: HashMap<String, Vec<ExpressedAllele>> = HashMap::new();

    for genotype_uniquename in self.genotypes.keys() {
        let allele_uniquenames: Vec<AlleleAndExpression> =
            self.alleles_of_genotypes.get(genotype_uniquename)
            // name the genotype instead of a bare unwrap() panic
            .unwrap_or_else(|| panic!("no alleles for genotype {}", genotype_uniquename))
            .clone();
        let expressed_allele_vec: Vec<ExpressedAllele> =
            allele_uniquenames.iter()
            .map(|allele_and_expression| {
                ExpressedAllele {
                    allele_uniquename: allele_and_expression.allele_uniquename.clone(),
                    expression: allele_and_expression.expression.clone(),
                }
            })
            .collect();

        alleles_to_add.insert(genotype_uniquename.clone(), expressed_allele_vec);
    }

    for (genotype_uniquename, genotype_details) in &mut self.genotypes {
        genotype_details.expressed_alleles =
            alleles_to_add.remove(genotype_uniquename)
            .unwrap_or_else(|| panic!("no alleles to add for genotype {}",
                                      genotype_uniquename));
    }
}
// add interaction, ortholog and paralog annotations
//
// These come from feature_relationship rows whose relation name
// matches one of FEATURE_REL_CONFIGS and whose subject and object are
// both genes.  Ortholog/paralog annotations are also stored in the
// reverse direction on the object gene, and all annotations are copied
// onto the reference (publication) when one is attached.
fn process_annotation_feature_rels(&mut self) {
    for feature_rel in self.raw.feature_relationships.iter() {
        let rel_name = &feature_rel.rel_type.name;
        let subject_uniquename = &feature_rel.subject.uniquename;
        let object_uniquename = &feature_rel.object.uniquename;

        for rel_config in FEATURE_REL_CONFIGS.iter() {
            if rel_name == rel_config.rel_type_name &&
                is_gene_type(&feature_rel.subject.feat_type.name) &&
                is_gene_type(&feature_rel.object.feat_type.name) {
                    let mut evidence: Option<Evidence> = None;

                    // the first attached publication (if any) becomes
                    // the reference for this annotation
                    let borrowed_publications = feature_rel.publications.borrow();
                    let maybe_publication = borrowed_publications.get(0).clone();
                    let maybe_reference_uniquename =
                        match maybe_publication {
                            Some(publication) => Some(publication.uniquename.clone()),
                            None => None,
                        };

                    // map a long evidence name to its short code when
                    // the config knows one, otherwise keep the long form
                    for prop in feature_rel.feature_relationshipprops.borrow().iter() {
                        if prop.prop_type.name == "evidence" {
                            if let Some(evidence_long) = prop.value.clone() {
                                if let Some(code) = self.config.evidence_types.get(&evidence_long) {
                                    evidence = Some(code.clone());
                                } else {
                                    evidence = Some(evidence_long);
                                }
                            }
                        }
                    }

                    // cloned because the forward annotation below
                    // consumes `evidence`; the clone is for the reverse
                    // annotation
                    let evidence_clone = evidence.clone();

                    let gene_uniquename = subject_uniquename;
                    let gene_organism_short = {
                        self.genes.get(subject_uniquename).unwrap().organism.clone()
                    };
                    let other_gene_uniquename = object_uniquename;
                    let other_gene_organism_short = {
                        self.genes.get(object_uniquename).unwrap().organism.clone()
                    };
                    {
                        // forward annotation, stored on the subject gene
                        let mut gene_details = self.genes.get_mut(subject_uniquename).unwrap();
                        match rel_config.annotation_type {
                            FeatureRelAnnotationType::Interaction => {
                                let interaction_annotation =
                                    InteractionAnnotation {
                                        gene_uniquename: gene_uniquename.clone(),
                                        interactor_uniquename: other_gene_uniquename.clone(),
                                        evidence: evidence,
                                        reference_uniquename: maybe_reference_uniquename.clone(),
                                    };
                                if rel_name == "interacts_physically" {
                                    gene_details.physical_interactions.push(interaction_annotation.clone());
                                } else {
                                    if rel_name == "interacts_genetically" {
                                        gene_details.genetic_interactions.push(interaction_annotation.clone());
                                    } else {
                                        panic!("unknown interaction type: {}", rel_name);
                                    }
                                };

                                // also store on the reference, if any
                                if let Some(ref_details) =
                                    if let Some(ref reference_uniquename) = maybe_reference_uniquename {
                                        self.references.get_mut(reference_uniquename)
                                    } else {
                                        None
                                    }
                                {
                                    if rel_name == "interacts_physically" {
                                        ref_details.physical_interactions.push(interaction_annotation.clone());
                                    } else {
                                        if rel_name == "interacts_genetically" {
                                            ref_details.genetic_interactions.push(interaction_annotation.clone());
                                        } else {
                                            panic!("unknown interaction type: {}", rel_name);
                                        }
                                    };
                                }
                            },
                            FeatureRelAnnotationType::Ortholog => {
                                let ortholog_annotation =
                                    OrthologAnnotation {
                                        gene_uniquename: gene_uniquename.clone(),
                                        ortholog_uniquename: other_gene_uniquename.clone(),
                                        ortholog_organism: other_gene_organism_short,
                                        evidence: evidence,
                                        reference_uniquename: maybe_reference_uniquename.clone(),
                                    };
                                gene_details.ortholog_annotations.push(ortholog_annotation.clone());
                                // also store on the reference, if any
                                if let Some(ref_details) =
                                    if let Some(ref reference_uniquename) = maybe_reference_uniquename {
                                        self.references.get_mut(reference_uniquename)
                                    } else {
                                        None
                                    }
                                {
                                    ref_details.ortholog_annotations.push(ortholog_annotation);
                                }
                            },
                            FeatureRelAnnotationType::Paralog => {
                                let paralog_annotation =
                                    ParalogAnnotation {
                                        gene_uniquename: gene_uniquename.clone(),
                                        paralog_uniquename: other_gene_uniquename.clone(),
                                        evidence: evidence,
                                        reference_uniquename: maybe_reference_uniquename.clone(),
                                    };
                                gene_details.paralog_annotations.push(paralog_annotation.clone());
                                // also store on the reference, if any
                                if let Some(ref_details) =
                                    if let Some(ref reference_uniquename) = maybe_reference_uniquename {
                                        self.references.get_mut(reference_uniquename)
                                    } else {
                                        None
                                    }
                                {
                                    ref_details.paralog_annotations.push(paralog_annotation);
                                }
                            }
                        }
                    }
                    {
                        // for orthologs and paralogs, store the reverses annotation too
                        let mut other_gene_details = self.genes.get_mut(object_uniquename).unwrap();
                        match rel_config.annotation_type {
                            FeatureRelAnnotationType::Interaction => {},
                            FeatureRelAnnotationType::Ortholog =>
                                other_gene_details.ortholog_annotations.push(
                                    OrthologAnnotation {
                                        gene_uniquename: other_gene_uniquename.clone(),
                                        ortholog_uniquename: gene_uniquename.clone(),
                                        ortholog_organism: gene_organism_short,
                                        evidence: evidence_clone,
                                        reference_uniquename: maybe_reference_uniquename.clone(),
                                    }),
                            FeatureRelAnnotationType::Paralog =>
                                other_gene_details.paralog_annotations.push(
                                    ParalogAnnotation {
                                        gene_uniquename: other_gene_uniquename.clone(),
                                        paralog_uniquename: gene_uniquename.clone(),
                                        evidence: evidence_clone,
                                        reference_uniquename: maybe_reference_uniquename
                                    }),
                        }
                    }
                }
        }
    }

    // keep all annotation lists in a deterministic sorted order
    for (_, ref_details) in &mut self.references {
        ref_details.physical_interactions.sort();
        ref_details.genetic_interactions.sort();
        ref_details.ortholog_annotations.sort();
        ref_details.paralog_annotations.sort();
    }
    for (_, gene_details) in &mut self.genes {
        gene_details.physical_interactions.sort();
        gene_details.genetic_interactions.sort();
        gene_details.ortholog_annotations.sort();
        gene_details.paralog_annotations.sort();
    }
}
/// Find the extension configuration that applies to an annotation
/// extension relation, if any.
///
/// A config matches when its rel_name equals `rel_type_name` and
/// either it has no `if_descendent_of` condition or the annotation
/// term has the configured termid among its interesting parents.  The
/// first match in configuration order wins.
///
/// Panics if `annotation_termid` isn't a known term.
fn matching_ext_config(&self, annotation_termid: &str,
                       rel_type_name: &str) -> Option<ExtensionConfig> {
    let ext_configs = &self.config.extensions;

    if let Some(annotation_term_details) = self.terms.get(annotation_termid) {
        for ext_config in ext_configs {
            if ext_config.rel_name == rel_type_name {
                // borrow the condition instead of cloning the Option
                if let Some(ref if_descendent_of) = ext_config.if_descendent_of {
                    if annotation_term_details.interesting_parents.contains(if_descendent_of) {
                        return Some(ext_config.clone());
                    }
                } else {
                    return Some(ext_config.clone());
                }
            }
        }
    } else {
        panic!("can't find details for term: {}\n", annotation_termid);
    }

    None
}
// create and returns any TargetOfAnnotations implied by the extension
//
// For each extension part whose range is a gene and whose matching
// extension config has a reciprocal display name, emit a
// TargetOfAnnotation pointing back at the annotated gene or genotype.
fn make_target_of_for_ext(&self, cv_name: &String,
                          maybe_gene_uniquename: &Option<String>,
                          maybe_genotype_uniquename: &Option<String>,
                          reference_uniquename: &Option<String>,
                          annotation_termid: &String,
                          extension: &Vec<ExtPart>) -> Vec<(GeneUniquename, TargetOfAnnotation)> {
    let mut ret_vec = vec![];

    for ext_part in extension {
        // look up the config first for every part:
        // matching_ext_config() panics on an unknown term and that
        // check applies regardless of the range type
        let maybe_ext_config =
            self.matching_ext_config(annotation_termid, &ext_part.rel_type_name);

        // only gene ranges can produce a target-of annotation
        let target_gene_uniquename =
            match ext_part.ext_range {
                ExtRange::Gene(ref gene_uniquename) => gene_uniquename,
                _ => continue,
            };

        // and only when the config asks for a reciprocal display
        let reciprocal_display_name =
            match maybe_ext_config {
                Some(ext_config) =>
                    match ext_config.reciprocal_display {
                        Some(display_name) => display_name,
                        None => continue,
                    },
                None => continue,
            };

        // an annotation belongs to either a genotype or a gene, never both
        let (annotation_gene_uniquename, annotation_genotype_uniquename) =
            if maybe_genotype_uniquename.is_some() {
                (None, maybe_genotype_uniquename.clone())
            } else {
                (maybe_gene_uniquename.clone(), None)
            };

        ret_vec.push((target_gene_uniquename.clone(),
                      TargetOfAnnotation {
                          ontology_name: cv_name.clone(),
                          ext_rel_display_name: reciprocal_display_name,
                          gene_uniquename: annotation_gene_uniquename,
                          genotype_uniquename: annotation_genotype_uniquename,
                          reference_uniquename: reference_uniquename.clone(),
                      }));
    }

    ret_vec
}
/// For every stored annotation whose extension implies a reciprocal
/// ("target of") annotation, add that annotation to the target gene's
/// GeneDetails::target_of_annotations.
fn add_target_of_annotations(&mut self) {
    // collect into a HashSet per gene first to de-duplicate, then
    // write into the genes in a second pass (self.genes can't be
    // mutated while self.terms is being read)
    let mut target_of_annotations: HashMap<GeneUniquename, HashSet<TargetOfAnnotation>> =
        HashMap::new();

    for (_, term_details) in &self.terms {
        for rel_annotation in &term_details.rel_annotations {
            for annotation in rel_annotation.annotations.iter() {
                let new_annotations =
                    self.make_target_of_for_ext(&term_details.cv_name,
                                                &annotation.gene_uniquename,
                                                &annotation.genotype_uniquename,
                                                &annotation.reference_uniquename,
                                                &term_details.termid, &annotation.extension);

                for (target_gene_uniquename, new_annotation) in new_annotations {
                    target_of_annotations
                        // the key is owned and unused afterwards: move
                        // it rather than clone
                        .entry(target_gene_uniquename)
                        // or_insert_with() avoids allocating a set when
                        // the entry already exists
                        .or_insert_with(HashSet::new)
                        .insert(new_annotation);
                }
            }
        }
    }

    for (gene_uniquename, target_of_annotations) in target_of_annotations {
        let gene_details = self.genes.get_mut(&gene_uniquename).unwrap();
        // consume the set directly instead of draining a mut binding
        gene_details.target_of_annotations = target_of_annotations.into_iter().collect();
    }
}
// Build CV summary rows for terms, genes, genotypes and references
// from the annotations already stored on them.
//
// NOTE(review): the two boolean arguments to make_cv_summaries()
// differ per caller below (true/true, false/true, false/false) —
// presumably include/exclude switches; confirm their meaning against
// the definition of make_cv_summaries().
fn make_all_cv_summaries(&mut self) {
    for (_, term_details) in &mut self.terms {
        term_details.rel_summaries =
            make_cv_summaries(&self.config, &self.raw.cvtermpaths,
                              true, true, &term_details.rel_annotations);
    }

    for (_, gene_details) in &mut self.genes {
        for (cv_name, term_annotations) in &mut gene_details.cv_annotations {
            let summaries =
                make_cv_summaries(&self.config, &self.raw.cvtermpaths,
                                  false, true, &term_annotations);
            gene_details.cv_summaries.insert(cv_name.clone(), summaries);
        }
    }

    for (_, genotype_details) in &mut self.genotypes {
        for (cv_name, term_annotations) in &mut genotype_details.cv_annotations {
            let summaries =
                make_cv_summaries(&self.config, &self.raw.cvtermpaths,
                                  false, false, &term_annotations);
            genotype_details.cv_summaries.insert(cv_name.clone(), summaries);
        }
    }

    for (_, reference_details) in &mut self.references {
        for (cv_name, term_annotations) in &mut reference_details.cv_annotations {
            let summaries =
                make_cv_summaries(&self.config, &self.raw.cvtermpaths,
                                  true, true, &term_annotations);
            reference_details.cv_summaries.insert(cv_name.clone(), summaries);
        }
    }
}
/// Create an empty TermDetails entry for every cvterm, except the
/// annotation extension terms (those are handled by
/// process_extension_cvterms() / process_cvterm_rels()).
///
/// All annotation collections start empty and are filled by later
/// passes.
fn process_cvterms(&mut self) {
    for cvterm in &self.raw.cvterms {
        if cvterm.cv.name != POMBASE_ANN_EXT_TERM_CV_NAME {
            // per-CV configuration decides which feature type this
            // CV annotates
            let cv_config =
                self.config.cv_config_by_name(&cvterm.cv.name);
            let annotation_feature_type =
                cv_config.feature_type.clone();

            self.terms.insert(cvterm.termid(),
                              TermDetails {
                                  name: cvterm.name.clone(),
                                  cv_name: cvterm.cv.name.clone(),
                                  annotation_feature_type: annotation_feature_type,
                                  interesting_parents: HashSet::new(),
                                  termid: cvterm.termid(),
                                  definition: cvterm.definition.clone(),
                                  direct_ancestors: vec![],
                                  is_obsolete: cvterm.is_obsolete,
                                  single_allele_genotype_uniquenames: HashSet::new(),
                                  rel_annotations: vec![],
                                  rel_summaries: vec![],
                                  not_rel_annotations: vec![],
                                  genes_by_uniquename: HashMap::new(),
                                  genotypes_by_uniquename: HashMap::new(),
                                  alleles_by_uniquename: HashMap::new(),
                                  references_by_uniquename: HashMap::new(),
                                  terms_by_termid: HashMap::new(),
                              });
        }
    }
}
fn get_ext_rel_display_name(&self, annotation_termid: &String,
ext_rel_name: &String) -> String {
if let Some(ext_conf) = self.matching_ext_config(annotation_termid, ext_rel_name) {
ext_conf.display_name.clone()
} else {
let re = Regex::new("_").unwrap();
re.replace_all(&ext_rel_name, " ")
}
}
/// Turn the cvtermprops of annotation extension terms into ExtParts.
///
/// Props whose type name starts with ANNOTATION_EXT_REL_PREFIX encode
/// "relation(range)" extension parts; the part is stored in
/// parts_of_extensions keyed by the extension term's id.  Requires
/// base_term_of_extensions to be populated first (panics otherwise).
fn process_extension_cvterms(&mut self) {
    for cvterm in &self.raw.cvterms {
        if cvterm.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            for cvtermprop in cvterm.cvtermprops.borrow().iter() {
                if (*cvtermprop).prop_type.name.starts_with(ANNOTATION_EXT_REL_PREFIX) {
                    // the relation name is the prop type name with the
                    // prefix stripped
                    let ext_rel_name_str =
                        &(*cvtermprop).prop_type.name[ANNOTATION_EXT_REL_PREFIX.len()..];
                    let ext_rel_name = String::from(ext_rel_name_str);
                    let ext_range = (*cvtermprop).value.clone();
                    // NOTE(review): "SP" prefix is used as a heuristic
                    // for gene identifiers here — confirm it covers all
                    // systematic ID forms
                    let range: ExtRange = if ext_range.starts_with("SP") {
                        ExtRange::Gene(ext_range)
                    } else {
                        ExtRange::Misc(ext_range)
                    };

                    if let Some(base_termid) =
                        self.base_term_of_extensions.get(&cvterm.termid()) {
                            // display name comes from the config for
                            // the base (is_a parent) term
                            let rel_type_display_name =
                                self.get_ext_rel_display_name(&base_termid, &ext_rel_name);

                            self.parts_of_extensions.entry(cvterm.termid())
                                .or_insert(Vec::new()).push(ExtPart {
                                    rel_type_name: String::from(ext_rel_name),
                                    rel_type_display_name: rel_type_display_name,
                                    ext_range: range,
                                });
                        } else {
                            panic!("can't find details for term: {}\n", cvterm.termid());
                        }
                }
            }
        }
    }
}
/// Process cvterm relationships in two passes.
///
/// Pass 1: for annotation extension terms, record the base term (the
/// "is_a" parent) in base_term_of_extensions; for ordinary terms,
/// record direct ancestors.  Pass 2 (which needs the complete base
/// term map from pass 1): turn the non-is_a relations of extension
/// terms into ExtParts in parts_of_extensions.
///
/// Panics when an extension term has a non-is_a relation but no
/// recorded base term.
fn process_cvterm_rels(&mut self) {
    for cvterm_rel in &self.raw.cvterm_relationships {
        let subject_term = &cvterm_rel.subject;
        let object_term = &cvterm_rel.object;
        let rel_type = &cvterm_rel.rel_type;

        if subject_term.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            let subject_termid = subject_term.termid();
            if rel_type.name == "is_a" {
                // termid() returns an owned String — no clone needed
                self.base_term_of_extensions.insert(subject_termid,
                                                    object_term.termid());
            }
        } else {
            let object_term_short =
                self.make_term_short(&object_term.termid());
            if let Some(ref mut subject_term_details) = self.terms.get_mut(&subject_term.termid()) {
                subject_term_details.direct_ancestors.push(TermAndRelation {
                    termid: object_term_short.termid.clone(),
                    term_name: object_term_short.name.clone(),
                    relation_name: rel_type.name.clone(),
                });
            }
        }
    }

    for cvterm_rel in &self.raw.cvterm_relationships {
        let subject_term = &cvterm_rel.subject;
        let object_term = &cvterm_rel.object;
        let rel_type = &cvterm_rel.rel_type;

        if subject_term.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            let subject_termid = subject_term.termid();
            if rel_type.name != "is_a" {
                if let Some(base_termid) =
                    self.base_term_of_extensions.get(&subject_term.termid()) {
                        let rel_type_display_name =
                            self.get_ext_rel_display_name(base_termid, &rel_type.name);

                        self.parts_of_extensions.entry(subject_termid)
                            .or_insert_with(Vec::new).push(ExtPart {
                                rel_type_name: rel_type.name.clone(),
                                rel_type_display_name: rel_type_display_name,
                                ext_range: ExtRange::Term(object_term.termid()),
                            });
                    } else {
                        // report the term whose base-term lookup failed
                        // (the subject), not the object term
                        panic!("can't find the base term for {}\n", subject_termid);
                    }
            }
        }
    }
}
/// Attach each feature synonym to the matching gene's synonym list.
/// Synonyms of features that aren't known genes are ignored.
fn process_feature_synonyms(&mut self) {
    for feature_synonym in self.raw.feature_synonyms.iter() {
        let feature = &feature_synonym.feature;
        let synonym = &feature_synonym.synonym;

        if let Some(ref mut gene_details) = self.genes.get_mut(&feature.uniquename) {
            let details = SynonymDetails {
                name: synonym.name.clone(),
                synonym_type: synonym.synonym_type.name.clone()
            };
            gene_details.synonyms.push(details);
        }
    }
}
/// Build a GenotypeShort for the given genotype.
///
/// Clones only the four fields GenotypeShort needs rather than cloning
/// the whole GenotypeDetails (which also carries annotation maps).
/// Panics, naming the genotype, if it is unknown.
fn make_genotype_short(&self, genotype_uniquename: &str) -> GenotypeShort {
    let details = self.genotypes.get(genotype_uniquename)
        .unwrap_or_else(|| panic!("can't find genotype details for {}",
                                  genotype_uniquename));
    GenotypeShort {
        uniquename: details.uniquename.clone(),
        name: details.name.clone(),
        background: details.background.clone(),
        expressed_alleles: details.expressed_alleles.clone(),
    }
}
/// Return a copy of the stored AlleleShort for the given allele.
/// Panics with the offending uniquename (rather than a bare unwrap)
/// if the allele is unknown.
fn make_allele_short(&self, allele_uniquename: &str) -> AlleleShort {
    self.alleles.get(allele_uniquename)
        .unwrap_or_else(|| panic!("can't find allele details for {}",
                                  allele_uniquename))
        .clone()
}
// process feature properties stored as cvterms,
// eg. characterisation_status and product
fn process_props_from_feature_cvterms(&mut self) {
    for feature_cvterm in self.raw.feature_cvterms.iter() {
        let feature = &feature_cvterm.feature;
        let cvterm = &feature_cvterm.cvterm;

        // Map a "PomBase gene products" annotation back to gene
        // uniquenames: a polypeptide goes via its transcript, a
        // transcript directly; other feature types contribute nothing.
        let gene_uniquenames_vec: Vec<GeneUniquename> =
            if cvterm.cv.name == "PomBase gene products" {
                if feature.feat_type.name == "polypeptide" {
                    self.transcripts_of_polypeptides.get(&feature.uniquename)
                        .and_then(|transcript_uniquename|
                                  self.genes_of_transcripts.get(transcript_uniquename))
                        .map(|gene_uniquename| vec![gene_uniquename.clone()])
                        .unwrap_or_else(Vec::new)
                } else if TRANSCRIPT_FEATURE_TYPES.contains(&feature.feat_type.name.as_str()) {
                    self.genes_of_transcripts.get(&feature.uniquename)
                        .map(|gene_uniquename| vec![gene_uniquename.clone()])
                        .unwrap_or_else(Vec::new)
                } else {
                    vec![]
                }
            } else {
                vec![]
            };

        for gene_uniquename in &gene_uniquenames_vec {
            self.add_gene_product(&gene_uniquename, &cvterm.name);
        }

        // characterisation status and name description apply to the
        // gene/pseudogene feature itself
        if feature.feat_type.name == "gene" || feature.feat_type.name == "pseudogene" {
            if cvterm.cv.name == "PomBase gene characterisation status" {
                self.add_characterisation_status(&feature.uniquename, &cvterm.name);
            } else if cvterm.cv.name == "name_description" {
                self.add_name_description(&feature.uniquename, &cvterm.name);
            }
        }
    }
}
/// Build the "active_form" extension part used for gene product form
/// annotations.  "PR:" identifiers become gene product ranges;
/// anything else is kept as miscellaneous text.
fn get_gene_prod_extension(&self, prod_value: &String) -> ExtPart {
    // clone once up front, then move into whichever range applies
    let value = prod_value.clone();
    let ext_range = if value.starts_with("PR:") {
        ExtRange::GeneProduct(value)
    } else {
        ExtRange::Misc(value)
    };

    ExtPart {
        rel_type_name: "active_form".into(),
        rel_type_display_name: "active form".into(),
        ext_range: ext_range,
    }
}
// return a fake extension for "with" properties on protein binding annotations
fn get_with_extension(&self, with_value: &String) -> ExtPart {
    let ext_range =
        // NOTE(review): "SP%" looks like a SQL LIKE pattern rather
        // than a real identifier prefix — confirm this shouldn't be
        // a plain "SP" prefix test
        if with_value.starts_with("SP%") {
            ExtRange::Gene(with_value.clone())
        } else {
            if with_value.starts_with("PomBase:SP") {
                // strip the 8-character "PomBase:" prefix, keeping the
                // systematic identifier
                let gene_uniquename =
                    String::from(&with_value[8..]);
                ExtRange::Gene(gene_uniquename)
            } else {
                if with_value.to_lowercase().starts_with("pfam:") {
                    ExtRange::Domain(with_value.clone())
                } else {
                    ExtRange::Misc(with_value.clone())
                }
            }
        };

    // a with property on a protein binding (GO:0005515) is
    // displayed as a binds extension
    // https://github.com/pombase/website/issues/108
    ExtPart {
        rel_type_name: "binds".into(),
        rel_type_display_name: "binds".into(),
        ext_range: ext_range,
    }
}
/// Interpret a "with"/"from" value as a gene, a term, or a plain
/// identifier.
///
/// A leading "<DB_NAME>:" prefix is stripped before the gene lookup.
/// The prefix is matched literally with starts_with() instead of a
/// regex, which avoids recompiling a pattern on every call (and treats
/// DB_NAME as a literal rather than a regex fragment).
fn make_with_or_from_value(&self, with_or_from_value: String) -> WithFromValue {
    let db_prefix = String::from(DB_NAME) + ":";
    let gene_uniquename =
        if with_or_from_value.starts_with(&db_prefix) {
            String::from(&with_or_from_value[db_prefix.len()..])
        } else {
            with_or_from_value.clone()
        };

    if self.genes.contains_key(&gene_uniquename) {
        let gene_short = self.make_gene_short(&gene_uniquename);
        WithFromValue::Gene(gene_short)
    } else if self.terms.get(&with_or_from_value).is_some() {
        // the un-stripped value might be a termid
        WithFromValue::Term(self.make_term_short(&with_or_from_value))
    } else {
        WithFromValue::Identifier(with_or_from_value)
    }
}
// add the with value as a fake extension if the cvterm is_a protein binding,
// otherwise return the value
//
// Returns WithFromValue::None when the value was consumed as an
// extension (pushed onto `extension`).
fn make_with_extension(&self, termid: &String, evidence_code: Option<String>,
                       extension: &mut Vec<ExtPart>,
                       with_value: String) -> WithFromValue {
    // use the base term when this is an annotation extension term
    let base_termid =
        match self.base_term_of_extensions.get(termid) {
            Some(base_termid) => base_termid.clone(),
            None => termid.clone(),
        };

    let base_term_short = self.make_term_short(&base_termid);

    // only IPI annotations to protein binding (GO:0005515) or a
    // descendant get the fake "binds" extension
    let is_protein_binding =
        base_term_short.termid == "GO:0005515" ||
        base_term_short.interesting_parents.contains("GO:0005515");

    // as_deref() replaces the is_some() && unwrap() anti-pattern
    if evidence_code.as_deref() == Some("IPI") && is_protein_binding {
        extension.push(self.get_with_extension(&with_value));
        WithFromValue::None
    } else {
        self.make_with_or_from_value(with_value)
    }
}
// process annotation
//
// Turn each feature_cvterm row into an OntAnnotationDetail attached to
// the gene(s) it annotates (directly, via a transcript/polypeptide, or
// via the alleles of a genotype) and register it with add_annotation().
fn process_feature_cvterms(&mut self) {
    for feature_cvterm in self.raw.feature_cvterms.iter() {
        let feature = &feature_cvterm.feature;
        let cvterm = &feature_cvterm.cvterm;

        let mut extension = vec![];

        // these CVs hold feature properties, handled by
        // process_props_from_feature_cvterms(), not annotations
        if cvterm.cv.name == "PomBase gene characterisation status" ||
            cvterm.cv.name == "PomBase gene products" ||
            cvterm.cv.name == "name_description" {
                continue;
            }

        let publication = &feature_cvterm.publication;
        let mut extra_props: HashMap<String, String> = HashMap::new();
        let mut conditions: Vec<TermId> = vec![];
        let mut with: WithFromValue = WithFromValue::None;
        let mut from: WithFromValue = WithFromValue::None;
        let mut qualifiers: Vec<Qualifier> = vec![];
        let mut evidence: Option<String> = None;

        // stored raw and processed after the prop loop, because the
        // "with" handling needs the final evidence value
        let mut raw_with_value: Option<String> = None;

        // collect the feature_cvterm props into the locals above
        for ref prop in feature_cvterm.feature_cvtermprops.borrow().iter() {
            match &prop.type_name() as &str {
                "residue" | "scale" |
                "quant_gene_ex_copies_per_cell" |
                "quant_gene_ex_avg_copies_per_cell" => {
                    if let Some(value) = prop.value.clone() {
                        extra_props.insert(prop.type_name().clone(), value);
                    }
                },
                // long evidence names map to short codes when configured
                "evidence" =>
                    if let Some(evidence_long) = prop.value.clone() {
                        if let Some(code) = self.config.evidence_types.get(&evidence_long) {
                            evidence = Some(code.clone());
                        } else {
                            evidence = Some(evidence_long);
                        }
                    },
                "condition" =>
                    if let Some(value) = prop.value.clone() {
                        conditions.push(value.clone());
                    },
                "qualifier" =>
                    if let Some(value) = prop.value.clone() {
                        qualifiers.push(value);
                    },
                "with" => {
                    raw_with_value = prop.value.clone();
                },
                "from" => {
                    if let Some(value) = prop.value.clone() {
                        from = self.make_with_or_from_value(value);
                    }
                },
                "gene_product_form_id" => {
                    if let Some(value) = prop.value.clone() {
                        extension.push(self.get_gene_prod_extension(&value));
                    }
                },
                _ => ()
            }
        }

        // "with" either becomes a fake binding extension (IPI +
        // protein binding) or a plain with value
        if let Some(value) = raw_with_value {
            let with_gene_short =
                self.make_with_extension(&cvterm.termid(), evidence.clone(),
                                         &mut extension, value);
            if with_gene_short.is_some() {
                with = with_gene_short;
            }
        }

        let mut maybe_genotype_uniquename = None;

        // resolve the annotated feature to gene uniquename(s):
        // polypeptide -> transcript -> gene; genotype -> the genes of
        // its expressed alleles; gene/pseudogene -> itself;
        // transcript -> its gene; anything else -> no genes
        let mut gene_uniquenames_vec: Vec<GeneUniquename> =
            match &feature.feat_type.name as &str {
                "polypeptide" => {
                    if let Some(transcript_uniquename) =
                        self.transcripts_of_polypeptides.get(&feature.uniquename) {
                            if let Some(gene_uniquename) =
                                self.genes_of_transcripts.get(transcript_uniquename) {
                                    vec![gene_uniquename.clone()]
                                } else {
                                    vec![]
                                }
                        } else {
                            vec![]
                        }
                },
                "genotype" => {
                    let genotype_short = self.make_genotype_short(&feature.uniquename);
                    maybe_genotype_uniquename = Some(genotype_short.uniquename.clone());
                    genotype_short.expressed_alleles.iter()
                        .map(|expressed_allele| {
                            let allele_short =
                                self.make_allele_short(&expressed_allele.allele_uniquename);
                            allele_short.gene_uniquename.clone()
                        })
                        .collect()
                },
                _ => {
                    if feature.feat_type.name == "gene" || feature.feat_type.name == "pseudogene" {
                        vec![feature.uniquename.clone()]
                    } else {
                        if TRANSCRIPT_FEATURE_TYPES.contains(&feature.feat_type.name.as_str()) {
                            if let Some(gene_uniquename) =
                                self.genes_of_transcripts.get(&feature.uniquename) {
                                    vec![gene_uniquename.clone()]
                                } else {
                                    vec![]
                                }
                        } else {
                            vec![]
                        }
                    }
                }
            };

        // NOTE(review): dedup() only removes *adjacent* duplicates —
        // confirm the vector is grouped, or sorting is not required here
        gene_uniquenames_vec.dedup();

        // "null" is the placeholder publication meaning "no reference"
        let reference_uniquename =
            if publication.uniquename == "null" {
                None
            } else {
                Some(publication.uniquename.clone())
            };

        // one OntAnnotationDetail per annotated gene
        for gene_uniquename in &gene_uniquenames_vec {
            let mut extra_props_clone = extra_props.clone();
            let copies_per_cell = extra_props_clone.remove("quant_gene_ex_copies_per_cell");
            let avg_copies_per_cell = extra_props_clone.remove("quant_gene_ex_avg_copies_per_cell");
            let scale = extra_props_clone.remove("scale");

            // quantitative gene expression props, when present
            let gene_ex_props =
                if copies_per_cell.is_some() || avg_copies_per_cell.is_some() {
                    Some(GeneExProps {
                        copies_per_cell: copies_per_cell,
                        avg_copies_per_cell: avg_copies_per_cell,
                        scale: scale,
                    })
                } else {
                    None
                };
            let maybe_gene_uniquename = Some(gene_uniquename.clone());
            let annotation = OntAnnotationDetail {
                id: feature_cvterm.feature_cvterm_id,
                gene_uniquename: maybe_gene_uniquename,
                reference_uniquename: reference_uniquename.clone(),
                genotype_uniquename: maybe_genotype_uniquename.clone(),
                with: with.clone(),
                from: from.clone(),
                residue: extra_props_clone.remove("residue"),
                gene_ex_props: gene_ex_props,
                qualifiers: qualifiers.clone(),
                evidence: evidence.clone(),
                conditions: conditions.clone(),
                extension: extension.clone(),
            };

            self.add_annotation(cvterm.borrow(), feature_cvterm.is_not,
                                annotation);
        }
    }
}
/// Group a term's annotation details into per-CV OntTermAnnotations.
///
/// Most CVs map straight through under their own name, but:
/// - "gene_ex" annotations are split into qualitative vs quantitative
///   (decided by the presence of gene_ex_props), and may not be NOT
///   annotations;
/// - "fission_yeast_phenotype" annotations are split into single- vs
///   multi-allele genotype annotations.
fn make_term_annotations(&self, termid: &str, details: &Vec<Rc<OntAnnotationDetail>>,
                         is_not: bool)
                         -> Vec<(CvName, OntTermAnnotations)> {
    let term_short = self.make_term_short(termid);

    let cv_name = term_short.cv_name.clone();

    match cv_name.as_ref() {
        "gene_ex" => {
            if is_not {
                panic!("gene_ex annotations can't be NOT annotations");
            }
            let mut qual_annotations =
                OntTermAnnotations {
                    term: term_short.clone(),
                    is_not: false,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                };
            let mut quant_annotations =
                OntTermAnnotations {
                    term: term_short.clone(),
                    is_not: false,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                };
            for detail in details {
                // gene_ex_props present => quantitative measurement
                if detail.gene_ex_props.is_some() {
                    quant_annotations.annotations.push(detail.clone())
                } else {
                    qual_annotations.annotations.push(detail.clone())
                }
            }

            let mut return_vec = vec![];

            // only emit the buckets that are non-empty
            if qual_annotations.annotations.len() > 0 {
                return_vec.push((String::from("qualitative_gene_expression"),
                                 qual_annotations));
            }

            if quant_annotations.annotations.len() > 0 {
                return_vec.push((String::from("quantitative_gene_expression"),
                                 quant_annotations));
            }

            return_vec
        },
        "fission_yeast_phenotype" => {
            let mut single_allele =
                OntTermAnnotations {
                    term: term_short.clone(),
                    is_not: is_not,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                };
            let mut multi_allele =
                OntTermAnnotations {
                    term: term_short.clone(),
                    is_not: is_not,
                    rel_names: HashSet::new(),
                    annotations: vec![],
                };

            for detail in details {
                // phenotype annotations always carry a genotype
                let genotype_uniquename = detail.genotype_uniquename.clone().unwrap();
                if let Some(genotype_details) = self.genotypes.get(&genotype_uniquename) {
                    if genotype_details.expressed_alleles.len() == 1 {
                        single_allele.annotations.push(detail.clone())
                    } else {
                        multi_allele.annotations.push(detail.clone())
                    }
                } else {
                    panic!("can't find genotype details for {}\n", genotype_uniquename);
                }
            }

            let mut return_vec = vec![];

            // only emit the buckets that are non-empty
            if single_allele.annotations.len() > 0 {
                return_vec.push((String::from("single_allele_phenotype"),
                                 single_allele));
            }

            if multi_allele.annotations.len() > 0 {
                return_vec.push((String::from("multi_allele_phenotype"),
                                 multi_allele));
            }

            return_vec
        },
        _ => {
            // default: one entry under the CV's own name
            vec![(cv_name,
                  OntTermAnnotations {
                      term: term_short.clone(),
                      is_not: is_not,
                      rel_names: HashSet::new(),
                      annotations: details.clone(),
                  })]
        }
    }
}
// store the OntTermAnnotations in the TermDetails, GeneDetails,
// GenotypeDetails and ReferenceDetails
//
// Reads from all_ont_annotations (or all_not_ont_annotations when
// is_not is true); also attaches annotations to their condition terms.
fn store_ont_annotations(&mut self, is_not: bool) {
    let ont_annotations = if is_not {
        &self.all_not_ont_annotations
    } else {
        &self.all_ont_annotations
    };

    // per-entity indexes built in the first loop, consumed afterwards
    // (self.genes/genotypes/references can't be mutated while
    // self.terms is borrowed)
    let mut gene_annotation_by_term: HashMap<GeneUniquename, HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>> =
        HashMap::new();
    let mut genotype_annotation_by_term: HashMap<GenotypeUniquename, HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>> =
        HashMap::new();
    let mut ref_annotation_by_term: HashMap<String, HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>> =
        HashMap::new();

    for (termid, annotations) in ont_annotations {
        let term_short = self.make_term_short(termid);

        // store on the term itself, in the NOT or plain list
        if let Some(ref mut term_details) = self.terms.get_mut(termid) {
            let new_rel_ont_annotation = OntTermAnnotations {
                rel_names: HashSet::new(),
                is_not: is_not,
                term: term_short.clone(),
                annotations: annotations.clone(),
            };
            if is_not {
                term_details.not_rel_annotations.push(new_rel_ont_annotation);
            } else {
                term_details.rel_annotations.push(new_rel_ont_annotation);
            }
        } else {
            panic!("missing termid: {}\n", termid);
        }

        for detail in annotations {
            gene_annotation_by_term.entry(detail.gene_uniquename.clone().unwrap())
                .or_insert(HashMap::new())
                .entry(termid.clone())
                .or_insert(vec![])
                .push(detail.clone());

            if let Some(ref genotype_uniquename) = detail.genotype_uniquename {
                // de-duplicated: a genotype can appear once per gene
                // of its alleles
                let mut existing =
                    genotype_annotation_by_term.entry(genotype_uniquename.clone())
                    .or_insert(HashMap::new())
                    .entry(termid.clone())
                    .or_insert(vec![]);
                if !existing.contains(detail) {
                    existing.push(detail.clone());
                }
            }

            if let Some(reference_uniquename) = detail.reference_uniquename.clone() {
                ref_annotation_by_term.entry(reference_uniquename)
                    .or_insert(HashMap::new())
                    .entry(termid.clone())
                    .or_insert(vec![])
                    .push(detail.clone());
            }

            // each condition term also records the annotation, in a
            // single OntTermAnnotations entry created on first use
            for condition_termid in &detail.conditions {
                let condition_term_short = {
                    self.make_term_short(&condition_termid)
                };

                if let Some(ref mut condition_term_details) =
                    self.terms.get_mut(&condition_termid.clone())
                {
                    if condition_term_details.rel_annotations.len() == 0 {
                        condition_term_details.rel_annotations.push(
                            OntTermAnnotations {
                                term: condition_term_short,
                                is_not: is_not,
                                rel_names: HashSet::new(),
                                annotations: vec![],
                            });
                    }
                    if let Some(rel_annotation) = condition_term_details.rel_annotations.get_mut(0) {
                        rel_annotation.annotations.push(detail.clone())
                    }
                }
            }
        }
    }

    // write the per-gene index into the genes, then sort for
    // deterministic output
    for (gene_uniquename, term_annotation_map) in &gene_annotation_by_term {
        for (termid, details) in term_annotation_map {
            let new_annotations =
                self.make_term_annotations(&termid, &details, is_not);

            let mut gene_details = self.genes.get_mut(gene_uniquename).unwrap();

            for (cv_name, new_annotation) in new_annotations {
                gene_details.cv_annotations.entry(cv_name.clone())
                    .or_insert(Vec::new())
                    .push(new_annotation);
            }
        }

        let mut gene_details = self.genes.get_mut(gene_uniquename).unwrap();
        for (_, mut cv_annotations) in &mut gene_details.cv_annotations {
            cv_annotations.sort()
        }
    }

    // same for genotypes
    for (genotype_uniquename, term_annotation_map) in &genotype_annotation_by_term {
        for (termid, details) in term_annotation_map {
            let new_annotations =
                self.make_term_annotations(&termid, &details, is_not);

            let mut details = self.genotypes.get_mut(genotype_uniquename).unwrap();

            for (cv_name, new_annotation) in new_annotations {
                details.cv_annotations.entry(cv_name.clone())
                    .or_insert(Vec::new())
                    .push(new_annotation);
            }
        }

        let mut details = self.genotypes.get_mut(genotype_uniquename).unwrap();
        for (_, mut cv_annotations) in &mut details.cv_annotations {
            cv_annotations.sort()
        }
    }

    // same for references
    for (reference_uniquename, ref_annotation_map) in &ref_annotation_by_term {
        for (termid, details) in ref_annotation_map {
            let new_annotations =
                self.make_term_annotations(&termid, &details, is_not);

            let mut ref_details = self.references.get_mut(reference_uniquename).unwrap();

            for (cv_name, new_annotation) in new_annotations {
                ref_details.cv_annotations.entry(cv_name).or_insert(Vec::new())
                    .push(new_annotation.clone());
            }
        }

        let mut ref_details = self.references.get_mut(reference_uniquename).unwrap();
        for (_, mut term_annotations) in &mut ref_details.cv_annotations {
            term_annotations.sort()
        }
    }
}
/// True when `termid`/`rel_name` names a parent term worth recording:
/// either the pair appears in INTERESTING_PARENTS, or some extension
/// config is conditional (`if_descendent_of`) on the term via "is_a".
fn is_interesting_parent(&self, termid: &str, rel_name: &str) -> bool {
    let statically_configured = INTERESTING_PARENTS.iter()
        .any(|parent_conf| parent_conf.termid == termid &&
             parent_conf.rel_name == rel_name);
    if statically_configured {
        return true;
    }

    rel_name == "is_a" &&
        self.config.extensions.iter()
        .any(|ext_conf| match ext_conf.if_descendent_of {
            Some(ref conf_termid) => conf_termid == termid,
            None => false,
        })
}
/// Propagate annotations up the ontology using the cvtermpath closure:
/// each object (ancestor) term gains an OntTermAnnotations entry
/// holding the annotations of its subject (descendant) terms, tagged
/// with the relation names that connect them.
///
/// Only relations in DESCENDANT_REL_NAMES are followed, and "has_part"
/// only for the CVs listed in HAS_PART_CV_NAMES.
fn process_cvtermpath(&mut self) {
    // annotation id -> detail, so each detail Rc is stored once
    let mut annotation_by_id: HashMap<i32, Rc<OntAnnotationDetail>> = HashMap::new();

    // dest (ancestor) termid -> source (descendant) termid ->
    // annotation id -> relation names connecting the two terms
    let mut new_annotations: HashMap<TermId, HashMap<TermId, HashMap<i32, HashSet<RelName>>>> =
        HashMap::new();

    for cvtermpath in &self.raw.cvtermpaths {
        let subject_term = &cvtermpath.subject;
        let subject_termid = subject_term.termid();
        let object_term = &cvtermpath.object;
        let object_termid = object_term.termid();

        if let Some(subject_term_details) = self.terms.get(&subject_termid) {
            let rel_termid =
                match cvtermpath.rel_type {
                    Some(ref rel_type) => {
                        rel_type.termid()
                    },
                    None => panic!("no relation type for {} <-> {}\n",
                                   &subject_term.name, &object_term.name)
                };
            let rel_term_name =
                self.make_term_short(&rel_termid).name;

            // has_part is only propagated for selected CVs
            if rel_term_name == "has_part" &&
                !HAS_PART_CV_NAMES.contains(&subject_term_details.cv_name.as_str()) {
                    continue;
                }

            if !DESCENDANT_REL_NAMES.contains(&rel_term_name.as_str()) {
                continue;
            }

            let annotations = &subject_term_details.rel_annotations;
            for rel_annotation in annotations {
                let OntTermAnnotations {
                    rel_names: _,
                    is_not: _,
                    term: _,
                    annotations: existing_details
                } = rel_annotation.clone();

                for detail in &existing_details {
                    if !annotation_by_id.contains_key(&detail.id) {
                        annotation_by_id.insert(detail.id, detail.clone());
                    }

                    let (dest_termid, source_termid) =
                        (object_termid.clone(), subject_termid.clone());

                    new_annotations.entry(dest_termid)
                        .or_insert(HashMap::new())
                        .entry(source_termid)
                        .or_insert(HashMap::new())
                        .entry(detail.id)
                        .or_insert(HashSet::new())
                        .insert(rel_term_name.clone());
                }
            }
        } else {
            panic!("TermDetails not found for {}", &subject_termid);
        }
    }

    // materialise the collected map as OntTermAnnotations on the
    // destination (ancestor) terms
    for (dest_termid, dest_annotations_map) in new_annotations.drain() {
        for (source_termid, source_annotations_map) in dest_annotations_map {
            let mut new_details: Vec<Rc<OntAnnotationDetail>> = vec![];
            let mut all_rel_names: HashSet<String> = HashSet::new();
            for (id, rel_names) in source_annotations_map {
                let detail = annotation_by_id.get(&id).unwrap().clone();
                new_details.push(detail);
                for rel_name in rel_names {
                    all_rel_names.insert(rel_name);
                }
            }

            let source_term_short = self.make_term_short(&source_termid);
            let mut dest_term_details = {
                self.terms.get_mut(&dest_termid).unwrap()
            };
            dest_term_details.rel_annotations.push(OntTermAnnotations {
                rel_names: all_rel_names,
                is_not: false,
                term: source_term_short.clone(),
                annotations: new_details,
            });
        }
    }
}
// Assemble export metadata: program name/version from Cargo plus the
// database creation timestamp (from the chadoprops) and entity counts.
fn make_metadata(&mut self) -> Metadata {
    let mut db_creation_datetime = None;
    // If several db_creation_datetime chadoprops exist, the last one wins.
    for chadoprop in &self.raw.chadoprops {
        if chadoprop.prop_type.name == "db_creation_datetime" {
            db_creation_datetime = chadoprop.value.clone();
        }
    }
    const PKG_NAME: &'static str = env!("CARGO_PKG_NAME");
    const VERSION: &'static str = env!("CARGO_PKG_VERSION");
    Metadata {
        export_prog_name: String::from(PKG_NAME),
        export_prog_version: String::from(VERSION),
        // Panic with a useful diagnostic instead of a bare unwrap()
        // when the chadoprop is missing from the database.
        db_creation_datetime: db_creation_datetime
            .expect("no db_creation_datetime chadoprop found in the database"),
        gene_count: self.genes.len(),
        term_count: self.terms.len(),
    }
}
// Build the data used by the search/autocomplete API: a summary for every
// gene plus maps from term ID and term name to the genes annotated with
// that term.
pub fn make_search_api_maps(&self) -> SearchAPIMaps {
    let mut gene_summaries: Vec<GeneSummary> = vec![];
    // Iterate the keys directly rather than cloning them all into a
    // temporary Vec first -- make_gene_summary() only needs &self.
    for gene_uniquename in self.genes.keys() {
        gene_summaries.push(self.make_gene_summary(gene_uniquename));
    }
    let mut term_summaries: HashSet<TermShort> = HashSet::new();
    let mut termid_genes: HashMap<TermId, HashSet<GeneUniquename>> = HashMap::new();
    let mut term_name_genes: HashMap<TermName, HashSet<GeneUniquename>> = HashMap::new();
    for (termid, term_details) in &self.terms {
        term_summaries.insert(self.make_term_short(termid));
        // Index this term's genes by both termid and term name.
        for gene_uniquename in term_details.genes_by_uniquename.keys() {
            termid_genes.entry(termid.clone())
                .or_insert(HashSet::new())
                .insert(gene_uniquename.clone());
            term_name_genes.entry(term_details.name.clone())
                .or_insert(HashSet::new())
                .insert(gene_uniquename.clone());
        }
    }
    SearchAPIMaps {
        gene_summaries: gene_summaries,
        termid_genes: termid_genes,
        term_name_genes: term_name_genes,
        term_summaries: term_summaries,
    }
}
// Walk one feature's ontology annotations and record every reference,
// gene, genotype, allele and term they mention in the corresponding
// "seen" map, keyed by `identifier`.
fn add_cv_annotations_to_maps(&self,
                              identifier: &String,
                              cv_annotations: &OntAnnotationMap,
                              seen_references: &mut HashMap<String, ReferenceShortMap>,
                              seen_genes: &mut HashMap<String, GeneShortMap>,
                              seen_genotypes: &mut HashMap<String, GenotypeShortMap>,
                              seen_alleles: &mut HashMap<String, AlleleShortMap>,
                              seen_terms: &mut HashMap<String, TermShortMap>) {
    for (_, term_annotations) in cv_annotations {
        for term_annotation in term_annotations.iter() {
            for annotation_detail in &term_annotation.annotations {
                // The publication the annotation came from.
                self.add_ref_to_hash(seen_references,
                                     identifier.clone(),
                                     annotation_detail.reference_uniquename.clone());
                // Experimental conditions are themselves terms.
                for condition_termid in &annotation_detail.conditions {
                    self.add_term_to_hash(seen_terms,
                                          identifier.clone(),
                                          condition_termid.clone());
                }
                // Extension ranges may name other terms or genes.
                for ext_part in &annotation_detail.extension {
                    match ext_part.ext_range {
                        ExtRange::Term(ref range_termid) =>
                            self.add_term_to_hash(seen_terms, identifier.clone(),
                                                  range_termid.clone()),
                        ExtRange::Gene(ref ext_gene_uniquename) =>
                            self.add_gene_to_hash(seen_genes, identifier.clone(),
                                                  ext_gene_uniquename.clone()),
                        _ => {},
                    }
                }
                // A genotype annotation pulls in the genotype, its
                // alleles and their genes.
                if let Some(ref genotype_uniquename) = annotation_detail.genotype_uniquename {
                    self.add_genotype_to_hash(seen_genotypes, seen_alleles, seen_genes,
                                              identifier.clone(),
                                              genotype_uniquename);
                }
            }
        }
    }
}
// For every term, collect the genes, genotypes, alleles, references and
// other terms mentioned by its annotations into per-term lookup maps,
// then move the collected maps into the TermDetails structs.
fn set_term_details_maps(&mut self) {
let (mut seen_references, mut seen_genes, mut seen_genotypes,
mut seen_alleles, mut seen_terms) = get_maps();
// Pass 1 (immutable borrow of self.terms): accumulate, keyed by termid.
for (termid, term_details) in &self.terms {
for rel_annotation in &term_details.rel_annotations {
for detail in &rel_annotation.annotations {
let gene_uniquename = detail.gene_uniquename.clone();
// NOTE(review): unwrap() assumes every rel annotation carries a
// gene -- confirm that invariant holds upstream.
self.add_gene_to_hash(&mut seen_genes, termid.clone(), gene_uniquename.unwrap().clone());
self.add_ref_to_hash(&mut seen_references, termid.clone(), detail.reference_uniquename.clone());
// Experimental conditions are terms too.
for condition_termid in &detail.conditions {
self.add_term_to_hash(&mut seen_terms, termid.clone(), condition_termid.clone());
}
// Extension ranges may name other terms or genes.
for ext_part in &detail.extension {
match ext_part.ext_range {
ExtRange::Term(ref range_termid) =>
self.add_term_to_hash(&mut seen_terms, termid.clone(), range_termid.clone()),
ExtRange::Gene(ref allele_gene_uniquename) =>
self.add_gene_to_hash(&mut seen_genes, termid.clone(),
allele_gene_uniquename.clone()),
_ => {},
}
}
if let Some(ref genotype_uniquename) = detail.genotype_uniquename {
self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles,
&mut seen_genes, termid.clone(),
&genotype_uniquename);
}
}
}
}
// Pass 2 (mutable borrow): move the collected maps into each term.
for (termid, term_details) in &mut self.terms {
if let Some(genes) = seen_genes.remove(termid) {
term_details.genes_by_uniquename = genes;
}
if let Some(genotypes) = seen_genotypes.remove(termid) {
term_details.genotypes_by_uniquename = genotypes;
}
if let Some(alleles) = seen_alleles.remove(termid) {
term_details.alleles_by_uniquename = alleles;
}
if let Some(references) = seen_references.remove(termid) {
term_details.references_by_uniquename = references;
}
if let Some(terms) = seen_terms.remove(termid) {
term_details.terms_by_termid = terms;
}
}
}
// For every gene, collect the references, genes, genotypes, alleles and
// terms mentioned by its annotations and interactions, then move the
// collected maps into the GeneDetails structs.
fn set_gene_details_maps(&mut self) {
let (mut seen_references, mut seen_genes, mut seen_genotypes,
mut seen_alleles, mut seen_terms) = get_maps();
// Pass 1 (immutable borrow, in a block so the borrow ends before pass 2):
// accumulate the "seen" maps keyed by gene uniquename.
{
for (gene_uniquename, gene_details) in &self.genes {
// Ontology annotations (shared helper with genotypes).
self.add_cv_annotations_to_maps(&gene_uniquename,
&gene_details.cv_annotations,
&mut seen_references,
&mut seen_genes,
&mut seen_genotypes,
&mut seen_alleles,
&mut seen_terms);
// Both interaction lists contribute the partner genes and refs.
let interaction_iter =
gene_details.physical_interactions.iter().chain(&gene_details.genetic_interactions);
for interaction in interaction_iter {
self.add_ref_to_hash(&mut seen_references, gene_uniquename.clone(), interaction.reference_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), interaction.gene_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), interaction.interactor_uniquename.clone());
}
for ortholog_annotation in &gene_details.ortholog_annotations {
self.add_ref_to_hash(&mut seen_references, gene_uniquename.clone(), ortholog_annotation.reference_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), ortholog_annotation.gene_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), ortholog_annotation.ortholog_uniquename.clone());
}
for paralog_annotation in &gene_details.paralog_annotations {
self.add_ref_to_hash(&mut seen_references, gene_uniquename.clone(), paralog_annotation.reference_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), paralog_annotation.gene_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), paralog_annotation.paralog_uniquename.clone());
}
// target_of annotations can name a gene and/or a genotype.
for target_of_annotation in &gene_details.target_of_annotations {
if let Some(ref annotation_gene_uniquename) = target_of_annotation.gene_uniquename {
self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(),
annotation_gene_uniquename.clone());
}
if let Some(ref annotation_genotype_uniquename) = target_of_annotation.genotype_uniquename {
self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles, &mut seen_genes,
gene_uniquename.clone(),
&annotation_genotype_uniquename.clone())
}
self.add_ref_to_hash(&mut seen_references, gene_uniquename.clone(),
target_of_annotation.reference_uniquename.clone());
}
}
}
// Pass 2 (mutable borrow): move the collected maps into each gene.
for (gene_uniquename, gene_details) in &mut self.genes {
if let Some(references) = seen_references.remove(gene_uniquename) {
gene_details.references_by_uniquename = references;
}
if let Some(alleles) = seen_alleles.remove(gene_uniquename) {
gene_details.alleles_by_uniquename = alleles;
}
if let Some(genes) = seen_genes.remove(gene_uniquename) {
gene_details.genes_by_uniquename = genes;
}
if let Some(genotypes) = seen_genotypes.remove(gene_uniquename) {
gene_details.genotypes_by_uniquename = genotypes;
}
if let Some(terms) = seen_terms.remove(gene_uniquename) {
gene_details.terms_by_termid = terms;
}
}
}
// Collect the references, genes, alleles and terms mentioned by each
// genotype's annotations, then attach the collected maps to the
// GenotypeDetails structs.
fn set_genotype_details_maps(&mut self) {
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();
    // Pass 1: accumulate, keyed by genotype uniquename.
    for (genotype_uniquename, genotype_details) in &self.genotypes {
        self.add_cv_annotations_to_maps(&genotype_uniquename,
                                        &genotype_details.cv_annotations,
                                        &mut seen_references,
                                        &mut seen_genes,
                                        &mut seen_genotypes,
                                        &mut seen_alleles,
                                        &mut seen_terms);
    }
    // Pass 2: move the collected maps into the details structs.
    // NOTE(review): seen_genotypes is filled above but never copied into
    // genotype_details here -- confirm whether that is intentional.
    for (genotype_uniquename, genotype_details) in &mut self.genotypes {
        if let Some(references) = seen_references.remove(genotype_uniquename) {
            genotype_details.references_by_uniquename = references;
        }
        if let Some(alleles) = seen_alleles.remove(genotype_uniquename) {
            genotype_details.alleles_by_uniquename = alleles;
        }
        // (local renamed from the original's misleading "genotypes":
        // this map holds genes)
        if let Some(genes) = seen_genes.remove(genotype_uniquename) {
            genotype_details.genes_by_uniquename = genes;
        }
        if let Some(terms) = seen_terms.remove(genotype_uniquename) {
            genotype_details.terms_by_termid = terms;
        }
    }
}
// For every reference (publication), collect the genes, genotypes,
// alleles and terms its annotations and interactions mention, then move
// the collected maps into the ReferenceDetails structs.
fn set_reference_details_maps(&mut self) {
// Local aliases for the per-reference "seen" maps.
// NOTE(review): the key types below are inconsistent (TermId,
// ReferenceUniquename, GeneUniquename) but all keys used are reference
// uniquenames -- presumably these are all String aliases; confirm.
type GeneShortMap = HashMap<GeneUniquename, GeneShort>;
let mut seen_genes: HashMap<String, GeneShortMap> = HashMap::new();
type GenotypeShortMap = HashMap<GenotypeUniquename, GenotypeShort>;
let mut seen_genotypes: HashMap<ReferenceUniquename, GenotypeShortMap> = HashMap::new();
type AlleleShortMap = HashMap<AlleleUniquename, AlleleShort>;
let mut seen_alleles: HashMap<TermId, AlleleShortMap> = HashMap::new();
type TermShortMap = HashMap<TermId, TermShort>;
let mut seen_terms: HashMap<GeneUniquename, TermShortMap> = HashMap::new();
// Pass 1 (immutable borrow): accumulate, keyed by reference uniquename.
{
for (reference_uniquename, reference_details) in &self.references {
for (_, feat_annotations) in &reference_details.cv_annotations {
for feat_annotation in feat_annotations.iter() {
for detail in &feat_annotation.annotations {
self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(),
detail.gene_uniquename.clone().unwrap());
// Experimental conditions are terms too.
for condition_termid in &detail.conditions {
self.add_term_to_hash(&mut seen_terms, reference_uniquename.clone(), condition_termid.clone());
}
// Extension ranges may name other terms or genes.
for ext_part in &detail.extension {
match ext_part.ext_range {
ExtRange::Term(ref range_termid) =>
self.add_term_to_hash(&mut seen_terms, reference_uniquename.clone(), range_termid.clone()),
ExtRange::Gene(ref allele_gene_uniquename) =>
self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(),
allele_gene_uniquename.clone()),
_ => {},
}
}
if let Some(ref genotype_uniquename) = detail.genotype_uniquename {
let genotype = self.make_genotype_short(genotype_uniquename);
self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles, &mut seen_genes,
reference_uniquename.clone(),
&genotype.uniquename);
}
}
}
}
// Both interaction lists contribute their genes.
let interaction_iter =
reference_details.physical_interactions.iter().chain(&reference_details.genetic_interactions);
for interaction in interaction_iter {
self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), interaction.gene_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), interaction.interactor_uniquename.clone());
}
for ortholog_annotation in &reference_details.ortholog_annotations {
self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), ortholog_annotation.gene_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), ortholog_annotation.ortholog_uniquename.clone());
}
for paralog_annotation in &reference_details.paralog_annotations {
self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), paralog_annotation.gene_uniquename.clone());
self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), paralog_annotation.paralog_uniquename.clone());
}
}
}
// Pass 2 (mutable borrow): move the collected maps into each reference.
for (reference_uniquename, reference_details) in &mut self.references {
if let Some(genes) = seen_genes.remove(reference_uniquename) {
reference_details.genes_by_uniquename = genes;
}
if let Some(genotypes) = seen_genotypes.remove(reference_uniquename) {
reference_details.genotypes_by_uniquename = genotypes;
}
if let Some(alleles) = seen_alleles.remove(reference_uniquename) {
reference_details.alleles_by_uniquename = alleles;
}
if let Some(terms) = seen_terms.remove(reference_uniquename) {
reference_details.terms_by_termid = terms;
}
}
}
// Compute the gene/genotype counts shown on summary pages: first count
// the distinct genes and genotypes per term and per reference, then copy
// those counts onto every TermShort/ReferenceShort embedded in genes,
// genotypes, references and terms.
pub fn set_counts(&mut self) {
let mut term_seen_genes: HashMap<TermId, HashSet<GeneUniquename>> = HashMap::new();
let mut term_seen_genotypes: HashMap<TermId, HashSet<GenotypeUniquename>> = HashMap::new();
let mut term_seen_single_allele_genotypes: HashMap<TermId, HashSet<GenotypeUniquename>> = HashMap::new();
let mut ref_seen_genes: HashMap<ReferenceUniquename, HashSet<GeneUniquename>> = HashMap::new();
// Phase 1a: per-term distinct genes/genotypes (and the genotypes that
// have exactly one expressed allele).
for (termid, term_details) in &self.terms {
let mut seen_genes: HashSet<GeneUniquename> = HashSet::new();
let mut seen_genotypes: HashSet<GenotypeUniquename> = HashSet::new();
let mut seen_single_allele_genotypes: HashSet<GenotypeUniquename> = HashSet::new();
for rel_annotation in &term_details.rel_annotations {
for annotation in &rel_annotation.annotations {
// NOTE(review): unwrap() assumes every annotation has a gene.
seen_genes.insert(annotation.gene_uniquename.clone().unwrap());
if let Some(ref genotype_uniquename) = annotation.genotype_uniquename {
seen_genotypes.insert(genotype_uniquename.clone());
let genotype = self.genotypes.get(genotype_uniquename).unwrap();
if genotype.expressed_alleles.len() == 1 {
seen_single_allele_genotypes.insert(genotype_uniquename.clone());
}
}
}
}
term_seen_genes.insert(termid.clone(), seen_genes);
term_seen_genotypes.insert(termid.clone(), seen_genotypes);
term_seen_single_allele_genotypes.insert(termid.clone(), seen_single_allele_genotypes);
}
// Phase 1b: per-reference distinct genes ("not" annotations excluded).
for (reference_uniquename, reference_details) in &self.references {
let mut seen_genes: HashSet<GeneUniquename> = HashSet::new();
for (_, rel_annotations) in &reference_details.cv_annotations {
for rel_annotation in rel_annotations {
for annotation in &rel_annotation.annotations {
if !rel_annotation.is_not {
seen_genes.insert(annotation.gene_uniquename.clone().unwrap());
}
}
}
}
let interaction_iter =
reference_details.physical_interactions.iter().chain(&reference_details.genetic_interactions);
for interaction in interaction_iter {
seen_genes.insert(interaction.gene_uniquename.clone());
seen_genes.insert(interaction.interactor_uniquename.clone());
}
for ortholog_annotation in &reference_details.ortholog_annotations {
seen_genes.insert(ortholog_annotation.gene_uniquename.clone());
}
ref_seen_genes.insert(reference_uniquename.clone(), seen_genes);
}
// Phase 2: write the counts onto the embedded short structs.
for (_, gene_details) in &mut self.genes {
for (_, feat_annotations) in &mut gene_details.cv_annotations {
for mut feat_annotation in feat_annotations.iter_mut() {
feat_annotation.term.gene_count =
term_seen_genes.get(&feat_annotation.term.termid).unwrap().len();
feat_annotation.term.genotype_count =
term_seen_genotypes.get(&feat_annotation.term.termid).unwrap().len();
}
}
for (reference_uniquename, reference_short) in
&mut gene_details.references_by_uniquename {
reference_short.gene_count =
ref_seen_genes.get(reference_uniquename).unwrap().len();
}
}
for (_, genotype_details) in &mut self.genotypes {
for (_, feat_annotations) in &mut genotype_details.cv_annotations {
for mut feat_annotation in feat_annotations.iter_mut() {
feat_annotation.term.genotype_count =
term_seen_genotypes.get(&feat_annotation.term.termid).unwrap().len();
}
}
}
for (_, ref_details) in &mut self.references {
for (_, ref_annotations) in &mut ref_details.cv_annotations {
for ref_annotation in ref_annotations {
ref_annotation.term.gene_count =
term_seen_genes.get(&ref_annotation.term.termid).unwrap().len();
ref_annotation.term.genotype_count =
term_seen_genotypes.get(&ref_annotation.term.termid).unwrap().len();
}
}
}
for (_, term_details) in &mut self.terms {
for rel_annotation in &mut term_details.rel_annotations {
rel_annotation.term.gene_count =
term_seen_genes.get(&rel_annotation.term.termid).unwrap().len();
rel_annotation.term.genotype_count =
term_seen_genotypes.get(&rel_annotation.term.termid).unwrap().len();
}
for (reference_uniquename, reference_short) in
&mut term_details.references_by_uniquename {
reference_short.gene_count =
ref_seen_genes.get(reference_uniquename).unwrap().len();
}
// remove() is safe: phase 1a inserted an entry for every term.
term_details.single_allele_genotype_uniquenames =
term_seen_single_allele_genotypes.remove(&term_details.termid).unwrap();
}
}
// Top-level driver: run every build phase in dependency order, then
// assemble and return the final WebData value.  The order of the phase
// calls matters -- later phases read state set up by earlier ones.
pub fn get_web_data(&mut self) -> WebData {
    self.process_references();
    self.make_feature_rel_maps();
    self.process_features();
    self.add_gene_neighbourhoods();
    self.process_props_from_feature_cvterms();
    self.process_allele_features();
    self.process_genotype_features();
    self.add_alleles_to_genotypes();
    self.process_cvterms();
    self.add_interesting_parents();
    self.process_cvterm_rels();
    self.process_extension_cvterms();
    self.process_feature_synonyms();
    self.process_feature_cvterms();
    self.store_ont_annotations(false);
    self.store_ont_annotations(true);
    self.process_cvtermpath();
    self.process_annotation_feature_rels();
    self.add_target_of_annotations();
    self.make_all_cv_summaries();
    self.set_term_details_maps();
    self.set_gene_details_maps();
    self.set_genotype_details_maps();
    self.set_reference_details_maps();
    self.set_counts();
    let mut web_data_terms: IdTermDetailsMap = HashMap::new();
    let search_api_maps = self.make_search_api_maps();
    // Move the term details out of self, wrapping each in an Rc.  The
    // drained key is already owned so no clone is needed here.
    for (termid, term_details) in self.terms.drain() {
        web_data_terms.insert(termid, Rc::new(term_details));
    }
    self.terms = HashMap::new();
    let mut used_terms: IdTermDetailsMap = HashMap::new();
    // remove terms with no annotation
    for (termid, term_details) in &web_data_terms {
        if !term_details.rel_annotations.is_empty() {
            used_terms.insert(termid.clone(), term_details.clone());
        }
    }
    let metadata = self.make_metadata();
    WebData {
        genes: self.genes.clone(),
        genotypes: self.genotypes.clone(),
        terms: web_data_terms,
        used_terms: used_terms,
        metadata: metadata,
        references: self.references.clone(),
        search_api_maps: search_api_maps,
    }
}
}
Remove completely empty rows from summary JSON
Refs pombase/website#185
use std::rc::Rc;
use std::collections::hash_map::HashMap;
use std::collections::HashSet;
use std::borrow::Borrow;
use std::cmp::Ordering;
use regex::Regex;
use db::*;
use web::data::*;
use web::config::*;
use web::vec_set::*;
include!(concat!(env!("OUT_DIR"), "/config_serde.rs"));
// Reduce a full Organism to the genus/species pair used in the web data.
fn make_organism_short(rc_organism: &Rc<Organism>) -> OrganismShort {
    let genus = rc_organism.genus.clone();
    let species = rc_organism.species.clone();
    OrganismShort {
        genus: genus,
        species: species,
    }
}
// An allele paired with the expression level recorded on the
// genotype <-> allele feature_relationship (if any); used as the value
// type of WebDataBuild::alleles_of_genotypes.
#[derive(Clone)]
pub struct AlleleAndExpression {
// uniquename of the allele feature
allele_uniquename: String,
// value of the "expression" feature_relationshipprop, when present
expression: Option<String>,
}
// Builder that turns the raw Chado objects (`raw`) into the JSON-ready
// web data structures.  The maps below are intermediate state filled in
// by the process_*/set_* methods and consumed by get_web_data().
pub struct WebDataBuild<'a> {
raw: &'a Raw,
config: &'a Config,
// primary outputs, keyed by uniquename / termid
genes: UniquenameGeneMap,
transcripts: UniquenameTranscriptMap,
genotypes: UniquenameGenotypeMap,
alleles: UniquenameAlleleMap,
terms: HashMap<TermId, TermDetails>,
references: IdReferenceMap,
// ontology annotations collected before being attached to terms;
// negated ("NOT") annotations are kept separately
all_ont_annotations: HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>,
all_not_ont_annotations: HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>,
// feature-relationship lookups: child uniquename -> parent uniquename
genes_of_transcripts: HashMap<String, String>,
transcripts_of_polypeptides: HashMap<String, String>,
genes_of_alleles: HashMap<String, String>,
// genotype uniquename -> the alleles (with expression) it contains
alleles_of_genotypes: HashMap<String, Vec<AlleleAndExpression>>,
// gene_uniquename vs transcript_type_name:
transcript_type_of_genes: HashMap<String, String>,
// a map from IDs of terms from the "PomBase annotation extension terms" cv
// to a Vec of the details of each of the extension
parts_of_extensions: HashMap<TermId, Vec<ExtPart>>,
// extension term id -> the base ontology term it extends
base_term_of_extensions: HashMap<TermId, TermId>,
}
// Convenience constructor for the five "seen" accumulator maps used by
// the set_*_details_maps() methods; all start empty.
fn get_maps() ->
    (HashMap<String, ReferenceShortMap>,
     HashMap<String, GeneShortMap>,
     HashMap<String, GenotypeShortMap>,
     HashMap<String, AlleleShortMap>,
     HashMap<GeneUniquename, TermShortMap>)
{
    let seen_references = HashMap::new();
    let seen_genes = HashMap::new();
    let seen_genotypes = HashMap::new();
    let seen_alleles = HashMap::new();
    let seen_terms = HashMap::new();
    (seen_references, seen_genes, seen_genotypes, seen_alleles, seen_terms)
}
// Return the value of the first "expression" prop on the relationship,
// or None if there is no such prop (or its value is unset).
fn get_feat_rel_expression(feature_relationship: &FeatureRelationship) -> Option<String> {
    feature_relationship.feature_relationshipprops.borrow().iter()
        .find(|prop| prop.prop_type.name == "expression")
        .and_then(|prop| prop.value.clone())
}
// True for the two Chado feature types we treat as genes.
fn is_gene_type(feature_type_name: &str) -> bool {
    match feature_type_name {
        "gene" | "pseudogene" => true,
        _ => false,
    }
}
// Remove and return the first element of `vec` matching `predicate`,
// or None if no element matches.  The rest of the Vec keeps its order.
pub fn remove_first<T, P>(vec: &mut Vec<T>, predicate: P) -> Option<T>
    where P: FnMut(&T) -> bool {
    let found_pos = vec.iter().position(predicate);
    match found_pos {
        Some(pos) => Some(vec.remove(pos)),
        None => None,
    }
}
// Merge two extension parts that have the same relation and SummaryGenes
// ranges into one part whose gene list is the sorted, deduplicated union.
// Panics if the relations differ or either range is not SummaryGenes.
pub fn merge_gene_ext_parts(ext_part1: &ExtPart, ext_part2: &ExtPart) -> ExtPart {
if ext_part1.rel_type_name == ext_part2.rel_type_name {
if let ExtRange::SummaryGenes(ref part1_summ_genes) = ext_part1.ext_range {
if let ExtRange::SummaryGenes(ref part2_summ_genes) = ext_part2.ext_range {
// keep everything from part1 except the merged gene range
let mut ret_ext_part = ext_part1.clone();
let mut new_genes = [part1_summ_genes.clone(), part2_summ_genes.clone()].concat();
new_genes.sort();
new_genes.dedup();
ret_ext_part.ext_range = ExtRange::SummaryGenes(new_genes);
return ret_ext_part
}
}
panic!("passed ExtPart objects that have non-gene ranges to merge_gene_ext_parts():
{:?} {:?}", ext_part1, ext_part2);
} else {
panic!("passed ExtPart objects with mismatched relations to merge_gene_ext_parts():
{} {}\n", ext_part1.rel_type_name, ext_part2.rel_type_name);
}
}
// Merge adjacent summary rows that differ only in the gene range of one
// extension part, collecting genes of configured relations (for example
// several "binds gene X" rows) into a single row.  `rows` is expected to
// be sorted so that mergeable rows are adjacent.
// (Fixes a mis-encoded `&current_gene_ext_part` argument in the original.)
pub fn collect_ext_summary_genes(cv_config: &CvConfig, rows: Vec<TermSummaryRow>)
                                 -> Vec<TermSummaryRow> {
    let conf_gene_rels = &cv_config.summary_gene_relations_to_collect;
    // True for extension parts whose range is a gene list and whose
    // relation is configured for collection.
    let gene_range_rel_p =
        |ext_part: &ExtPart| {
            if let ExtRange::SummaryGenes(_) = ext_part.ext_range {
                conf_gene_rels.contains(&ext_part.rel_type_name)
            } else {
                false
            }
        };
    let mut ret_rows = vec![];
    let mut row_iter = rows.iter().cloned();
    if let Some(mut prev_row) = row_iter.next() {
        for current_row in row_iter {
            // Rows with different genes or genotypes never merge.
            if prev_row.gene_uniquenames != current_row.gene_uniquenames ||
                prev_row.genotype_uniquename != current_row.genotype_uniquename {
                ret_rows.push(prev_row);
                prev_row = current_row;
                continue;
            }
            // Remove the (at most one) collectable gene-range part from
            // each extension, then compare what remains.
            let mut prev_row_extension = prev_row.extension.clone();
            let prev_matching_gene_ext_part =
                remove_first(&mut prev_row_extension, &gene_range_rel_p);
            let mut current_row_extension = current_row.extension.clone();
            let current_matching_gene_ext_part =
                remove_first(&mut current_row_extension, &gene_range_rel_p);
            if let (Some(prev_gene_ext_part), Some(current_gene_ext_part)) =
                (prev_matching_gene_ext_part, current_matching_gene_ext_part) {
                if current_row_extension == prev_row_extension &&
                    prev_gene_ext_part.rel_type_name == current_gene_ext_part.rel_type_name {
                    // Same relation and same remaining extension: fold the
                    // two gene lists into one part on prev_row.
                    let merged_gene_ext_parts =
                        merge_gene_ext_parts(&prev_gene_ext_part,
                                             &current_gene_ext_part);
                    let mut new_ext = vec![merged_gene_ext_parts];
                    new_ext.extend_from_slice(&prev_row_extension);
                    prev_row.extension = new_ext;
                } else {
                    ret_rows.push(prev_row);
                    prev_row = current_row;
                }
            } else {
                ret_rows.push(prev_row);
                prev_row = current_row
            }
        }
        ret_rows.push(prev_row);
    }
    ret_rows
}
// combine rows that have a gene and no extension into one row; rows with
// an extension (or no gene) are kept unchanged, after the combined row
pub fn collect_summary_rows(rows: &mut Vec<TermSummaryRow>) {
    let mut no_ext_rows = vec![];
    let mut other_rows = vec![];
    for row in rows.drain(0..) {
        if row.gene_uniquenames.len() > 0 && row.extension.len() == 0 {
            // invariant: summary rows carry at most one gene each
            if row.gene_uniquenames.len() > 1 {
                panic!("row has more than one gene\n");
            }
            no_ext_rows.push(row);
        } else {
            other_rows.push(row);
        }
    }
    let gene_uniquenames: Vec<String> =
        no_ext_rows.iter().map(|row| row.gene_uniquenames.get(0).unwrap().clone())
        .collect();
    rows.clear();
    // Only emit the merged genes row when there is at least one gene --
    // previously an all-empty TermSummaryRow (no genes, no genotype, no
    // extension) was pushed whenever no extension-less rows existed,
    // producing completely empty rows in the summary JSON.
    if !gene_uniquenames.is_empty() {
        let genes_row = TermSummaryRow {
            gene_uniquenames: gene_uniquenames,
            genotype_uniquename: None,
            extension: vec![],
        };
        rows.push(genes_row);
    }
    rows.append(&mut other_rows);
}
// Remove annotations from the summary where there is another more
// specific annotation. ie. the same annotation but with extra part(s) in the
// extension.
// See: https://github.com/pombase/website/issues/185
// (Fixes two mis-encoded `&current.extension` arguments in the original.)
pub fn remove_redundant_summary_rows(rows: &mut Vec<TermSummaryRow>) {
    let mut results = vec![];
    rows.sort();
    if rows.len() <= 1 {
        return;
    }
    // After reversing the sort, rows with longer extensions come first so
    // a shorter row whose extension is a subset of one already kept can
    // be dropped.
    rows.reverse();
    let mut vec_set = VecSet::new();
    let mut prev = rows.remove(0);
    results.push(prev.clone());
    if prev.gene_uniquenames.len() > 1 {
        panic!("remove_redundant_summary_rows() failed: num genes > 1\n");
    }
    vec_set.insert(&prev.extension);
    for current in rows.drain(0..) {
        if current.gene_uniquenames.len() > 1 {
            panic!("remove_redundant_summary_rows() failed: num genes > 1\n");
        }
        // Keep the row when both rows are gene-less, or when the genes
        // "match" and no already-kept extension is a superset of this one.
        // NOTE(review): `get(1)` is always None here -- the panics above
        // guarantee at most one gene per row -- so the gene comparison is
        // vacuously true.  Possibly `get(0)` was intended; confirm the
        // intended grouping semantics before changing it.
        if prev.gene_uniquenames.len() == 0 &&
            current.gene_uniquenames.len() == 0 ||
            prev.gene_uniquenames.get(1) == current.gene_uniquenames.get(1) &&
            !vec_set.contains_superset(&current.extension) {
            results.push(current.clone());
            vec_set.insert(&current.extension);
        }
        prev = current;
    }
    results.sort();
    *rows = results;
}
// Build one OntTermSummary per term: turn each annotation into a summary
// row (gene and/or genotype plus a filtered extension), then drop
// redundant rows, combine extension-less gene rows and collect shared
// gene ranges.
// NOTE(review): the `cvtermpath` parameter is unused in this body --
// confirm whether it is still needed by callers.
fn make_cv_summaries(config: &Config, cvtermpath: &Vec<Rc<Cvtermpath>>,
include_gene: bool, include_genotype: bool,
term_and_annotations_vec: &Vec<OntTermAnnotations>) -> Vec<OntTermSummary> {
let mut result = vec![];
for ref term_and_annotations in term_and_annotations_vec {
let term = &term_and_annotations.term;
let cv_config = config.cv_config_by_name(&term.cv_name);
let mut rows = vec![];
for annotation in &term_and_annotations.annotations {
// include the gene only when requested and this cv is gene-based
let gene_uniquenames =
if include_gene && cv_config.feature_type == "gene" {
if let Some(ref gene_uniquename) = annotation.gene_uniquename {
vec![gene_uniquename.clone()]
} else {
vec![]
}
} else {
vec![]
};
// likewise for the genotype
let maybe_genotype_uniquename =
if include_genotype && cv_config.feature_type == "genotype" {
annotation.genotype_uniquename.clone()
} else {
None
};
// a row with no gene, no genotype and no extension adds nothing
if gene_uniquenames.len() == 0 &&
maybe_genotype_uniquename.is_none() &&
annotation.extension.len() == 0 {
continue;
}
// drop hidden relations and rewrite Gene ranges as SummaryGenes
// (a list of gene lists) so they can be merged later
let mut summary_extension = annotation.extension.iter().cloned()
.filter(|ext_part|
!cv_config.summary_relations_to_hide.contains(&ext_part.rel_type_name))
.map(move |mut ext_part| {
if let ExtRange::Gene(gene_uniquename) = ext_part.ext_range.clone() {
let summ_genes = vec![gene_uniquename];
ext_part.ext_range = ExtRange::SummaryGenes(vec![summ_genes]);
}
ext_part })
.collect::<Vec<ExtPart>>();
summary_extension.sort();
collect_duplicated_relations(&mut summary_extension);
let row = TermSummaryRow {
gene_uniquenames: gene_uniquenames,
genotype_uniquename: maybe_genotype_uniquename,
extension: summary_extension,
};
rows.push(row);
}
// post-process: dedupe, merge gene-only rows, collect gene ranges
remove_redundant_summary_rows(&mut rows);
collect_summary_rows(&mut rows);
let summary = OntTermSummary {
term: term_and_annotations.term.clone(),
is_not: term_and_annotations.is_not,
rel_names: term_and_annotations.rel_names.clone(),
rows: collect_ext_summary_genes(&cv_config, rows),
};
result.push(summary);
}
result
}
// turns binds([[gene1]]),binds([[gene2]]),other_rel(...) into:
// binds([[gene1, gene2]]),other_rel(...)
// Adjacent parts with the same relation and SummaryGenes ranges are
// folded together; `ext` is assumed to be sorted so duplicates are
// adjacent.  The merge happens in place via clear()/append().
pub fn collect_duplicated_relations(ext: &mut Vec<ExtPart>) {
let mut result: Vec<ExtPart> = vec![];
{
let mut iter = ext.iter().cloned();
if let Some(mut prev) = iter.next() {
for current in iter {
// different relation: emit prev, start a new run
if prev.rel_type_name != current.rel_type_name {
result.push(prev);
prev = current;
continue;
}
// same relation: if both ranges are SummaryGenes, append
// current's first gene list onto prev's and drop current
if let ExtRange::SummaryGenes(ref current_summ_genes) = current.ext_range {
if let ExtRange::SummaryGenes(ref mut prev_summ_genes) = prev.ext_range {
let mut current_genes = current_summ_genes.get(0).unwrap().clone();
prev_summ_genes.get_mut(0).unwrap().append(& mut current_genes);
continue;
}
}
result.push(prev);
prev = current;
}
result.push(prev);
}
}
ext.clear();
ext.append(&mut result);
}
impl <'a> WebDataBuild<'a> {
// Construct an empty builder over the raw Chado data and the site
// configuration; every derived map starts empty and is filled in by
// get_web_data().
pub fn new(raw: &'a Raw, config: &'a Config) -> WebDataBuild<'a> {
    WebDataBuild {
        raw: raw,
        config: config,
        // primary output maps
        genes: HashMap::new(),
        transcripts: HashMap::new(),
        genotypes: HashMap::new(),
        alleles: HashMap::new(),
        terms: HashMap::new(),
        references: HashMap::new(),
        // annotation accumulators
        all_ont_annotations: HashMap::new(),
        all_not_ont_annotations: HashMap::new(),
        // feature-relationship lookups
        genes_of_transcripts: HashMap::new(),
        transcripts_of_polypeptides: HashMap::new(),
        genes_of_alleles: HashMap::new(),
        alleles_of_genotypes: HashMap::new(),
        transcript_type_of_genes: HashMap::new(),
        // extension-term bookkeeping
        parts_of_extensions: HashMap::new(),
        base_term_of_extensions: HashMap::new(),
    }
}
// Record the reference (if any, and not the "null" placeholder) under
// `identifier` in the seen-references map.
fn add_ref_to_hash(&self,
                   seen_references: &mut HashMap<String, ReferenceShortMap>,
                   identifier: String,
                   maybe_reference_uniquename: Option<ReferenceUniquename>) {
    let reference_uniquename =
        match maybe_reference_uniquename {
            Some(uniquename) => uniquename,
            None => return,
        };
    // make_reference_short() returns None for the "null" reference
    if let Some(reference_short) = self.make_reference_short(&reference_uniquename) {
        let ref_map = seen_references
            .entry(identifier)
            .or_insert(HashMap::new());
        ref_map.insert(reference_uniquename, reference_short);
    }
}
// Record `other_gene_uniquename` (as a GeneShort) under `identifier` in
// the seen-genes map.
fn add_gene_to_hash(&self,
                    seen_genes: &mut HashMap<String, GeneShortMap>,
                    identifier: String,
                    other_gene_uniquename: GeneUniquename) {
    let gene_short = self.make_gene_short(&other_gene_uniquename);
    let gene_map = seen_genes.entry(identifier).or_insert(HashMap::new());
    gene_map.insert(other_gene_uniquename, gene_short);
}
// Record the genotype under `identifier`, along with each of its
// expressed alleles (and their genes) in the allele/gene maps.
fn add_genotype_to_hash(&self,
                        seen_genotypes: &mut HashMap<String, GenotypeShortMap>,
                        seen_alleles: &mut HashMap<String, AlleleShortMap>,
                        seen_genes: &mut HashMap<String, GeneShortMap>,
                        identifier: String,
                        genotype_uniquename: &GenotypeUniquename) {
    let genotype = self.make_genotype_short(genotype_uniquename);
    for expressed_allele in &genotype.expressed_alleles {
        self.add_allele_to_hash(seen_alleles, seen_genes, identifier.clone(),
                                expressed_allele.allele_uniquename.clone());
    }
    // Reuse the GenotypeShort built above instead of constructing it a
    // second time with another make_genotype_short() call.
    seen_genotypes
        .entry(identifier)
        .or_insert(HashMap::new())
        .insert(genotype_uniquename.clone(),
                genotype);
}
// Record the allele under `identifier` (and its gene in the seen-genes
// map); returns the AlleleShort that was stored.
fn add_allele_to_hash(&self,
                      seen_alleles: &mut HashMap<String, AlleleShortMap>,
                      seen_genes: &mut HashMap<String, GeneShortMap>,
                      identifier: String,
                      allele_uniquename: AlleleUniquename) -> AlleleShort {
    let allele_short = self.make_allele_short(&allele_uniquename);
    self.add_gene_to_hash(seen_genes, identifier.clone(),
                          allele_short.gene_uniquename.clone());
    let allele_map = seen_alleles.entry(identifier).or_insert(HashMap::new());
    allele_map.insert(allele_uniquename, allele_short.clone());
    allele_short
}
// Record `other_termid` (as a TermShort) under `identifier` in the
// seen-terms map.
fn add_term_to_hash(&self,
                    seen_terms: &mut HashMap<TermId, TermShortMap>,
                    identifier: String,
                    other_termid: TermId) {
    let term_short = self.make_term_short(&other_termid);
    seen_terms
        .entry(identifier)
        .or_insert(HashMap::new())
        .insert(other_termid, term_short);
}
// Look up a gene by uniquename, panicking (a bug upstream) if missing.
fn get_gene<'b>(&'b self, gene_uniquename: &'b str) -> &'b GeneDetails {
    self.genes.get(gene_uniquename)
        .unwrap_or_else(|| panic!("can't find GeneDetails for gene uniquename {}",
                                  gene_uniquename))
}
// Mutable variant of get_gene(); panics if the gene is unknown.
fn get_gene_mut<'b>(&'b mut self, gene_uniquename: &'b str) -> &'b mut GeneDetails {
    self.genes.get_mut(gene_uniquename)
        .unwrap_or_else(|| panic!("can't find GeneDetails for gene uniquename {}",
                                  gene_uniquename))
}
// Cut a GeneDetails down to the uniquename/name/product triple used in
// embedded gene references.
fn make_gene_short(&self, gene_uniquename: &str) -> GeneShort {
    let details = self.get_gene(gene_uniquename);
    GeneShort {
        uniquename: details.uniquename.clone(),
        name: details.name.clone(),
        product: details.product.clone(),
    }
}
// Build the GeneSummary used by the search API from the full details.
fn make_gene_summary(&self, gene_uniquename: &str) -> GeneSummary {
    let details = self.get_gene(gene_uniquename);
    GeneSummary {
        uniquename: details.uniquename.clone(),
        name: details.name.clone(),
        product: details.product.clone(),
        synonyms: details.synonyms.clone(),
        feature_type: details.feature_type.clone(),
        organism: details.organism.clone(),
        location: details.location.clone(),
    }
}
// Build a ReferenceShort for a publication, or None for the "null"
// placeholder used when an annotation has no reference.
fn make_reference_short(&self, reference_uniquename: &str) -> Option<ReferenceShort> {
    if reference_uniquename == "null" {
        return None;
    }
    let reference_details = self.references.get(reference_uniquename).unwrap();
    Some(ReferenceShort {
        uniquename: String::from(reference_uniquename),
        title: reference_details.title.clone(),
        citation: reference_details.citation.clone(),
        publication_year: reference_details.publication_year.clone(),
        authors: reference_details.authors.clone(),
        authors_abbrev: reference_details.authors_abbrev.clone(),
        gene_count: reference_details.genes_by_uniquename.keys().len(),
        genotype_count: reference_details.genotypes_by_uniquename.keys().len(),
    })
}
/// Build the lightweight TermShort summary used in cross-reference maps.
/// Panics with the termid in the message if the term is unknown.
fn make_term_short(&self, termid: &str) -> TermShort {
    if let Some(term_details) = self.terms.get(termid) {
        TermShort {
            name: term_details.name.clone(),
            cv_name: term_details.cv_name.clone(),
            interesting_parents: term_details.interesting_parents.clone(),
            termid: term_details.termid.clone(),
            is_obsolete: term_details.is_obsolete,
            // map.len() is the same count as map.keys().len(), without the iterator
            gene_count: term_details.genes_by_uniquename.len(),
            genotype_count: term_details.genotypes_by_uniquename.len(),
        }
    } else {
        panic!("can't find TermDetails for termid: {}", termid)
    }
}
/// Set the characterisation status of a gene from a cvterm name.
/// Delegates to get_gene_mut() so a missing gene panics with a useful
/// message instead of a bare unwrap(); also drops the unused `mut` on the
/// binding (the &mut reference itself is never reassigned).
fn add_characterisation_status(&mut self, gene_uniquename: &String, cvterm_name: &String) {
    let gene_details = self.get_gene_mut(gene_uniquename);
    gene_details.characterisation_status = Some(cvterm_name.clone());
}
/// Set the product description of a gene. Panics via get_gene_mut() if the
/// gene is unknown. (The `mut` on the binding was unused: the &mut
/// reference is never reassigned.)
fn add_gene_product(&mut self, gene_uniquename: &String, product: &String) {
    let gene_details = self.get_gene_mut(gene_uniquename);
    gene_details.product = Some(product.clone());
}
/// Append a name description to a gene (a gene may accumulate several).
/// Panics via get_gene_mut() if the gene is unknown. (The `mut` on the
/// binding was unused: the &mut reference is never reassigned.)
fn add_name_description(&mut self, gene_uniquename: &str, name_description: &str) {
    let gene_details = self.get_gene_mut(gene_uniquename);
    gene_details.name_descriptions.push(name_description.into());
}
/// Store one ontology annotation. If `cvterm` is an annotation-extension
/// term, the annotation is filed under its base term and the extension
/// parts are prepended to the annotation's own extension. NOT annotations
/// go into a separate map from positive ones.
fn add_annotation(&mut self, cvterm: &Cvterm, is_not: bool,
                  annotation_template: OntAnnotationDetail) {
    let termid =
        match self.base_term_of_extensions.get(&cvterm.termid()) {
            Some(base_termid) => base_termid.clone(),
            None => cvterm.termid(),
        };
    // Start from the term's extension parts (the original cloned this
    // vector twice; once is enough) ...
    let mut new_extension =
        match self.parts_of_extensions.get(&cvterm.termid()) {
            Some(parts) => parts.clone(),
            None => vec![],
        };
    // ... then append the extension carried by the annotation itself.
    let mut existing_extensions = annotation_template.extension.clone();
    new_extension.append(&mut existing_extensions);
    let ont_annotation_detail =
        OntAnnotationDetail {
            extension: new_extension,
            .. annotation_template
        };
    let annotation_map = if is_not {
        &mut self.all_not_ont_annotations
    } else {
        &mut self.all_ont_annotations
    };
    // termid isn't used again, so move it into the entry (no clone), and
    // only allocate the Vec when the entry is vacant.
    annotation_map
        .entry(termid)
        .or_insert_with(Vec::new)
        .push(Rc::new(ont_annotation_detail));
}
/// Build a ReferenceDetails for every raw publication, extracting the
/// PubMed props (authors, date, abstract) and deriving the abbreviated
/// author string and publication year.
fn process_references(&mut self) {
    // Compile the regexes once, not once per publication (the originals
    // were rebuilt on every loop iteration).
    // First author surname, for the "Smith et al." abbreviation:
    let author_re = Regex::new(r"^(?P<f>[^,]+),.*$").unwrap();
    // Dates are expected to end with a four-digit year:
    let date_re = Regex::new(r"^(.* )?(?P<y>\d\d\d\d)$").unwrap();
    for rc_publication in &self.raw.publications {
        let reference_uniquename = &rc_publication.uniquename;
        let mut pubmed_authors: Option<String> = None;
        let mut pubmed_publication_date: Option<String> = None;
        let mut pubmed_abstract: Option<String> = None;
        for prop in rc_publication.publicationprops.borrow().iter() {
            match &prop.prop_type.name as &str {
                "pubmed_publication_date" =>
                    pubmed_publication_date = Some(prop.value.clone()),
                "pubmed_authors" =>
                    pubmed_authors = Some(prop.value.clone()),
                "pubmed_abstract" =>
                    pubmed_abstract = Some(prop.value.clone()),
                _ => ()
            }
        }
        let mut authors_abbrev = None;
        let mut publication_year = None;
        if let Some(authors) = pubmed_authors.clone() {
            if authors.contains(",") {
                authors_abbrev = Some(author_re.replace_all(&authors, "$f et al."));
            } else {
                // single author: use the name unchanged
                authors_abbrev = Some(authors.clone());
            }
        }
        if let Some(publication_date) = pubmed_publication_date.clone() {
            publication_year = Some(date_re.replace_all(&publication_date, "$y"));
        }
        self.references.insert(reference_uniquename.clone(),
                               ReferenceDetails {
                                   uniquename: reference_uniquename.clone(),
                                   title: rc_publication.title.clone(),
                                   citation: rc_publication.miniref.clone(),
                                   pubmed_abstract: pubmed_abstract.clone(),
                                   authors: pubmed_authors.clone(),
                                   authors_abbrev: authors_abbrev,
                                   pubmed_publication_date: pubmed_publication_date.clone(),
                                   publication_year: publication_year,
                                   // annotation containers are filled by later passes
                                   cv_annotations: HashMap::new(),
                                   cv_summaries: HashMap::new(),
                                   physical_interactions: vec![],
                                   genetic_interactions: vec![],
                                   ortholog_annotations: vec![],
                                   paralog_annotations: vec![],
                                   genes_by_uniquename: HashMap::new(),
                                   genotypes_by_uniquename: HashMap::new(),
                                   alleles_by_uniquename: HashMap::new(),
                                   terms_by_termid: HashMap::new(),
                               });
    }
}
// Classify the raw feature_relationships into the lookup maps used by the
// later passes:
//   transcript --part_of--> gene        => genes_of_transcripts,
//                                          transcript_type_of_genes
//   polypeptide --derives_from--> mRNA  => transcripts_of_polypeptides
//   allele --instance_of--> gene        => genes_of_alleles
//   allele --part_of--> genotype        => alleles_of_genotypes
// Each relationship matches at most one pattern (note the `continue`s).
fn make_feature_rel_maps(&mut self) {
    for feature_rel in self.raw.feature_relationships.iter() {
        let subject_type_name = &feature_rel.subject.feat_type.name;
        let rel_name = &feature_rel.rel_type.name;
        let object_type_name = &feature_rel.object.feat_type.name;
        let subject_uniquename = &feature_rel.subject.uniquename;
        let object_uniquename = &feature_rel.object.uniquename;
        // transcript part_of gene/pseudogene
        if TRANSCRIPT_FEATURE_TYPES.contains(&subject_type_name.as_str()) &&
            rel_name == "part_of" &&
            (object_type_name == "gene" || object_type_name == "pseudogene") {
                self.genes_of_transcripts.insert(subject_uniquename.clone(),
                                                 object_uniquename.clone());
                // remember the transcript type so store_gene_details() can
                // build feature types like "mRNA gene"
                self.transcript_type_of_genes.insert(object_uniquename.clone(),
                                                     subject_type_name.clone());
                continue;
            }
        // polypeptide derives_from mRNA
        if subject_type_name == "polypeptide" &&
            rel_name == "derives_from" &&
            object_type_name == "mRNA" {
                self.transcripts_of_polypeptides.insert(subject_uniquename.clone(),
                                                        object_uniquename.clone());
                continue;
            }
        if subject_type_name == "allele" {
            // allele instance_of gene/pseudogene
            if feature_rel.rel_type.name == "instance_of" &&
                (object_type_name == "gene" || object_type_name == "pseudogene") {
                    self.genes_of_alleles.insert(subject_uniquename.clone(),
                                                 object_uniquename.clone());
                    continue;
                }
            // allele part_of genotype, keeping the expression level stored
            // on the relationship itself
            if feature_rel.rel_type.name == "part_of" &&
                object_type_name == "genotype" {
                    let allele_and_expression =
                        AlleleAndExpression {
                            allele_uniquename: subject_uniquename.clone(),
                            expression: get_feat_rel_expression(feature_rel),
                        };
                    let entry = self.alleles_of_genotypes.entry(object_uniquename.clone());
                    entry.or_insert(Vec::new()).push(allele_and_expression);
                    continue;
                }
        }
    }
}
/// Convert the first featureloc of `feat` (if any) into a
/// ChromosomeLocation. The +1 on fmin suggests the input is 0-based
/// (Chado interbase coordinates) and the output 1-based — TODO confirm
/// against the loader that fills featurelocs.
///
/// Panics on non-positive coordinates or an unexpected strand value.
fn make_location(&self, feat: &Feature) -> Option<ChromosomeLocation> {
    let feature_locs = feat.featurelocs.borrow();
    match feature_locs.get(0) {
        Some(feature_loc) => {
            let start_pos =
                if feature_loc.fmin + 1 >= 1 {
                    (feature_loc.fmin + 1) as u32
                } else {
                    panic!("start_pos less than 1");
                };
            let end_pos =
                if feature_loc.fmax >= 1 {
                    feature_loc.fmax as u32
                } else {
                    // fixed message: this branch guards end_pos, but the
                    // original said "start_end less than 1"
                    panic!("end_pos less than 1");
                };
            Some(ChromosomeLocation {
                chromosome_name: feature_loc.srcfeature.uniquename.clone(),
                start_pos: start_pos,
                end_pos: end_pos,
                strand: match feature_loc.strand {
                    1 => Strand::Forward,
                    -1 => Strand::Reverse,
                    // was a bare panic!() with no diagnostic
                    _ => panic!("unexpected strand value: {}", feature_loc.strand),
                },
            })
        },
        None => None,
    }
}
// Create and store a GeneDetails for a gene/pseudogene feature.
// The feature type is prefixed with the transcript type (eg. "mRNA gene")
// when make_feature_rel_maps() recorded one for this gene; all annotation
// containers start empty and are filled by later passes.
fn store_gene_details(&mut self, feat: &Feature) {
    let location = self.make_location(&feat);
    let organism = make_organism_short(&feat.organism);
    let feature_type =
        if let Some(transcript_type) =
        self.transcript_type_of_genes.get(&feat.uniquename) {
            transcript_type.clone() + " " + &feat.feat_type.name
        } else {
            feat.feat_type.name.clone()
        };
    let gene_feature = GeneDetails {
        uniquename: feat.uniquename.clone(),
        name: feat.name.clone(),
        organism: organism,
        // product and characterisation_status are set later from
        // feature cvterms (process_props_from_feature_cvterms)
        product: None,
        name_descriptions: vec![],
        synonyms: vec![],
        feature_type: feature_type,
        characterisation_status: None,
        location: location,
        // filled by add_gene_neighbourhoods()
        gene_neighbourhood: vec![],
        cds_location: None,
        cv_annotations: HashMap::new(),
        cv_summaries: HashMap::new(),
        physical_interactions: vec![],
        genetic_interactions: vec![],
        ortholog_annotations: vec![],
        paralog_annotations: vec![],
        target_of_annotations: vec![],
        transcripts: vec![],
        genes_by_uniquename: HashMap::new(),
        genotypes_by_uniquename: HashMap::new(),
        alleles_by_uniquename: HashMap::new(),
        references_by_uniquename: HashMap::new(),
        terms_by_termid: HashMap::new(),
    };
    self.genes.insert(feat.uniquename.clone(), gene_feature);
}
/// Create and store a GenotypeDetails for a genotype feature. The genotype
/// background comes from the "genotype_background" featureprop (as in the
/// original, the last matching prop wins); expressed_alleles is filled
/// later by add_alleles_to_genotypes().
fn store_genotype_details(&mut self, feat: &Feature) {
    let props = feat.featureprops.borrow();
    let background = props.iter()
        .filter(|prop| prop.prop_type.name == "genotype_background")
        .last()
        .and_then(|prop| prop.value.clone());
    let details = GenotypeDetails {
        uniquename: feat.uniquename.clone(),
        name: feat.name.clone(),
        background: background,
        expressed_alleles: vec![],
        cv_annotations: HashMap::new(),
        cv_summaries: HashMap::new(),
        genes_by_uniquename: HashMap::new(),
        alleles_by_uniquename: HashMap::new(),
        references_by_uniquename: HashMap::new(),
        terms_by_termid: HashMap::new(),
    };
    self.genotypes.insert(feat.uniquename.clone(), details);
}
/// Create and store an AlleleShort for an allele feature, pulling
/// allele_type and description from its featureprops. Panics if the
/// allele has no allele_type prop or no owning gene in genes_of_alleles.
fn store_allele_details(&mut self, feat: &Feature) {
    let mut allele_type = None;
    let mut description = None;
    for prop in feat.featureprops.borrow().iter() {
        let prop_name: &str = &prop.prop_type.name;
        if prop_name == "allele_type" {
            allele_type = prop.value.clone();
        } else if prop_name == "description" {
            description = prop.value.clone();
        }
    }
    let allele_type = allele_type
        .unwrap_or_else(|| panic!("no allele_type cvtermprop for {}", &feat.uniquename));
    // make_feature_rel_maps() must already have recorded the owning gene
    let gene_uniquename =
        self.genes_of_alleles.get(&feat.uniquename).unwrap();
    let allele_details = AlleleShort {
        uniquename: feat.uniquename.clone(),
        name: feat.name.clone(),
        gene_uniquename: gene_uniquename.clone(),
        allele_type: allele_type,
        description: description,
    };
    self.alleles.insert(feat.uniquename.clone(), allele_details);
}
/// Dispatch one raw feature: genes/pseudogenes get full GeneDetails,
/// transcript features get a minimal TranscriptDetails, everything else
/// is ignored here.
fn process_feature(&mut self, feat: &Feature) {
    let type_name = &feat.feat_type.name;
    if type_name == "gene" || type_name == "pseudogene" {
        self.store_gene_details(feat);
    } else if TRANSCRIPT_FEATURE_TYPES.contains(&type_name.as_str()) {
        let transcript = TranscriptDetails {
            uniquename: feat.uniquename.clone(),
            name: feat.name.clone(),
        };
        self.transcripts.insert(feat.uniquename.clone(), transcript);
    }
}
/// First feature pass: handle everything except genotypes and alleles,
/// which have their own later passes (process_genotype_features /
/// process_allele_features).
fn process_features(&mut self) {
    for feat in &self.raw.features {
        let type_name = &feat.feat_type.name;
        if type_name == "genotype" || type_name == "allele" {
            continue;
        }
        self.process_feature(feat);
    }
}
/// For every term, collect the set of ancestor termids whose relation to
/// the term is configured as "interesting", then store each set on the
/// corresponding TermDetails. Two phases because phase 1 only needs shared
/// access while phase 2 mutates self.terms.
fn add_interesting_parents(&mut self) {
    let mut interesting_parents_by_termid: HashMap<String, HashSet<String>> =
        HashMap::new();
    for cvtermpath in &self.raw.cvtermpaths {
        let subject_term = &cvtermpath.subject;
        let subject_termid = subject_term.termid();
        let object_term = &cvtermpath.object;
        let object_termid = object_term.termid();
        let rel_termid =
            match cvtermpath.rel_type {
                Some(ref rel_type) => {
                    rel_type.termid()
                },
                None => panic!("no relation type for {} <-> {}\n",
                               &subject_term.name, &object_term.name)
            };
        let rel_term_name =
            self.make_term_short(&rel_termid).name;
        if self.is_interesting_parent(&object_termid, &rel_term_name) {
            // subject_termid/object_termid aren't used after this point,
            // so move them in (the original cloned one and .into()'d the
            // other needlessly); or_insert_with avoids allocating a set
            // when the entry already exists.
            interesting_parents_by_termid
                .entry(subject_termid)
                .or_insert_with(HashSet::new)
                .insert(object_termid);
        }
    }
    for (termid, interesting_parents) in interesting_parents_by_termid {
        let term_details = self.terms.get_mut(&termid).unwrap();
        term_details.interesting_parents = interesting_parents;
    }
}
/// Dedicated pass over the raw features that stores AlleleShort details
/// for every "allele" feature.
fn process_allele_features(&mut self) {
    for feat in &self.raw.features {
        if feat.feat_type.name != "allele" {
            continue;
        }
        self.store_allele_details(feat);
    }
}
/// Dedicated pass over the raw features that stores GenotypeDetails for
/// every "genotype" feature.
fn process_genotype_features(&mut self) {
    for feat in &self.raw.features {
        if feat.feat_type.name != "genotype" {
            continue;
        }
        self.store_genotype_details(feat);
    }
}
// For every located gene, build its "neighbourhood": up to
// GENE_NEIGHBOURHOOD_DISTANCE genes before and after it on the same
// chromosome (in start-position order), with the gene itself in the
// middle. Genes without a location get no neighbourhood.
fn add_gene_neighbourhoods(&mut self) {
    // local pairing of a gene with its location, for sorting
    struct GeneAndLoc {
        gene_uniquename: String,
        loc: ChromosomeLocation,
    };
    let mut genes_and_locs: Vec<GeneAndLoc> = vec![];
    for gene_details in self.genes.values() {
        if let Some(ref location) = gene_details.location {
            genes_and_locs.push(GeneAndLoc {
                gene_uniquename: gene_details.uniquename.clone(),
                loc: location.clone(),
            });
        }
    }
    // sort by chromosome first, then by start position within it
    let cmp = |a: &GeneAndLoc, b: &GeneAndLoc| {
        let order = a.loc.chromosome_name.cmp(&b.loc.chromosome_name);
        if order == Ordering::Equal {
            a.loc.start_pos.cmp(&b.loc.start_pos)
        } else {
            order
        }
    };
    genes_and_locs.sort_by(cmp);
    for (i, this_gene_and_loc) in genes_and_locs.iter().enumerate() {
        let mut nearby_genes: Vec<GeneShort> = vec![];
        if i > 0 {
            // walk backwards up to the distance limit, clamped at index 0
            let start_index =
                if i > GENE_NEIGHBOURHOOD_DISTANCE {
                    i - GENE_NEIGHBOURHOOD_DISTANCE
                } else {
                    0
                };
            // iterate nearest-first and prepend, so nearby_genes stays in
            // chromosome order; stop at a chromosome boundary
            for back_index in (start_index..i).rev() {
                let back_gene_and_loc = &genes_and_locs[back_index];
                if back_gene_and_loc.loc.chromosome_name !=
                    this_gene_and_loc.loc.chromosome_name {
                        break;
                    }
                let back_gene_short = self.make_gene_short(&back_gene_and_loc.gene_uniquename);
                nearby_genes.insert(0, back_gene_short);
            }
        }
        // the gene itself sits between its upstream and downstream neighbours
        let gene_short = self.make_gene_short(&this_gene_and_loc.gene_uniquename);
        nearby_genes.push(gene_short);
        if i < genes_and_locs.len() - 1 {
            // end_index is exclusive; +1 because the forward range starts
            // at i+1, clamped to the vector length
            let end_index =
                if i + GENE_NEIGHBOURHOOD_DISTANCE >= genes_and_locs.len() {
                    genes_and_locs.len()
                } else {
                    i + GENE_NEIGHBOURHOOD_DISTANCE + 1
                };
            for forward_index in i+1..end_index {
                let forward_gene_and_loc = &genes_and_locs[forward_index];
                // stop at a chromosome boundary
                if forward_gene_and_loc.loc.chromosome_name !=
                    this_gene_and_loc.loc.chromosome_name {
                        break;
                    }
                let forward_gene_short = self.make_gene_short(&forward_gene_and_loc.gene_uniquename);
                nearby_genes.push(forward_gene_short);
            }
        }
        let mut this_gene_details =
            self.genes.get_mut(&this_gene_and_loc.gene_uniquename).unwrap();
        this_gene_details.gene_neighbourhood.append(&mut nearby_genes);
    }
}
/// Fill GenotypeDetails.expressed_alleles from the alleles_of_genotypes
/// map built by make_feature_rel_maps(). Two phases: first build the
/// lists with shared borrows, then move them into the genotypes with a
/// mutable borrow. Panics if a genotype has no alleles recorded.
fn add_alleles_to_genotypes(&mut self) {
    let mut alleles_to_add: HashMap<String, Vec<ExpressedAllele>> = HashMap::new();
    for genotype_uniquename in self.genotypes.keys() {
        // Borrow the allele list instead of cloning the whole Vec (the
        // original cloned it just to iterate).
        let allele_and_expr_vec =
            self.alleles_of_genotypes.get(genotype_uniquename).unwrap();
        let expressed_allele_vec: Vec<ExpressedAllele> =
            allele_and_expr_vec.iter()
            .map(|allele_and_expression| {
                ExpressedAllele {
                    allele_uniquename: allele_and_expression.allele_uniquename.clone(),
                    expression: allele_and_expression.expression.clone(),
                }
            })
            .collect();
        alleles_to_add.insert(genotype_uniquename.clone(), expressed_allele_vec);
    }
    for (genotype_uniquename, genotype_details) in &mut self.genotypes {
        genotype_details.expressed_alleles =
            alleles_to_add.remove(genotype_uniquename).unwrap();
    }
}
// add interaction, ortholog and paralog annotations
//
// For every feature_relationship between two gene-type features whose
// relation matches a FEATURE_REL_CONFIGS entry, build the corresponding
// annotation (interaction / ortholog / paralog), attach it to the subject
// gene and (where applicable) to the reference; orthologs and paralogs
// also get a reversed annotation on the object gene. Finally all the
// per-gene and per-reference annotation lists are sorted.
fn process_annotation_feature_rels(&mut self) {
    for feature_rel in self.raw.feature_relationships.iter() {
        let rel_name = &feature_rel.rel_type.name;
        let subject_uniquename = &feature_rel.subject.uniquename;
        let object_uniquename = &feature_rel.object.uniquename;
        for rel_config in FEATURE_REL_CONFIGS.iter() {
            if rel_name == rel_config.rel_type_name &&
                is_gene_type(&feature_rel.subject.feat_type.name) &&
                is_gene_type(&feature_rel.object.feat_type.name) {
                    let mut evidence: Option<Evidence> = None;
                    // only the first publication (if any) is used
                    let borrowed_publications = feature_rel.publications.borrow();
                    let maybe_publication = borrowed_publications.get(0).clone();
                    let maybe_reference_uniquename =
                        match maybe_publication {
                            Some(publication) => Some(publication.uniquename.clone()),
                            None => None,
                        };
                    // map a long evidence name to its configured short
                    // code, falling back to the long name
                    for prop in feature_rel.feature_relationshipprops.borrow().iter() {
                        if prop.prop_type.name == "evidence" {
                            if let Some(evidence_long) = prop.value.clone() {
                                if let Some(code) = self.config.evidence_types.get(&evidence_long) {
                                    evidence = Some(code.clone());
                                } else {
                                    evidence = Some(evidence_long);
                                }
                            }
                        }
                    }
                    let evidence_clone = evidence.clone();
                    let gene_uniquename = subject_uniquename;
                    // organisms are fetched up-front (shared borrows) so
                    // the blocks below can take mutable borrows of genes
                    let gene_organism_short = {
                        self.genes.get(subject_uniquename).unwrap().organism.clone()
                    };
                    let other_gene_uniquename = object_uniquename;
                    let other_gene_organism_short = {
                        self.genes.get(object_uniquename).unwrap().organism.clone()
                    };
                    {
                        // forward direction: annotate the subject gene
                        let mut gene_details = self.genes.get_mut(subject_uniquename).unwrap();
                        match rel_config.annotation_type {
                            FeatureRelAnnotationType::Interaction => {
                                let interaction_annotation =
                                    InteractionAnnotation {
                                        gene_uniquename: gene_uniquename.clone(),
                                        interactor_uniquename: other_gene_uniquename.clone(),
                                        evidence: evidence,
                                        reference_uniquename: maybe_reference_uniquename.clone(),
                                    };
                                if rel_name == "interacts_physically" {
                                    gene_details.physical_interactions.push(interaction_annotation.clone());
                                } else {
                                    if rel_name == "interacts_genetically" {
                                        gene_details.genetic_interactions.push(interaction_annotation.clone());
                                    } else {
                                        panic!("unknown interaction type: {}", rel_name);
                                    }
                                };
                                // also attach the interaction to its
                                // reference, when there is one we know
                                if let Some(ref_details) =
                                    if let Some(ref reference_uniquename) = maybe_reference_uniquename {
                                        self.references.get_mut(reference_uniquename)
                                    } else {
                                        None
                                    }
                                {
                                    if rel_name == "interacts_physically" {
                                        ref_details.physical_interactions.push(interaction_annotation.clone());
                                    } else {
                                        if rel_name == "interacts_genetically" {
                                            ref_details.genetic_interactions.push(interaction_annotation.clone());
                                        } else {
                                            panic!("unknown interaction type: {}", rel_name);
                                        }
                                    };
                                }
                            },
                            FeatureRelAnnotationType::Ortholog => {
                                let ortholog_annotation =
                                    OrthologAnnotation {
                                        gene_uniquename: gene_uniquename.clone(),
                                        ortholog_uniquename: other_gene_uniquename.clone(),
                                        ortholog_organism: other_gene_organism_short,
                                        evidence: evidence,
                                        reference_uniquename: maybe_reference_uniquename.clone(),
                                    };
                                gene_details.ortholog_annotations.push(ortholog_annotation.clone());
                                if let Some(ref_details) =
                                    if let Some(ref reference_uniquename) = maybe_reference_uniquename {
                                        self.references.get_mut(reference_uniquename)
                                    } else {
                                        None
                                    }
                                {
                                    ref_details.ortholog_annotations.push(ortholog_annotation);
                                }
                            },
                            FeatureRelAnnotationType::Paralog => {
                                let paralog_annotation =
                                    ParalogAnnotation {
                                        gene_uniquename: gene_uniquename.clone(),
                                        paralog_uniquename: other_gene_uniquename.clone(),
                                        evidence: evidence,
                                        reference_uniquename: maybe_reference_uniquename.clone(),
                                    };
                                gene_details.paralog_annotations.push(paralog_annotation.clone());
                                if let Some(ref_details) =
                                    if let Some(ref reference_uniquename) = maybe_reference_uniquename {
                                        self.references.get_mut(reference_uniquename)
                                    } else {
                                        None
                                    }
                                {
                                    ref_details.paralog_annotations.push(paralog_annotation);
                                }
                            }
                        }
                    }
                    {
                        // for orthologs and paralogs, store the reverses annotation too
                        let mut other_gene_details = self.genes.get_mut(object_uniquename).unwrap();
                        match rel_config.annotation_type {
                            FeatureRelAnnotationType::Interaction => {},
                            FeatureRelAnnotationType::Ortholog =>
                                other_gene_details.ortholog_annotations.push(
                                    OrthologAnnotation {
                                        gene_uniquename: other_gene_uniquename.clone(),
                                        ortholog_uniquename: gene_uniquename.clone(),
                                        ortholog_organism: gene_organism_short,
                                        evidence: evidence_clone,
                                        reference_uniquename: maybe_reference_uniquename.clone(),
                                    }),
                            FeatureRelAnnotationType::Paralog =>
                                other_gene_details.paralog_annotations.push(
                                    ParalogAnnotation {
                                        gene_uniquename: other_gene_uniquename.clone(),
                                        paralog_uniquename: gene_uniquename.clone(),
                                        evidence: evidence_clone,
                                        reference_uniquename: maybe_reference_uniquename
                                    }),
                        }
                    }
                }
        }
    }
    // sort all the collected annotation lists for stable output
    for (_, ref_details) in &mut self.references {
        ref_details.physical_interactions.sort();
        ref_details.genetic_interactions.sort();
        ref_details.ortholog_annotations.sort();
        ref_details.paralog_annotations.sort();
    }
    for (_, gene_details) in &mut self.genes {
        gene_details.physical_interactions.sort();
        gene_details.genetic_interactions.sort();
        gene_details.ortholog_annotations.sort();
        gene_details.paralog_annotations.sort();
    }
}
/// Find the first extension display configuration whose rel_name matches
/// `rel_type_name`, honouring an optional if_descendent_of restriction
/// (checked against the annotation term's interesting parents).
/// Panics if the annotation term itself is unknown.
fn matching_ext_config(&self, annotation_termid: &str,
                       rel_type_name: &str) -> Option<ExtensionConfig> {
    let annotation_term_details = match self.terms.get(annotation_termid) {
        Some(details) => details,
        None => panic!("can't find details for term: {}\n", annotation_termid),
    };
    for ext_config in &self.config.extensions {
        if ext_config.rel_name != rel_type_name {
            continue;
        }
        match ext_config.if_descendent_of {
            Some(ref if_descendent_of) => {
                // restricted config: only applies below a given ancestor
                if annotation_term_details.interesting_parents.contains(if_descendent_of) {
                    return Some(ext_config.clone());
                }
            },
            // unrestricted config: first match wins
            None => return Some(ext_config.clone()),
        }
    }
    None
}
// create and returns any TargetOfAnnotations implied by the extension
fn make_target_of_for_ext(&self, cv_name: &String,
                          maybe_gene_uniquename: &Option<String>,
                          maybe_genotype_uniquename: &Option<String>,
                          reference_uniquename: &Option<String>,
                          annotation_termid: &String,
                          extension: &Vec<ExtPart>) -> Vec<(GeneUniquename, TargetOfAnnotation)> {
    let mut ret_vec = vec![];
    for ext_part in extension {
        // Look up the config first (as the original did), so an unknown
        // annotation term panics regardless of the range type.
        let maybe_ext_config =
            self.matching_ext_config(annotation_termid, &ext_part.rel_type_name);
        // only gene ranges can yield a reciprocal "target of" annotation
        let target_gene_uniquename = match ext_part.ext_range {
            ExtRange::Gene(ref uniquename) => uniquename,
            _ => continue,
        };
        let ext_config = match maybe_ext_config {
            Some(config) => config,
            None => continue,
        };
        let reciprocal_display_name = match ext_config.reciprocal_display {
            Some(name) => name,
            None => continue,
        };
        // a genotype annotation takes precedence over a plain gene annotation
        let (annotation_gene_uniquename, annotation_genotype_uniquename) =
            if maybe_genotype_uniquename.is_some() {
                (None, maybe_genotype_uniquename.clone())
            } else {
                (maybe_gene_uniquename.clone(), None)
            };
        ret_vec.push(((*target_gene_uniquename).clone(),
                      TargetOfAnnotation {
                          ontology_name: cv_name.clone(),
                          ext_rel_display_name: reciprocal_display_name,
                          gene_uniquename: annotation_gene_uniquename,
                          genotype_uniquename: annotation_genotype_uniquename,
                          reference_uniquename: reference_uniquename.clone(),
                      }));
    }
    ret_vec
}
/// Derive "target of" annotations from every term annotation's extension
/// (via make_target_of_for_ext) and store them on the target genes.
/// Collected into a local map first because building needs shared access
/// to self while storing needs mutable access.
fn add_target_of_annotations(&mut self) {
    let mut target_of_annotations: HashMap<GeneUniquename, HashSet<TargetOfAnnotation>> =
        HashMap::new();
    for (_, term_details) in &self.terms {
        for rel_annotation in &term_details.rel_annotations {
            for annotation in rel_annotation.annotations.iter() {
                let new_annotations =
                    self.make_target_of_for_ext(&term_details.cv_name,
                                                &annotation.gene_uniquename,
                                                &annotation.genotype_uniquename,
                                                &annotation.reference_uniquename,
                                                &term_details.termid, &annotation.extension);
                for (target_gene_uniquename, new_annotation) in new_annotations {
                    // the uniquename isn't used after this, so move it in
                    // (the original cloned it); or_insert_with avoids
                    // allocating a set for occupied entries
                    target_of_annotations
                        .entry(target_gene_uniquename)
                        .or_insert_with(HashSet::new)
                        .insert(new_annotation);
                }
            }
        }
    }
    for (gene_uniquename, annotations) in target_of_annotations {
        let gene_details = self.genes.get_mut(&gene_uniquename).unwrap();
        // the set is owned here, so consume it directly: into_iter()
        // replaces the original `mut` binding + drain()
        gene_details.target_of_annotations = annotations.into_iter().collect();
    }
}
// Build the per-CV annotation summaries for terms, genes, genotypes and
// references by calling make_cv_summaries() over each one's annotations.
// The two boolean arguments differ per owner kind — from the call sites
// they control what is included/merged in the summary (presumably
// gene/genotype grouping; confirm against make_cv_summaries' signature).
fn make_all_cv_summaries(&mut self) {
    // term pages summarise their own rel_annotations
    for (_, term_details) in &mut self.terms {
        term_details.rel_summaries =
            make_cv_summaries(&self.config, &self.raw.cvtermpaths,
                              true, true, &term_details.rel_annotations);
    }
    // gene pages: one summary list per CV
    for (_, gene_details) in &mut self.genes {
        for (cv_name, term_annotations) in &mut gene_details.cv_annotations {
            let summaries =
                make_cv_summaries(&self.config, &self.raw.cvtermpaths,
                                  false, true, &term_annotations);
            gene_details.cv_summaries.insert(cv_name.clone(), summaries);
        }
    }
    // genotype pages: one summary list per CV
    for (_, genotype_details) in &mut self.genotypes {
        for (cv_name, term_annotations) in &mut genotype_details.cv_annotations {
            let summaries =
                make_cv_summaries(&self.config, &self.raw.cvtermpaths,
                                  false, false, &term_annotations);
            genotype_details.cv_summaries.insert(cv_name.clone(), summaries);
        }
    }
    // reference pages: one summary list per CV
    for (_, reference_details) in &mut self.references {
        for (cv_name, term_annotations) in &mut reference_details.cv_annotations {
            let summaries =
                make_cv_summaries(&self.config, &self.raw.cvtermpaths,
                                  true, true, &term_annotations);
            reference_details.cv_summaries.insert(cv_name.clone(), summaries);
        }
    }
}
/// Create a TermDetails for every raw cvterm, except terms from the
/// annotation-extension CV (those are flattened into extensions by
/// process_extension_cvterms / process_cvterm_rels instead).
fn process_cvterms(&mut self) {
    for cvterm in &self.raw.cvterms {
        if cvterm.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            continue;
        }
        // the per-CV configuration decides which feature type the
        // term's annotations apply to
        let annotation_feature_type =
            self.config.cv_config_by_name(&cvterm.cv.name).feature_type.clone();
        let details = TermDetails {
            name: cvterm.name.clone(),
            cv_name: cvterm.cv.name.clone(),
            annotation_feature_type: annotation_feature_type,
            interesting_parents: HashSet::new(),
            termid: cvterm.termid(),
            definition: cvterm.definition.clone(),
            direct_ancestors: vec![],
            is_obsolete: cvterm.is_obsolete,
            single_allele_genotype_uniquenames: HashSet::new(),
            rel_annotations: vec![],
            rel_summaries: vec![],
            not_rel_annotations: vec![],
            genes_by_uniquename: HashMap::new(),
            genotypes_by_uniquename: HashMap::new(),
            alleles_by_uniquename: HashMap::new(),
            references_by_uniquename: HashMap::new(),
            terms_by_termid: HashMap::new(),
        };
        self.terms.insert(cvterm.termid(), details);
    }
}
fn get_ext_rel_display_name(&self, annotation_termid: &String,
ext_rel_name: &String) -> String {
if let Some(ext_conf) = self.matching_ext_config(annotation_termid, ext_rel_name) {
ext_conf.display_name.clone()
} else {
let re = Regex::new("_").unwrap();
re.replace_all(&ext_rel_name, " ")
}
}
// Turn the cvtermprops of annotation-extension CV terms into ExtParts.
// Each prop whose type starts with ANNOTATION_EXT_REL_PREFIX encodes one
// relation; the prop value is the range, treated as a gene when it starts
// with "SP" and as an opaque value otherwise. Requires the base term
// (from process_cvterm_rels' is_a pass) to already be known — panics if
// it isn't.
fn process_extension_cvterms(&mut self) {
    for cvterm in &self.raw.cvterms {
        if cvterm.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            for cvtermprop in cvterm.cvtermprops.borrow().iter() {
                if (*cvtermprop).prop_type.name.starts_with(ANNOTATION_EXT_REL_PREFIX) {
                    // the relation name is the prop type minus the prefix
                    let ext_rel_name_str =
                        &(*cvtermprop).prop_type.name[ANNOTATION_EXT_REL_PREFIX.len()..];
                    let ext_rel_name = String::from(ext_rel_name_str);
                    let ext_range = (*cvtermprop).value.clone();
                    // values starting with "SP" are taken to be gene
                    // uniquenames; everything else is Misc
                    let range: ExtRange = if ext_range.starts_with("SP") {
                        ExtRange::Gene(ext_range)
                    } else {
                        ExtRange::Misc(ext_range)
                    };
                    if let Some(base_termid) =
                        self.base_term_of_extensions.get(&cvterm.termid()) {
                            let rel_type_display_name =
                                self.get_ext_rel_display_name(&base_termid, &ext_rel_name);
                            self.parts_of_extensions.entry(cvterm.termid())
                                .or_insert(Vec::new()).push(ExtPart {
                                    rel_type_name: String::from(ext_rel_name),
                                    rel_type_display_name: rel_type_display_name,
                                    ext_range: range,
                                });
                        } else {
                            panic!("can't find details for term: {}\n", cvterm.termid());
                        }
                }
            }
        }
    }
}
// Process cvterm relationships in two passes.
// Pass 1: for extension-CV terms, an is_a relation records the base term
// in base_term_of_extensions; for ordinary terms, every relation becomes
// a direct_ancestors entry on the subject term.
// Pass 2 (needs base_term_of_extensions complete): every non-is_a
// relation on an extension-CV term becomes an ExtPart with a term range.
fn process_cvterm_rels(&mut self) {
    for cvterm_rel in &self.raw.cvterm_relationships {
        let subject_term = &cvterm_rel.subject;
        let object_term = &cvterm_rel.object;
        let rel_type = &cvterm_rel.rel_type;
        if subject_term.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            let subject_termid = subject_term.termid();
            // is_a links an extension term to the base term it extends
            if rel_type.name == "is_a" {
                self.base_term_of_extensions.insert(subject_termid.clone(),
                                                    object_term.termid().clone());
            }
        } else {
            let object_term_short =
                self.make_term_short(&object_term.termid());
            // ordinary ontology edge: record the parent on the subject
            if let Some(ref mut subject_term_details) = self.terms.get_mut(&subject_term.termid()) {
                subject_term_details.direct_ancestors.push(TermAndRelation {
                    termid: object_term_short.termid.clone(),
                    term_name: object_term_short.name.clone(),
                    relation_name: rel_type.name.clone(),
                });
            }
        }
    }
    // second pass: base_term_of_extensions is now fully populated
    for cvterm_rel in &self.raw.cvterm_relationships {
        let subject_term = &cvterm_rel.subject;
        let object_term = &cvterm_rel.object;
        let rel_type = &cvterm_rel.rel_type;
        if subject_term.cv.name == POMBASE_ANN_EXT_TERM_CV_NAME {
            let subject_termid = subject_term.termid();
            if rel_type.name != "is_a" {
                if let Some(base_termid) =
                    self.base_term_of_extensions.get(&subject_term.termid()) {
                        let rel_type_display_name =
                            self.get_ext_rel_display_name(base_termid, &rel_type.name);
                        self.parts_of_extensions.entry(subject_termid)
                            .or_insert(Vec::new()).push(ExtPart {
                                rel_type_name: rel_type.name.clone(),
                                rel_type_display_name: rel_type_display_name,
                                ext_range: ExtRange::Term(object_term.termid().clone()),
                            });
                    } else {
                        panic!("can't find details for {}\n", object_term.termid());
                    }
            }
        }
    }
}
fn process_feature_synonyms(&mut self) {
for feature_synonym in self.raw.feature_synonyms.iter() {
let feature = &feature_synonym.feature;
let synonym = &feature_synonym.synonym;
if let Some(ref mut gene_details) = self.genes.get_mut(&feature.uniquename) {
gene_details.synonyms.push(SynonymDetails {
name: synonym.name.clone(),
synonym_type: synonym.synonym_type.name.clone()
});
}
}
}
/// Build a GenotypeShort for the given genotype uniquename, cloning only
/// the fields GenotypeShort needs. (The original cloned the entire
/// GenotypeDetails — including all its annotation maps — just to move
/// four fields out of the copy.) Panics with a message if unknown.
fn make_genotype_short(&self, genotype_uniquename: &str) -> GenotypeShort {
    let details = self.genotypes.get(genotype_uniquename)
        .unwrap_or_else(|| panic!("can't find GenotypeDetails for genotype uniquename {}",
                                  genotype_uniquename));
    GenotypeShort {
        uniquename: details.uniquename.clone(),
        name: details.name.clone(),
        background: details.background.clone(),
        expressed_alleles: details.expressed_alleles.clone(),
    }
}
/// Return a copy of the stored AlleleShort for the given allele
/// uniquename; AlleleShort is already the summary form, so a clone is all
/// that's needed. Panics if the allele is unknown.
fn make_allele_short(&self, allele_uniquename: &str) -> AlleleShort {
    let allele_short = self.alleles.get(allele_uniquename).unwrap();
    allele_short.clone()
}
// process feature properties stored as cvterms,
// eg. characterisation_status and product
//
// Products from the "PomBase gene products" CV are resolved to their gene
// through the transcript maps: polypeptide -> transcript -> gene, or
// transcript -> gene directly; anything unresolvable is dropped. Genes
// and pseudogenes additionally take characterisation status and name
// descriptions from their own feature cvterms.
fn process_props_from_feature_cvterms(&mut self) {
    for feature_cvterm in self.raw.feature_cvterms.iter() {
        let feature = &feature_cvterm.feature;
        let cvterm = &feature_cvterm.cvterm;
        // resolve the feature carrying a product term to gene uniquenames
        let gene_uniquenames_vec: Vec<GeneUniquename> =
            if cvterm.cv.name == "PomBase gene products" {
                if feature.feat_type.name == "polypeptide" {
                    // polypeptide -> transcript -> gene
                    if let Some(transcript_uniquename) =
                        self.transcripts_of_polypeptides.get(&feature.uniquename) {
                            if let Some(gene_uniquename) =
                                self.genes_of_transcripts.get(transcript_uniquename) {
                                    vec![gene_uniquename.clone()]
                                } else {
                                    vec![]
                                }
                        } else {
                            vec![]
                        }
                } else {
                    // transcript -> gene
                    if TRANSCRIPT_FEATURE_TYPES.contains(&feature.feat_type.name.as_str()) {
                        if let Some(gene_uniquename) =
                            self.genes_of_transcripts.get(&feature.uniquename) {
                                vec![gene_uniquename.clone()]
                            } else {
                                vec![]
                            }
                    } else {
                        vec![]
                    }
                }
            } else {
                vec![]
            };
        for gene_uniquename in &gene_uniquenames_vec {
            self.add_gene_product(&gene_uniquename, &cvterm.name);
        }
        // characterisation status and name descriptions are attached
        // directly to gene/pseudogene features
        if feature.feat_type.name == "gene" || feature.feat_type.name == "pseudogene" {
            if cvterm.cv.name == "PomBase gene characterisation status" {
                self.add_characterisation_status(&feature.uniquename, &cvterm.name);
            } else {
                if cvterm.cv.name == "name_description" {
                    self.add_name_description(&feature.uniquename, &cvterm.name);
                }
            }
        }
    }
}
/// Build an "active_form" extension part from a gene_product_form_id
/// value. "PR:" values (Protein Ontology IDs) become GeneProduct ranges,
/// anything else is kept as Misc.
fn get_gene_prod_extension(&self, prod_value: &String) -> ExtPart {
    let ext_range = if prod_value.starts_with("PR:") {
        ExtRange::GeneProduct(prod_value.clone())
    } else {
        ExtRange::Misc(prod_value.clone())
    };
    ExtPart {
        rel_type_name: "active_form".into(),
        rel_type_display_name: "active form".into(),
        ext_range: ext_range,
    }
}
// return a fake extension for "with" properties on protein binding annotations
fn get_with_extension(&self, with_value: &String) -> ExtPart {
    // NOTE(review): the literal "SP%" prefix looks odd next to the
    // "PomBase:SP" case below — confirm the raw data really contains a
    // '%' character here.
    let ext_range =
        if with_value.starts_with("SP%") {
            ExtRange::Gene(with_value.clone())
        } else if with_value.starts_with("PomBase:SP") {
            // strip the 8-character "PomBase:" prefix to get the bare
            // gene uniquename
            let gene_uniquename = String::from(&with_value[8..]);
            ExtRange::Gene(gene_uniquename)
        } else if with_value.to_lowercase().starts_with("pfam:") {
            ExtRange::Domain(with_value.clone())
        } else {
            ExtRange::Misc(with_value.clone())
        };
    // a with property on a protein binding (GO:0005515) is
    // displayed as a binds extension
    // https://github.com/pombase/website/issues/108
    ExtPart {
        rel_type_name: "binds".into(),
        rel_type_display_name: "binds".into(),
        ext_range: ext_range,
    }
}
/// Classify a raw with/from value: strip a leading "<DB_NAME>:" prefix
/// and, if the result names a known gene, return a Gene value; otherwise
/// a known termid gives a Term value; anything else is kept as a plain
/// Identifier.
fn make_with_or_from_value(&self, with_or_from_value: String) -> WithFromValue {
    let db_prefix_patt = String::from("^") + DB_NAME + ":";
    let re = Regex::new(&db_prefix_patt).unwrap();
    let gene_uniquename = re.replace_all(&with_or_from_value, "");
    if self.genes.contains_key(&gene_uniquename) {
        WithFromValue::Gene(self.make_gene_short(&gene_uniquename))
    } else if self.terms.contains_key(&with_or_from_value) {
        WithFromValue::Term(self.make_term_short(&with_or_from_value))
    } else {
        WithFromValue::Identifier(with_or_from_value)
    }
}
// add the with value as a fake extension if the cvterm is_a protein binding,
// otherwise return the value
//
// Returns WithFromValue::None when the value was consumed as an
// extension; otherwise the classified with/from value.
fn make_with_extension(&self, termid: &String, evidence_code: Option<String>,
                       extension: &mut Vec<ExtPart>,
                       with_value: String) -> WithFromValue {
    // resolve extension terms to their base term before testing
    let base_termid =
        match self.base_term_of_extensions.get(termid) {
            Some(base_termid) => base_termid.clone(),
            None => termid.clone(),
        };
    let base_term_short = self.make_term_short(&base_termid);
    // "protein binding" (GO:0005515) or a descendant of it
    let is_protein_binding =
        base_term_short.termid == "GO:0005515" ||
        base_term_short.interesting_parents.contains("GO:0005515");
    // map_or() replaces the original is_some() + unwrap() pair
    let is_ipi = evidence_code.map_or(false, |code| code == "IPI");
    if is_ipi && is_protein_binding {
        extension.push(self.get_with_extension(&with_value));
        WithFromValue::None
    } else {
        self.make_with_or_from_value(with_value)
    }
}
// process annotation
/// Convert every feature_cvterm row from the raw Chado dump into
/// `OntAnnotationDetail` objects and register them with
/// `add_annotation()`.
///
/// For each row this collects evidence, conditions, qualifiers,
/// with/from values and annotation extensions from the
/// feature_cvtermprops, resolves the annotated feature to one or more
/// gene uniquenames (directly, via transcripts/polypeptides, or via
/// the expressed alleles of a genotype), then creates one detail per
/// gene.
fn process_feature_cvterms(&mut self) {
    for feature_cvterm in self.raw.feature_cvterms.iter() {
        let feature = &feature_cvterm.feature;
        let cvterm = &feature_cvterm.cvterm;
        let mut extension = vec![];
        // These CVs hold gene metadata rather than ontology
        // annotation and are processed elsewhere.
        if cvterm.cv.name == "PomBase gene characterisation status" ||
            cvterm.cv.name == "PomBase gene products" ||
            cvterm.cv.name == "name_description" {
                continue;
            }
        let publication = &feature_cvterm.publication;
        let mut extra_props: HashMap<String, String> = HashMap::new();
        let mut conditions: Vec<TermId> = vec![];
        let mut with: WithFromValue = WithFromValue::None;
        let mut from: WithFromValue = WithFromValue::None;
        let mut qualifiers: Vec<Qualifier> = vec![];
        let mut evidence: Option<String> = None;
        // "with" is stashed raw and handled after the prop loop,
        // because its interpretation depends on the evidence code
        // which may appear in a later prop.
        let mut raw_with_value: Option<String> = None;
        for ref prop in feature_cvterm.feature_cvtermprops.borrow().iter() {
            match &prop.type_name() as &str {
                // Free-text props kept verbatim; the gene-expression
                // ones are pulled out into GeneExProps below.
                "residue" | "scale" |
                "quant_gene_ex_copies_per_cell" |
                "quant_gene_ex_avg_copies_per_cell" => {
                    if let Some(value) = prop.value.clone() {
                        extra_props.insert(prop.type_name().clone(), value);
                    }
                },
                // Map long evidence names to short codes where the
                // config knows them, otherwise keep the long name.
                "evidence" =>
                    if let Some(evidence_long) = prop.value.clone() {
                        if let Some(code) = self.config.evidence_types.get(&evidence_long) {
                            evidence = Some(code.clone());
                        } else {
                            evidence = Some(evidence_long);
                        }
                    },
                "condition" =>
                    if let Some(value) = prop.value.clone() {
                        conditions.push(value.clone());
                    },
                "qualifier" =>
                    if let Some(value) = prop.value.clone() {
                        qualifiers.push(value);
                    },
                "with" => {
                    raw_with_value = prop.value.clone();
                },
                "from" => {
                    if let Some(value) = prop.value.clone() {
                        from = self.make_with_or_from_value(value);
                    }
                },
                "gene_product_form_id" => {
                    if let Some(value) = prop.value.clone() {
                        extension.push(self.get_gene_prod_extension(&value));
                    }
                },
                _ => ()
            }
        }
        if let Some(value) = raw_with_value {
            // make_with_extension() may instead push onto `extension`
            // and return None-ish, in which case `with` stays as-is.
            let with_gene_short =
                self.make_with_extension(&cvterm.termid(), evidence.clone(),
                                         &mut extension, value);
            if with_gene_short.is_some() {
                with = with_gene_short;
            }
        }
        let mut maybe_genotype_uniquename = None;
        // Resolve the annotated feature to the gene(s) it implies.
        let mut gene_uniquenames_vec: Vec<GeneUniquename> =
            match &feature.feat_type.name as &str {
                "polypeptide" => {
                    // polypeptide -> transcript -> gene
                    if let Some(transcript_uniquename) =
                        self.transcripts_of_polypeptides.get(&feature.uniquename) {
                            if let Some(gene_uniquename) =
                                self.genes_of_transcripts.get(transcript_uniquename) {
                                    vec![gene_uniquename.clone()]
                                } else {
                                    vec![]
                                }
                        } else {
                            vec![]
                        }
                },
                "genotype" => {
                    // A genotype annotation applies to the gene of
                    // every expressed allele in the genotype.
                    let genotype_short = self.make_genotype_short(&feature.uniquename);
                    maybe_genotype_uniquename = Some(genotype_short.uniquename.clone());
                    genotype_short.expressed_alleles.iter()
                        .map(|expressed_allele| {
                            let allele_short =
                                self.make_allele_short(&expressed_allele.allele_uniquename);
                            allele_short.gene_uniquename.clone()
                        })
                        .collect()
                },
                _ => {
                    if feature.feat_type.name == "gene" || feature.feat_type.name == "pseudogene" {
                        vec![feature.uniquename.clone()]
                    } else {
                        if TRANSCRIPT_FEATURE_TYPES.contains(&feature.feat_type.name.as_str()) {
                            if let Some(gene_uniquename) =
                                self.genes_of_transcripts.get(&feature.uniquename) {
                                    vec![gene_uniquename.clone()]
                                } else {
                                    vec![]
                                }
                        } else {
                            // Unhandled feature type: no annotation.
                            vec![]
                        }
                    }
                }
            };
        // NOTE(review): dedup() only removes *consecutive*
        // duplicates and the vec isn't sorted first, so non-adjacent
        // repeats (possible in the genotype branch) would survive.
        gene_uniquenames_vec.dedup();
        // "null" is the placeholder publication uniquename in Chado.
        let reference_uniquename =
            if publication.uniquename == "null" {
                None
            } else {
                Some(publication.uniquename.clone())
            };
        for gene_uniquename in &gene_uniquenames_vec {
            let mut extra_props_clone = extra_props.clone();
            let copies_per_cell = extra_props_clone.remove("quant_gene_ex_copies_per_cell");
            let avg_copies_per_cell = extra_props_clone.remove("quant_gene_ex_avg_copies_per_cell");
            let scale = extra_props_clone.remove("scale");
            // Only quantitative gene-expression annotation carries
            // GeneExProps; qualitative annotation gets None.
            let gene_ex_props =
                if copies_per_cell.is_some() || avg_copies_per_cell.is_some() {
                    Some(GeneExProps {
                        copies_per_cell: copies_per_cell,
                        avg_copies_per_cell: avg_copies_per_cell,
                        scale: scale,
                    })
                } else {
                    None
                };
            let maybe_gene_uniquename = Some(gene_uniquename.clone());
            let annotation = OntAnnotationDetail {
                id: feature_cvterm.feature_cvterm_id,
                gene_uniquename: maybe_gene_uniquename,
                reference_uniquename: reference_uniquename.clone(),
                genotype_uniquename: maybe_genotype_uniquename.clone(),
                with: with.clone(),
                from: from.clone(),
                residue: extra_props_clone.remove("residue"),
                gene_ex_props: gene_ex_props,
                qualifiers: qualifiers.clone(),
                evidence: evidence.clone(),
                conditions: conditions.clone(),
                extension: extension.clone(),
            };
            self.add_annotation(cvterm.borrow(), feature_cvterm.is_not,
                                annotation);
        }
    }
}
/// Group `details` for `termid` into per-CV `OntTermAnnotations`.
///
/// Most CVs yield a single `(cv_name, annotations)` pair.  Two CVs
/// are split further: "gene_ex" into qualitative vs quantitative
/// expression (by presence of `gene_ex_props`) and
/// "fission_yeast_phenotype" into single- vs multi-allele genotypes.
/// Empty groups are omitted from the result.
fn make_term_annotations(&self, termid: &str, details: &Vec<Rc<OntAnnotationDetail>>,
                         is_not: bool)
                         -> Vec<(CvName, OntTermAnnotations)> {
    let term_short = self.make_term_short(termid);
    let cv_name = term_short.cv_name.clone();
    // Fresh, empty annotation bucket for this term.
    let new_bucket = |not_flag: bool| OntTermAnnotations {
        term: term_short.clone(),
        is_not: not_flag,
        rel_names: HashSet::new(),
        annotations: vec![],
    };
    match cv_name.as_ref() {
        "gene_ex" => {
            if is_not {
                panic!("gene_ex annotations can't be NOT annotations");
            }
            let mut qual_annotations = new_bucket(false);
            let mut quant_annotations = new_bucket(false);
            // gene_ex_props marks quantitative expression data.
            for detail in details {
                if detail.gene_ex_props.is_some() {
                    quant_annotations.annotations.push(detail.clone())
                } else {
                    qual_annotations.annotations.push(detail.clone())
                }
            }
            let mut return_vec = vec![];
            if !qual_annotations.annotations.is_empty() {
                return_vec.push((String::from("qualitative_gene_expression"),
                                 qual_annotations));
            }
            if !quant_annotations.annotations.is_empty() {
                return_vec.push((String::from("quantitative_gene_expression"),
                                 quant_annotations));
            }
            return_vec
        },
        "fission_yeast_phenotype" => {
            let mut single_allele = new_bucket(is_not);
            let mut multi_allele = new_bucket(is_not);
            // Split by the number of expressed alleles in the
            // annotated genotype; every detail must have one.
            for detail in details {
                let genotype_uniquename = detail.genotype_uniquename.clone().unwrap();
                if let Some(genotype_details) = self.genotypes.get(&genotype_uniquename) {
                    if genotype_details.expressed_alleles.len() == 1 {
                        single_allele.annotations.push(detail.clone())
                    } else {
                        multi_allele.annotations.push(detail.clone())
                    }
                } else {
                    panic!("can't find genotype details for {}\n", genotype_uniquename);
                }
            }
            let mut return_vec = vec![];
            if !single_allele.annotations.is_empty() {
                return_vec.push((String::from("single_allele_phenotype"),
                                 single_allele));
            }
            if !multi_allele.annotations.is_empty() {
                return_vec.push((String::from("multi_allele_phenotype"),
                                 multi_allele));
            }
            return_vec
        },
        _ => {
            // All other CVs: one bucket holding every detail.
            let mut all_annotations = new_bucket(is_not);
            all_annotations.annotations = details.clone();
            vec![(cv_name, all_annotations)]
        }
    }
}
// store the OntTermAnnotations in the TermDetails, GeneDetails,
// GenotypeDetails and ReferenceDetails
/// Distribute the collected annotations (from `all_ont_annotations`
/// or, when `is_not`, `all_not_ont_annotations`) into the
/// `TermDetails`, `GeneDetails`, `GenotypeDetails` and
/// `ReferenceDetails` they belong to, then sort each target's
/// cv_annotations.
fn store_ont_annotations(&mut self, is_not: bool) {
    let ont_annotations = if is_not {
        &self.all_not_ont_annotations
    } else {
        &self.all_ont_annotations
    };
    // Per-gene, per-genotype and per-reference indexes built in the
    // first pass, consumed in the three passes below.
    let mut gene_annotation_by_term: HashMap<GeneUniquename, HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>> =
        HashMap::new();
    let mut genotype_annotation_by_term: HashMap<GenotypeUniquename, HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>> =
        HashMap::new();
    let mut ref_annotation_by_term: HashMap<String, HashMap<TermId, Vec<Rc<OntAnnotationDetail>>>> =
        HashMap::new();
    for (termid, annotations) in ont_annotations {
        let term_short = self.make_term_short(termid);
        // Attach the annotations directly to the term itself.
        if let Some(ref mut term_details) = self.terms.get_mut(termid) {
            let new_rel_ont_annotation = OntTermAnnotations {
                rel_names: HashSet::new(),
                is_not: is_not,
                term: term_short.clone(),
                annotations: annotations.clone(),
            };
            if is_not {
                term_details.not_rel_annotations.push(new_rel_ont_annotation);
            } else {
                term_details.rel_annotations.push(new_rel_ont_annotation);
            }
        } else {
            panic!("missing termid: {}\n", termid);
        }
        for detail in annotations {
            // Every detail is assumed to carry a gene uniquename.
            gene_annotation_by_term.entry(detail.gene_uniquename.clone().unwrap())
                .or_insert(HashMap::new())
                .entry(termid.clone())
                .or_insert(vec![])
                .push(detail.clone());
            if let Some(ref genotype_uniquename) = detail.genotype_uniquename {
                let mut existing =
                    genotype_annotation_by_term.entry(genotype_uniquename.clone())
                    .or_insert(HashMap::new())
                    .entry(termid.clone())
                    .or_insert(vec![]);
                // Avoid duplicate details for the same genotype/term
                // (contains() is a linear scan of the vec).
                if !existing.contains(detail) {
                    existing.push(detail.clone());
                }
            }
            if let Some(reference_uniquename) = detail.reference_uniquename.clone() {
                ref_annotation_by_term.entry(reference_uniquename)
                    .or_insert(HashMap::new())
                    .entry(termid.clone())
                    .or_insert(vec![])
                    .push(detail.clone());
            }
            // Also attach the detail to each experimental-condition
            // term, using a single catch-all OntTermAnnotations at
            // index 0 of that term's rel_annotations.
            for condition_termid in &detail.conditions {
                let condition_term_short = {
                    self.make_term_short(&condition_termid)
                };
                if let Some(ref mut condition_term_details) =
                    self.terms.get_mut(&condition_termid.clone())
                {
                    if condition_term_details.rel_annotations.len() == 0 {
                        condition_term_details.rel_annotations.push(
                            OntTermAnnotations {
                                term: condition_term_short,
                                is_not: is_not,
                                rel_names: HashSet::new(),
                                annotations: vec![],
                            });
                    }
                    if let Some(rel_annotation) = condition_term_details.rel_annotations.get_mut(0) {
                        rel_annotation.annotations.push(detail.clone())
                    }
                }
            }
        }
    }
    // Second pass: genes.
    for (gene_uniquename, term_annotation_map) in &gene_annotation_by_term {
        for (termid, details) in term_annotation_map {
            let new_annotations =
                self.make_term_annotations(&termid, &details, is_not);
            let mut gene_details = self.genes.get_mut(gene_uniquename).unwrap();
            for (cv_name, new_annotation) in new_annotations {
                gene_details.cv_annotations.entry(cv_name.clone())
                    .or_insert(Vec::new())
                    .push(new_annotation);
            }
        }
        let mut gene_details = self.genes.get_mut(gene_uniquename).unwrap();
        for (_, mut cv_annotations) in &mut gene_details.cv_annotations {
            cv_annotations.sort()
        }
    }
    // Third pass: genotypes.
    for (genotype_uniquename, term_annotation_map) in &genotype_annotation_by_term {
        for (termid, details) in term_annotation_map {
            let new_annotations =
                self.make_term_annotations(&termid, &details, is_not);
            let mut details = self.genotypes.get_mut(genotype_uniquename).unwrap();
            for (cv_name, new_annotation) in new_annotations {
                details.cv_annotations.entry(cv_name.clone())
                    .or_insert(Vec::new())
                    .push(new_annotation);
            }
        }
        let mut details = self.genotypes.get_mut(genotype_uniquename).unwrap();
        for (_, mut cv_annotations) in &mut details.cv_annotations {
            cv_annotations.sort()
        }
    }
    // Fourth pass: references.
    for (reference_uniquename, ref_annotation_map) in &ref_annotation_by_term {
        for (termid, details) in ref_annotation_map {
            let new_annotations =
                self.make_term_annotations(&termid, &details, is_not);
            let mut ref_details = self.references.get_mut(reference_uniquename).unwrap();
            for (cv_name, new_annotation) in new_annotations {
                ref_details.cv_annotations.entry(cv_name).or_insert(Vec::new())
                    .push(new_annotation.clone());
            }
        }
        let mut ref_details = self.references.get_mut(reference_uniquename).unwrap();
        for (_, mut term_annotations) in &mut ref_details.cv_annotations {
            term_annotations.sort()
        }
    }
}
/// True when (termid, rel_name) matches one of the statically
/// configured interesting parents, or when some extension config is
/// conditional on being an "is_a" descendant of `termid`.
fn is_interesting_parent(&self, termid: &str, rel_name: &str) -> bool {
    let statically_interesting =
        INTERESTING_PARENTS.iter()
        .any(|parent_conf| parent_conf.termid == termid &&
             parent_conf.rel_name == rel_name);
    if statically_interesting {
        return true;
    }
    // Extension configs keyed by "if_descendent_of" only apply via
    // the is_a relation.
    self.config.extensions.iter()
        .any(|ext_conf| match ext_conf.if_descendent_of {
            Some(ref conf_termid) => conf_termid == termid && rel_name == "is_a",
            None => false,
        })
}
/// Propagate annotations up the ontology using the cvtermpath
/// closure table: every annotation on a subject term is added to each
/// ancestor (object) term, tagged with the relation name(s) that
/// connect them.
fn process_cvtermpath(&mut self) {
    // Dedup store so each OntAnnotationDetail Rc is shared, keyed by
    // its feature_cvterm id.
    let mut annotation_by_id: HashMap<i32, Rc<OntAnnotationDetail>> = HashMap::new();
    // dest term -> source term -> detail id -> relation names.
    let mut new_annotations: HashMap<TermId, HashMap<TermId, HashMap<i32, HashSet<RelName>>>> =
        HashMap::new();
    for cvtermpath in &self.raw.cvtermpaths {
        let subject_term = &cvtermpath.subject;
        let subject_termid = subject_term.termid();
        let object_term = &cvtermpath.object;
        let object_termid = object_term.termid();
        if let Some(subject_term_details) = self.terms.get(&subject_termid) {
            let rel_termid =
                match cvtermpath.rel_type {
                    Some(ref rel_type) => {
                        rel_type.termid()
                    },
                    None => panic!("no relation type for {} <-> {}\n",
                                   &subject_term.name, &object_term.name)
                };
            let rel_term_name =
                self.make_term_short(&rel_termid).name;
            // has_part only propagates for a whitelisted set of CVs.
            if rel_term_name == "has_part" &&
                !HAS_PART_CV_NAMES.contains(&subject_term_details.cv_name.as_str()) {
                    continue;
                }
            // Only configured descendant relations propagate at all.
            if !DESCENDANT_REL_NAMES.contains(&rel_term_name.as_str()) {
                continue;
            }
            let annotations = &subject_term_details.rel_annotations;
            for rel_annotation in annotations {
                let OntTermAnnotations {
                    rel_names: _,
                    is_not: _,
                    term: _,
                    annotations: existing_details
                } = rel_annotation.clone();
                for detail in &existing_details {
                    if !annotation_by_id.contains_key(&detail.id) {
                        annotation_by_id.insert(detail.id, detail.clone());
                    }
                    let (dest_termid, source_termid) =
                        (object_termid.clone(), subject_termid.clone());
                    new_annotations.entry(dest_termid)
                        .or_insert(HashMap::new())
                        .entry(source_termid)
                        .or_insert(HashMap::new())
                        .entry(detail.id)
                        .or_insert(HashSet::new())
                        .insert(rel_term_name.clone());
                }
            }
        } else {
            panic!("TermDetails not found for {}", &subject_termid);
        }
    }
    // Second pass: materialise the collected propagation map into
    // OntTermAnnotations on each destination term.
    for (dest_termid, dest_annotations_map) in new_annotations.drain() {
        for (source_termid, source_annotations_map) in dest_annotations_map {
            let mut new_details: Vec<Rc<OntAnnotationDetail>> = vec![];
            let mut all_rel_names: HashSet<String> = HashSet::new();
            for (id, rel_names) in source_annotations_map {
                let detail = annotation_by_id.get(&id).unwrap().clone();
                new_details.push(detail);
                for rel_name in rel_names {
                    all_rel_names.insert(rel_name);
                }
            }
            let source_term_short = self.make_term_short(&source_termid);
            let mut dest_term_details = {
                self.terms.get_mut(&dest_termid).unwrap()
            };
            // Propagated annotations are never NOT annotations here.
            dest_term_details.rel_annotations.push(OntTermAnnotations {
                rel_names: all_rel_names,
                is_not: false,
                term: source_term_short.clone(),
                annotations: new_details,
            });
        }
    }
}
/// Build the export `Metadata`: exporter name/version (from Cargo at
/// compile time), the database creation timestamp from the chadoprop
/// table, and gene/term counts.
///
/// # Panics
///
/// Panics with an explanatory message if no "db_creation_datetime"
/// chadoprop is present in the raw data.  (Previously this was a bare
/// `unwrap()`, which panicked with no context.)
fn make_metadata(&mut self) -> Metadata {
    let mut db_creation_datetime = None;
    for chadoprop in &self.raw.chadoprops {
        if chadoprop.prop_type.name == "db_creation_datetime" {
            db_creation_datetime = chadoprop.value.clone();
        }
    }
    const PKG_NAME: &'static str = env!("CARGO_PKG_NAME");
    const VERSION: &'static str = env!("CARGO_PKG_VERSION");
    Metadata {
        export_prog_name: String::from(PKG_NAME),
        export_prog_version: String::from(VERSION),
        db_creation_datetime: db_creation_datetime
            .expect("no db_creation_datetime chadoprop found in the database"),
        gene_count: self.genes.len(),
        term_count: self.terms.len(),
    }
}
/// Build the maps served to the search API: one summary per gene, a
/// summary for every term, and term->genes indexes keyed both by
/// termid and by term name.
pub fn make_search_api_maps(&self) -> SearchAPIMaps {
    // One summary per gene, in map-iteration order.
    let mut gene_summaries: Vec<GeneSummary> = vec![];
    for uniquename in self.genes.keys() {
        gene_summaries.push(self.make_gene_summary(uniquename));
    }
    let mut term_summaries: HashSet<TermShort> = HashSet::new();
    let mut termid_genes: HashMap<TermId, HashSet<GeneUniquename>> = HashMap::new();
    let mut term_name_genes: HashMap<TermName, HashSet<GeneUniquename>> = HashMap::new();
    for (termid, term_details) in &self.terms {
        term_summaries.insert(self.make_term_short(termid));
        // Index every gene annotated to this term, by id and by name.
        for gene_uniquename in term_details.genes_by_uniquename.keys() {
            termid_genes.entry(termid.clone())
                .or_insert_with(HashSet::new)
                .insert(gene_uniquename.clone());
            term_name_genes.entry(term_details.name.clone())
                .or_insert_with(HashSet::new)
                .insert(gene_uniquename.clone());
        }
    }
    SearchAPIMaps {
        gene_summaries: gene_summaries,
        termid_genes: termid_genes,
        term_name_genes: term_name_genes,
        term_summaries: term_summaries,
    }
}
/// For one gene or genotype (`identifier`), record every reference,
/// gene, genotype, allele and term that its `cv_annotations` mention,
/// into the supplied "seen" maps.  The detail pages later embed short
/// versions of exactly those objects.
fn add_cv_annotations_to_maps(&self,
                              identifier: &String,
                              cv_annotations: &OntAnnotationMap,
                              seen_references: &mut HashMap<String, ReferenceShortMap>,
                              seen_genes: &mut HashMap<String, GeneShortMap>,
                              seen_genotypes: &mut HashMap<String, GenotypeShortMap>,
                              seen_alleles: &mut HashMap<String, AlleleShortMap>,
                              seen_terms: &mut HashMap<String, TermShortMap>) {
    for (_, feat_annotations) in cv_annotations {
        for feat_annotation in feat_annotations.iter() {
            for detail in &feat_annotation.annotations {
                self.add_ref_to_hash(seen_references,
                                     identifier.clone(), detail.reference_uniquename.clone());
                for condition_termid in &detail.conditions {
                    self.add_term_to_hash(seen_terms,
                                          identifier.clone(), condition_termid.clone());
                }
                // Extension ranges may point at terms or genes; other
                // range kinds need no cross-referencing here.
                for ext_part in &detail.extension {
                    match ext_part.ext_range {
                        ExtRange::Term(ref range_termid) =>
                            self.add_term_to_hash(seen_terms, identifier.clone(), range_termid.clone()),
                        ExtRange::Gene(ref allele_gene_uniquename) =>
                            self.add_gene_to_hash(seen_genes, identifier.clone(),
                                                  allele_gene_uniquename.clone()),
                        _ => {},
                    }
                }
                // Genotypes pull in their alleles and genes too.
                if let Some(ref genotype_uniquename) = detail.genotype_uniquename {
                    self.add_genotype_to_hash(seen_genotypes, seen_alleles, seen_genes,
                                              identifier.clone(),
                                              &genotype_uniquename);
                }
            }
        }
    }
}
/// Populate each `TermDetails`' lookup maps (genes, genotypes,
/// alleles, references, terms) from everything mentioned by the
/// term's rel_annotations.  Two passes: collect into temporary "seen"
/// maps (immutable borrow of self), then move them into the terms
/// (mutable borrow).
fn set_term_details_maps(&mut self) {
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();
    for (termid, term_details) in &self.terms {
        for rel_annotation in &term_details.rel_annotations {
            for detail in &rel_annotation.annotations {
                // Every detail is assumed to carry a gene uniquename.
                let gene_uniquename = detail.gene_uniquename.clone();
                self.add_gene_to_hash(&mut seen_genes, termid.clone(), gene_uniquename.unwrap().clone());
                self.add_ref_to_hash(&mut seen_references, termid.clone(), detail.reference_uniquename.clone());
                for condition_termid in &detail.conditions {
                    self.add_term_to_hash(&mut seen_terms, termid.clone(), condition_termid.clone());
                }
                // Extension ranges may reference terms or genes.
                for ext_part in &detail.extension {
                    match ext_part.ext_range {
                        ExtRange::Term(ref range_termid) =>
                            self.add_term_to_hash(&mut seen_terms, termid.clone(), range_termid.clone()),
                        ExtRange::Gene(ref allele_gene_uniquename) =>
                            self.add_gene_to_hash(&mut seen_genes, termid.clone(),
                                                  allele_gene_uniquename.clone()),
                        _ => {},
                    }
                }
                if let Some(ref genotype_uniquename) = detail.genotype_uniquename {
                    self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles,
                                              &mut seen_genes, termid.clone(),
                                              &genotype_uniquename);
                }
            }
        }
    }
    // Move the collected maps into the corresponding TermDetails.
    for (termid, term_details) in &mut self.terms {
        if let Some(genes) = seen_genes.remove(termid) {
            term_details.genes_by_uniquename = genes;
        }
        if let Some(genotypes) = seen_genotypes.remove(termid) {
            term_details.genotypes_by_uniquename = genotypes;
        }
        if let Some(alleles) = seen_alleles.remove(termid) {
            term_details.alleles_by_uniquename = alleles;
        }
        if let Some(references) = seen_references.remove(termid) {
            term_details.references_by_uniquename = references;
        }
        if let Some(terms) = seen_terms.remove(termid) {
            term_details.terms_by_termid = terms;
        }
    }
}
/// Populate each `GeneDetails`' lookup maps from its annotations,
/// interactions, ortholog/paralog annotations and target-of
/// annotations.  Collect-then-assign two-pass structure, as in
/// `set_term_details_maps()`.
fn set_gene_details_maps(&mut self) {
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();
    // Extra scope so the immutable borrow of self.genes ends before
    // the mutable pass below.
    {
        for (gene_uniquename, gene_details) in &self.genes {
            self.add_cv_annotations_to_maps(&gene_uniquename,
                                            &gene_details.cv_annotations,
                                            &mut seen_references,
                                            &mut seen_genes,
                                            &mut seen_genotypes,
                                            &mut seen_alleles,
                                            &mut seen_terms);
            // Both physical and genetic interactions contribute.
            let interaction_iter =
                gene_details.physical_interactions.iter().chain(&gene_details.genetic_interactions);
            for interaction in interaction_iter {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename.clone(), interaction.reference_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), interaction.gene_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), interaction.interactor_uniquename.clone());
            }
            for ortholog_annotation in &gene_details.ortholog_annotations {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename.clone(), ortholog_annotation.reference_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), ortholog_annotation.gene_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), ortholog_annotation.ortholog_uniquename.clone());
            }
            for paralog_annotation in &gene_details.paralog_annotations {
                self.add_ref_to_hash(&mut seen_references, gene_uniquename.clone(), paralog_annotation.reference_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), paralog_annotation.gene_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(), paralog_annotation.paralog_uniquename.clone());
            }
            for target_of_annotation in &gene_details.target_of_annotations {
                // Target-of annotations may come from a gene or from
                // a genotype; either way record the source objects.
                if let Some(ref annotation_gene_uniquename) = target_of_annotation.gene_uniquename {
                    self.add_gene_to_hash(&mut seen_genes, gene_uniquename.clone(),
                                          annotation_gene_uniquename.clone());
                }
                if let Some(ref annotation_genotype_uniquename) = target_of_annotation.genotype_uniquename {
                    self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles, &mut seen_genes,
                                              gene_uniquename.clone(),
                                              &annotation_genotype_uniquename.clone())
                }
                self.add_ref_to_hash(&mut seen_references, gene_uniquename.clone(),
                                     target_of_annotation.reference_uniquename.clone());
            }
        }
    }
    // Move the collected maps into the corresponding GeneDetails.
    for (gene_uniquename, gene_details) in &mut self.genes {
        if let Some(references) = seen_references.remove(gene_uniquename) {
            gene_details.references_by_uniquename = references;
        }
        if let Some(alleles) = seen_alleles.remove(gene_uniquename) {
            gene_details.alleles_by_uniquename = alleles;
        }
        if let Some(genes) = seen_genes.remove(gene_uniquename) {
            gene_details.genes_by_uniquename = genes;
        }
        if let Some(genotypes) = seen_genotypes.remove(gene_uniquename) {
            gene_details.genotypes_by_uniquename = genotypes;
        }
        if let Some(terms) = seen_terms.remove(gene_uniquename) {
            gene_details.terms_by_termid = terms;
        }
    }
}
/// Populate each `GenotypeDetails`' lookup maps (references, alleles,
/// genes, terms) from its cv_annotations.  Same collect-then-assign
/// two-pass shape as `set_gene_details_maps()`.
fn set_genotype_details_maps(&mut self) {
    let (mut seen_references, mut seen_genes, mut seen_genotypes,
         mut seen_alleles, mut seen_terms) = get_maps();
    // Pass 1: collect everything mentioned by each genotype's
    // annotations into the temporary "seen" maps.
    for (genotype_uniquename, genotype_details) in &self.genotypes {
        self.add_cv_annotations_to_maps(genotype_uniquename,
                                        &genotype_details.cv_annotations,
                                        &mut seen_references,
                                        &mut seen_genes,
                                        &mut seen_genotypes,
                                        &mut seen_alleles,
                                        &mut seen_terms);
    }
    // Pass 2: move the collected maps into the GenotypeDetails.
    for (genotype_uniquename, genotype_details) in &mut self.genotypes {
        if let Some(references) = seen_references.remove(genotype_uniquename) {
            genotype_details.references_by_uniquename = references;
        }
        if let Some(alleles) = seen_alleles.remove(genotype_uniquename) {
            genotype_details.alleles_by_uniquename = alleles;
        }
        if let Some(genes) = seen_genes.remove(genotype_uniquename) {
            genotype_details.genes_by_uniquename = genes;
        }
        if let Some(terms) = seen_terms.remove(genotype_uniquename) {
            genotype_details.terms_by_termid = terms;
        }
    }
}
/// Populate each `ReferenceDetails`' lookup maps from the reference's
/// annotations, interactions and ortholog/paralog annotations.
/// Collect-then-assign two-pass structure, with local map type
/// aliases (this method predates the shared `get_maps()` helper used
/// by the sibling set_*_details_maps methods).
fn set_reference_details_maps(&mut self) {
    type GeneShortMap = HashMap<GeneUniquename, GeneShort>;
    let mut seen_genes: HashMap<String, GeneShortMap> = HashMap::new();
    type GenotypeShortMap = HashMap<GenotypeUniquename, GenotypeShort>;
    let mut seen_genotypes: HashMap<ReferenceUniquename, GenotypeShortMap> = HashMap::new();
    type AlleleShortMap = HashMap<AlleleUniquename, AlleleShort>;
    let mut seen_alleles: HashMap<TermId, AlleleShortMap> = HashMap::new();
    type TermShortMap = HashMap<TermId, TermShort>;
    let mut seen_terms: HashMap<GeneUniquename, TermShortMap> = HashMap::new();
    // Extra scope so the immutable borrow of self.references ends
    // before the mutable pass below.
    {
        for (reference_uniquename, reference_details) in &self.references {
            for (_, feat_annotations) in &reference_details.cv_annotations {
                for feat_annotation in feat_annotations.iter() {
                    for detail in &feat_annotation.annotations {
                        // Every detail is assumed to have a gene.
                        self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(),
                                              detail.gene_uniquename.clone().unwrap());
                        for condition_termid in &detail.conditions {
                            self.add_term_to_hash(&mut seen_terms, reference_uniquename.clone(), condition_termid.clone());
                        }
                        // Extension ranges may mention terms or genes.
                        for ext_part in &detail.extension {
                            match ext_part.ext_range {
                                ExtRange::Term(ref range_termid) =>
                                    self.add_term_to_hash(&mut seen_terms, reference_uniquename.clone(), range_termid.clone()),
                                ExtRange::Gene(ref allele_gene_uniquename) =>
                                    self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(),
                                                          allele_gene_uniquename.clone()),
                                _ => {},
                            }
                        }
                        if let Some(ref genotype_uniquename) = detail.genotype_uniquename {
                            let genotype = self.make_genotype_short(genotype_uniquename);
                            self.add_genotype_to_hash(&mut seen_genotypes, &mut seen_alleles, &mut seen_genes,
                                                      reference_uniquename.clone(),
                                                      &genotype.uniquename);
                        }
                    }
                }
            }
            let interaction_iter =
                reference_details.physical_interactions.iter().chain(&reference_details.genetic_interactions);
            for interaction in interaction_iter {
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), interaction.gene_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), interaction.interactor_uniquename.clone());
            }
            for ortholog_annotation in &reference_details.ortholog_annotations {
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), ortholog_annotation.gene_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), ortholog_annotation.ortholog_uniquename.clone());
            }
            for paralog_annotation in &reference_details.paralog_annotations {
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), paralog_annotation.gene_uniquename.clone());
                self.add_gene_to_hash(&mut seen_genes, reference_uniquename.clone(), paralog_annotation.paralog_uniquename.clone());
            }
        }
    }
    // Move the collected maps into the corresponding ReferenceDetails.
    for (reference_uniquename, reference_details) in &mut self.references {
        if let Some(genes) = seen_genes.remove(reference_uniquename) {
            reference_details.genes_by_uniquename = genes;
        }
        if let Some(genotypes) = seen_genotypes.remove(reference_uniquename) {
            reference_details.genotypes_by_uniquename = genotypes;
        }
        if let Some(alleles) = seen_alleles.remove(reference_uniquename) {
            reference_details.alleles_by_uniquename = alleles;
        }
        if let Some(terms) = seen_terms.remove(reference_uniquename) {
            reference_details.terms_by_termid = terms;
        }
    }
}
/// Compute gene/genotype counts per term and gene counts per
/// reference, then write those counts into every embedded
/// `TermShort`/`ReferenceShort` across genes, genotypes, references
/// and terms.
pub fn set_counts(&mut self) {
    let mut term_seen_genes: HashMap<TermId, HashSet<GeneUniquename>> = HashMap::new();
    let mut term_seen_genotypes: HashMap<TermId, HashSet<GenotypeUniquename>> = HashMap::new();
    let mut term_seen_single_allele_genotypes: HashMap<TermId, HashSet<GenotypeUniquename>> = HashMap::new();
    let mut ref_seen_genes: HashMap<ReferenceUniquename, HashSet<GeneUniquename>> = HashMap::new();
    // Pass 1a: per-term distinct genes/genotypes (and the subset of
    // single-allele genotypes).
    for (termid, term_details) in &self.terms {
        let mut seen_genes: HashSet<GeneUniquename> = HashSet::new();
        let mut seen_genotypes: HashSet<GenotypeUniquename> = HashSet::new();
        let mut seen_single_allele_genotypes: HashSet<GenotypeUniquename> = HashSet::new();
        for rel_annotation in &term_details.rel_annotations {
            for annotation in &rel_annotation.annotations {
                seen_genes.insert(annotation.gene_uniquename.clone().unwrap());
                if let Some(ref genotype_uniquename) = annotation.genotype_uniquename {
                    seen_genotypes.insert(genotype_uniquename.clone());
                    let genotype = self.genotypes.get(genotype_uniquename).unwrap();
                    if genotype.expressed_alleles.len() == 1 {
                        seen_single_allele_genotypes.insert(genotype_uniquename.clone());
                    }
                }
            }
        }
        term_seen_genes.insert(termid.clone(), seen_genes);
        term_seen_genotypes.insert(termid.clone(), seen_genotypes);
        term_seen_single_allele_genotypes.insert(termid.clone(), seen_single_allele_genotypes);
    }
    // Pass 1b: per-reference distinct genes, from (non-NOT)
    // annotations, interactions and ortholog annotations.
    for (reference_uniquename, reference_details) in &self.references {
        let mut seen_genes: HashSet<GeneUniquename> = HashSet::new();
        for (_, rel_annotations) in &reference_details.cv_annotations {
            for rel_annotation in rel_annotations {
                for annotation in &rel_annotation.annotations {
                    if !rel_annotation.is_not {
                        seen_genes.insert(annotation.gene_uniquename.clone().unwrap());
                    }
                }
            }
        }
        let interaction_iter =
            reference_details.physical_interactions.iter().chain(&reference_details.genetic_interactions);
        for interaction in interaction_iter {
            seen_genes.insert(interaction.gene_uniquename.clone());
            seen_genes.insert(interaction.interactor_uniquename.clone());
        }
        for ortholog_annotation in &reference_details.ortholog_annotations {
            seen_genes.insert(ortholog_annotation.gene_uniquename.clone());
        }
        ref_seen_genes.insert(reference_uniquename.clone(), seen_genes);
    }
    // Pass 2: write the counts into every embedded short object.
    for (_, gene_details) in &mut self.genes {
        for (_, feat_annotations) in &mut gene_details.cv_annotations {
            for mut feat_annotation in feat_annotations.iter_mut() {
                feat_annotation.term.gene_count =
                    term_seen_genes.get(&feat_annotation.term.termid).unwrap().len();
                feat_annotation.term.genotype_count =
                    term_seen_genotypes.get(&feat_annotation.term.termid).unwrap().len();
            }
        }
        for (reference_uniquename, reference_short) in
            &mut gene_details.references_by_uniquename {
                reference_short.gene_count =
                    ref_seen_genes.get(reference_uniquename).unwrap().len();
            }
    }
    for (_, genotype_details) in &mut self.genotypes {
        for (_, feat_annotations) in &mut genotype_details.cv_annotations {
            for mut feat_annotation in feat_annotations.iter_mut() {
                feat_annotation.term.genotype_count =
                    term_seen_genotypes.get(&feat_annotation.term.termid).unwrap().len();
            }
        }
    }
    for (_, ref_details) in &mut self.references {
        for (_, ref_annotations) in &mut ref_details.cv_annotations {
            for ref_annotation in ref_annotations {
                ref_annotation.term.gene_count =
                    term_seen_genes.get(&ref_annotation.term.termid).unwrap().len();
                ref_annotation.term.genotype_count =
                    term_seen_genotypes.get(&ref_annotation.term.termid).unwrap().len();
            }
        }
    }
    for (_, term_details) in &mut self.terms {
        for rel_annotation in &mut term_details.rel_annotations {
            rel_annotation.term.gene_count =
                term_seen_genes.get(&rel_annotation.term.termid).unwrap().len();
            rel_annotation.term.genotype_count =
                term_seen_genotypes.get(&rel_annotation.term.termid).unwrap().len();
        }
        for (reference_uniquename, reference_short) in
            &mut term_details.references_by_uniquename {
                reference_short.gene_count =
                    ref_seen_genes.get(reference_uniquename).unwrap().len();
            }
        // The single-allele set is moved (not copied) into the term.
        term_details.single_allele_genotype_uniquenames =
            term_seen_single_allele_genotypes.remove(&term_details.termid).unwrap();
    }
}
/// Top-level driver: run every processing stage in dependency order,
/// then assemble and return the final `WebData` bundle.  The stage
/// order matters — later stages read structures built by earlier
/// ones.
pub fn get_web_data(&mut self) -> WebData {
    // Raw-data ingestion and cross-reference building.
    self.process_references();
    self.make_feature_rel_maps();
    self.process_features();
    self.add_gene_neighbourhoods();
    self.process_props_from_feature_cvterms();
    self.process_allele_features();
    self.process_genotype_features();
    self.add_alleles_to_genotypes();
    self.process_cvterms();
    self.add_interesting_parents();
    self.process_cvterm_rels();
    self.process_extension_cvterms();
    self.process_feature_synonyms();
    // Annotation processing: normal then NOT annotations, then
    // propagation up the ontology.
    self.process_feature_cvterms();
    self.store_ont_annotations(false);
    self.store_ont_annotations(true);
    self.process_cvtermpath();
    self.process_annotation_feature_rels();
    self.add_target_of_annotations();
    self.make_all_cv_summaries();
    // Fill in the per-object lookup maps and the counts shown in
    // the web interface.
    self.set_term_details_maps();
    self.set_gene_details_maps();
    self.set_genotype_details_maps();
    self.set_reference_details_maps();
    self.set_counts();
    let mut web_data_terms: IdTermDetailsMap = HashMap::new();
    let search_api_maps = self.make_search_api_maps();
    // Move the terms out of self into Rc-wrapped form.
    for (termid, term_details) in self.terms.drain() {
        web_data_terms.insert(termid.clone(), Rc::new(term_details));
    }
    // NOTE(review): drain() already leaves self.terms empty, so this
    // reassignment looks redundant (but harmless).
    self.terms = HashMap::new();
    let mut used_terms: IdTermDetailsMap = HashMap::new();
    // remove terms with no annotation
    for (termid, term_details) in &web_data_terms {
        if term_details.rel_annotations.len() > 0 {
            used_terms.insert(termid.clone(), term_details.clone());
        }
    }
    let metadata = self.make_metadata();
    WebData {
        genes: self.genes.clone(),
        genotypes: self.genotypes.clone(),
        terms: web_data_terms,
        used_terms: used_terms,
        metadata: metadata,
        references: self.references.clone(),
        search_api_maps: search_api_maps,
    }
}
}
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use brotli::Decompressor;
use connector::Connector;
use cookie;
use cookie_storage::CookieStorage;
use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, HttpRequest as DevtoolsHttpRequest};
use devtools_traits::{HttpResponse as DevtoolsHttpResponse, NetworkEvent};
use fetch::cors_cache::CorsCache;
use fetch::methods::{Data, DoneChannel, FetchContext, Target, is_simple_header, is_simple_method, main_fetch};
use flate2::read::{DeflateDecoder, GzDecoder};
use hsts::HstsList;
use hyper::Error as HttpError;
use hyper::LanguageTag;
use hyper::client::{Pool, Request as HyperRequest, Response as HyperResponse};
use hyper::client::pool::PooledStream;
use hyper::header::{Accept, AccessControlAllowCredentials, AccessControlAllowHeaders};
use hyper::header::{AccessControlAllowMethods, AccessControlAllowOrigin};
use hyper::header::{AccessControlMaxAge, AccessControlRequestHeaders};
use hyper::header::{AccessControlRequestMethod, AcceptEncoding, AcceptLanguage};
use hyper::header::{Authorization, Basic, CacheControl, CacheDirective};
use hyper::header::{ContentEncoding, ContentLength, Encoding, Header, Headers};
use hyper::header::{Host, Origin as HyperOrigin, IfMatch, IfRange};
use hyper::header::{IfUnmodifiedSince, IfModifiedSince, IfNoneMatch, Location};
use hyper::header::{Pragma, Quality, QualityItem, Referer, SetCookie};
use hyper::header::{UserAgent, q, qitem};
use hyper::method::Method;
use hyper::net::{Fresh, HttpStream, HttpsStream, NetworkConnector};
use hyper::status::StatusCode;
use hyper_openssl::SslStream;
use hyper_serde::Serde;
use log;
use msg::constellation_msg::PipelineId;
use net_traits::{CookieSource, FetchMetadata, NetworkError, ReferrerPolicy};
use net_traits::request::{CacheMode, CredentialsMode, Destination, Origin};
use net_traits::request::{RedirectMode, Referrer, Request, RequestMode};
use net_traits::request::{ResponseTainting, Type};
use net_traits::response::{HttpsState, Response, ResponseBody, ResponseType};
use resource_thread::AuthCache;
use servo_url::{ImmutableOrigin, ServoUrl};
use std::collections::HashSet;
use std::error::Error;
use std::io::{self, Read, Write};
use std::iter::FromIterator;
use std::mem;
use std::ops::Deref;
use std::sync::{Arc, RwLock};
use std::sync::mpsc::{channel, Sender};
use std::thread;
use time;
use time::Tm;
use unicase::UniCase;
use uuid;
/// Read one chunk (up to 1024 bytes) from `reader`.
///
/// Returns `Data::Payload` with exactly the bytes read,
/// `Data::Done` on EOF (a zero-byte read), or `Err(())` if the
/// underlying read fails.
fn read_block<R: Read>(reader: &mut R) -> Result<Data, ()> {
    let mut chunk = vec![0; 1024];
    let bytes_read = reader.read(&mut chunk).map_err(|_| ())?;
    if bytes_read == 0 {
        Ok(Data::Done)
    } else {
        // Trim the buffer down to what was actually read.
        chunk.truncate(bytes_read);
        Ok(Data::Payload(chunk))
    }
}
/// Shared, lock-protected state used across HTTP fetches.
pub struct HttpState {
    /// Hosts known to require HTTPS (HTTP Strict Transport Security).
    pub hsts_list: RwLock<HstsList>,
    /// Stored cookies.
    pub cookie_jar: RwLock<CookieStorage>,
    /// Cached HTTP authentication credentials.
    pub auth_cache: RwLock<AuthCache>,
}
impl HttpState {
    /// Creates an empty `HttpState`. The `150` is the capacity argument
    /// passed to `CookieStorage::new`; its exact semantics are defined
    /// there (NOTE(review): presumably a per-domain cookie limit — confirm
    /// against `CookieStorage`).
    pub fn new() -> HttpState {
        HttpState {
            hsts_list: RwLock::new(HstsList::new()),
            cookie_jar: RwLock::new(CookieStorage::new(150)),
            auth_cache: RwLock::new(AuthCache::new()),
        }
    }
}
/// Current high-resolution time in milliseconds, derived from
/// `time::precise_time_ns`.
fn precise_time_ms() -> u64 {
    const NS_PER_MS: u64 = 1_000_000;
    time::precise_time_ns() / NS_PER_MS
}
/// Thin wrapper around a hyper response so that local helpers (and a
/// `Read` impl) can be defined on it.
pub struct WrappedHttpResponse {
    pub response: HyperResponse
}
impl Read for WrappedHttpResponse {
    // Delegate reads straight to the underlying hyper response body.
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.response.read(buf)
    }
}
impl WrappedHttpResponse {
    /// Borrows the response's header map.
    fn headers(&self) -> &Headers {
        &self.response.headers
    }

    /// Returns the first supported encoding found in the response's
    /// `Content-Encoding` header, preferring gzip, then deflate, then
    /// brotli ("br"); `None` if the header is absent or carries no
    /// supported encoding.
    fn content_encoding(&self) -> Option<Encoding> {
        let encodings = match self.headers().get::<ContentEncoding>() {
            Some(&ContentEncoding(ref list)) => list,
            None => return None,
        };
        let supported = [
            Encoding::Gzip,
            Encoding::Deflate,
            Encoding::EncodingExt("br".to_owned()),
        ];
        for candidate in &supported {
            if encodings.contains(candidate) {
                return Some(candidate.clone());
            }
        }
        None
    }
}
/// Builds outgoing hyper requests on top of a shared connection pool.
struct NetworkHttpRequestFactory {
    pub connector: Arc<Pool<Connector>>,
}
impl NetworkConnector for NetworkHttpRequestFactory {
    type Stream = PooledStream<HttpsStream<SslStream<HttpStream>>>;

    // Delegate connection requests to the pooled connector.
    fn connect(&self, host: &str, port: u16, scheme: &str) -> Result<Self::Stream, HttpError> {
        self.connector.connect(host, port, scheme)
    }
}
impl NetworkHttpRequestFactory {
    /// Creates a fresh (not yet started) hyper request for `url` with the
    /// given method and headers, mapping connection errors into
    /// `NetworkError`s tagged with the URL.
    fn create(&self, url: ServoUrl, method: Method, headers: Headers)
              -> Result<HyperRequest<Fresh>, NetworkError> {
        let connection = HyperRequest::with_connector(method, url.clone().into_url(), self);
        let mut request = connection.map_err(|e| NetworkError::from_hyper_error(&url, e))?;
        *request.headers_mut() = headers;
        Ok(request)
    }
}
// Step 3 of https://fetch.spec.whatwg.org/#concept-fetch.
/// Installs a default `Accept` header appropriate for the request's
/// type and destination, unless the caller already supplied one.
pub fn set_default_accept(type_: Type, destination: Destination, headers: &mut Headers) {
    if headers.has::<Accept>() {
        return;
    }
    let value = match (type_, destination) {
        // Step 3.2.
        (_, Destination::Document) => {
            vec![
                qitem(mime!(Text / Html)),
                qitem(mime!(Application / ("xhtml+xml"))),
                QualityItem::new(mime!(Application / Xml), q(0.9)),
                QualityItem::new(mime!(_ / _), q(0.8)),
            ]
        },
        // Step 3.3.
        (Type::Image, _) => {
            vec![
                qitem(mime!(Image / Png)),
                qitem(mime!(Image / ("svg+xml") )),
                QualityItem::new(mime!(Image / _), q(0.8)),
                QualityItem::new(mime!(_ / _), q(0.5)),
            ]
        },
        // Step 3.3.
        (Type::Style, _) => {
            vec![
                qitem(mime!(Text / Css)),
                QualityItem::new(mime!(_ / _), q(0.1))
            ]
        },
        // Step 3.1.
        _ => {
            vec![qitem(mime!(_ / _))]
        },
    };
    // Step 3.4.
    headers.set(Accept(value));
}
/// Installs the default `Accept-Encoding: gzip, deflate, br` header
/// when the request does not already specify one.
fn set_default_accept_encoding(headers: &mut Headers) {
    if !headers.has::<AcceptEncoding>() {
        headers.set(AcceptEncoding(vec![
            qitem(Encoding::Gzip),
            qitem(Encoding::Deflate),
            qitem(Encoding::EncodingExt("br".to_owned())),
        ]));
    }
}
/// Installs the default `Accept-Language` header (`en-US`, then `en`
/// at quality 0.5) when the request does not already specify one.
pub fn set_default_accept_language(headers: &mut Headers) {
    if headers.has::<AcceptLanguage>() {
        return;
    }
    // Small builder for a language tag with an optional region.
    let tag = |language: &str, region: Option<&str>| {
        let mut t: LanguageTag = Default::default();
        t.language = Some(language.to_owned());
        t.region = region.map(str::to_owned);
        t
    };
    headers.set(AcceptLanguage(vec![
        qitem(tag("en", Some("US"))),
        QualityItem::new(tag("en", None), Quality(500)),
    ]));
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-state-no-referrer-when-downgrade
fn no_referrer_when_downgrade_header(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
    // Suppress the referrer entirely on an https -> non-https downgrade.
    let downgrade = referrer_url.scheme() == "https" && url.scheme() != "https";
    if downgrade {
        None
    } else {
        strip_url(referrer_url, false)
    }
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-strict-origin
fn strict_origin(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
    // Suppress the referrer entirely on an https -> non-https downgrade;
    // otherwise send only the origin.
    let downgrade = referrer_url.scheme() == "https" && url.scheme() != "https";
    if downgrade {
        None
    } else {
        strip_url(referrer_url, true)
    }
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-strict-origin-when-cross-origin
fn strict_origin_when_cross_origin(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
    // Suppress the referrer entirely on an https -> non-https downgrade.
    let downgrade = referrer_url.scheme() == "https" && url.scheme() != "https";
    if downgrade {
        return None;
    }
    // Cross-origin requests get only the origin; same-origin requests
    // keep the full (stripped) URL.
    let origin_only = referrer_url.origin() != url.origin();
    strip_url(referrer_url, origin_only)
}
/// https://w3c.github.io/webappsec-referrer-policy/#strip-url
///
/// Removes credentials and the fragment from `referrer_url` (and, when
/// `origin_only` is set, the path and query too). Non-HTTP(S) URLs
/// yield `None`.
fn strip_url(mut referrer_url: ServoUrl, origin_only: bool) -> Option<ServoUrl> {
    match referrer_url.scheme() {
        "http" | "https" => {}
        _ => return None,
    }
    {
        // Scoped so the mutable borrow ends before we return the URL.
        let referrer = referrer_url.as_mut_url();
        referrer.set_username("").unwrap();
        referrer.set_password(None).unwrap();
        referrer.set_fragment(None);
        if origin_only {
            referrer.set_path("");
            referrer.set_query(None);
        }
    }
    Some(referrer_url)
}
/// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
/// Steps 4-6.
///
/// Computes the URL (if any) to send as the `Referer` header for a
/// request to `current_url`, given the active policy and the referrer
/// source. The caller must not have set `Referer` already.
pub fn determine_request_referrer(headers: &mut Headers,
                                  referrer_policy: ReferrerPolicy,
                                  referrer_source: ServoUrl,
                                  current_url: ServoUrl)
                                  -> Option<ServoUrl> {
    assert!(!headers.has::<Referer>());
    // FIXME(#14505): this does not seem to be the correct way of checking for
    // same-origin requests.
    let cross_origin = referrer_source.origin() != current_url.origin();
    // FIXME(#14506): some of these cases are expected to consider whether the
    // request's client is "TLS-protected", whatever that means.
    match referrer_policy {
        ReferrerPolicy::NoReferrer => None,
        ReferrerPolicy::Origin => strip_url(referrer_source, true),
        ReferrerPolicy::SameOrigin => if cross_origin { None } else { strip_url(referrer_source, false) },
        ReferrerPolicy::UnsafeUrl => strip_url(referrer_source, false),
        ReferrerPolicy::OriginWhenCrossOrigin => strip_url(referrer_source, cross_origin),
        ReferrerPolicy::StrictOrigin => strict_origin(referrer_source, current_url),
        ReferrerPolicy::StrictOriginWhenCrossOrigin => strict_origin_when_cross_origin(referrer_source, current_url),
        ReferrerPolicy::NoReferrerWhenDowngrade => no_referrer_when_downgrade_header(referrer_source, current_url),
    }
}
/// Attaches the stored cookies matching `url` to the outgoing request
/// as a raw `Cookie` header (no-op when the jar has nothing for it).
pub fn set_request_cookies(url: &ServoUrl, headers: &mut Headers, cookie_jar: &RwLock<CookieStorage>) {
    let mut jar = cookie_jar.write().unwrap();
    if let Some(cookie_list) = jar.cookies_for_url(url, CookieSource::HTTP) {
        headers.set_raw("Cookie".to_owned(), vec![cookie_list.into_bytes()]);
    }
}
/// Parses `cookie_val` as a `Set-Cookie` header value and stores every
/// cookie that is valid for `request` in the jar. Unparseable headers
/// and rejected cookies are silently dropped.
fn set_cookie_for_url(cookie_jar: &RwLock<CookieStorage>,
                      request: &ServoUrl,
                      cookie_val: String) {
    let mut jar = cookie_jar.write().unwrap();
    let source = CookieSource::HTTP;
    match Header::parse_header(&[cookie_val.into_bytes()]) {
        Ok(SetCookie(cookie_strings)) => {
            for cookie_string in cookie_strings {
                if let Some(parsed) = cookie::Cookie::from_cookie_string(cookie_string, request, source) {
                    jar.push(parsed, request, source);
                }
            }
        }
        Err(_) => {}
    }
}
/// Stores every `Set-Cookie` header from `headers` in the cookie jar,
/// skipping values that are not valid UTF-8.
fn set_cookies_from_headers(url: &ServoUrl, headers: &Headers, cookie_jar: &RwLock<CookieStorage>) {
    let raw_cookies = match headers.get_raw("set-cookie") {
        Some(raw) => raw,
        None => return,
    };
    for raw_value in raw_cookies.iter() {
        if let Ok(cookie_value) = String::from_utf8(raw_value.clone()) {
            set_cookie_for_url(cookie_jar, url, cookie_value);
        }
    }
}
/// A response body stream that transparently decompresses as it is read.
struct StreamedResponse {
    decoder: Decoder,
}
impl Read for StreamedResponse {
    // Dispatch the read to whichever decoder wraps the response.
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match self.decoder {
            Decoder::Gzip(ref mut d) => d.read(buf),
            Decoder::Deflate(ref mut d) => d.read(buf),
            Decoder::Brotli(ref mut d) => d.read(buf),
            Decoder::Plain(ref mut d) => d.read(buf)
        }
    }
}
impl StreamedResponse {
fn from_http_response(response: WrappedHttpResponse) -> io::Result<StreamedResponse> {
let decoder = match response.content_encoding() {
Some(Encoding::Gzip) => {
Decoder::Gzip(try!(GzDecoder::new(response)))
}
Some(Encoding::Deflate) => {
Decoder::Deflate(DeflateDecoder::new(response))
}
Some(Encoding::EncodingExt(ref ext)) if ext == "br" => {
Decoder::Brotli(Decompressor::new(response, 1024))
}
_ => {
Decoder::Plain(response)
}
};
Ok(StreamedResponse { decoder: decoder })
}
}
/// The concrete body reader, specialised by the response's content
/// encoding (selected in `StreamedResponse::from_http_response`).
enum Decoder {
    Gzip(GzDecoder<WrappedHttpResponse>),
    Deflate(DeflateDecoder<WrappedHttpResponse>),
    Brotli(Decompressor<WrappedHttpResponse>),
    Plain(WrappedHttpResponse)
}
/// Packages the details of an outgoing HTTP request into a devtools
/// `NetworkEvent::HttpRequest` message keyed by `request_id`.
/// `connect_time` and `send_time` are elapsed durations in milliseconds
/// (measured by the caller with `precise_time_ms`).
fn prepare_devtools_request(request_id: String,
                            url: ServoUrl,
                            method: Method,
                            headers: Headers,
                            body: Option<Vec<u8>>,
                            pipeline_id: PipelineId,
                            now: Tm,
                            connect_time: u64,
                            send_time: u64,
                            is_xhr: bool) -> ChromeToDevtoolsControlMsg {
    let request = DevtoolsHttpRequest {
        url: url,
        method: method,
        headers: headers,
        body: body,
        pipeline_id: pipeline_id,
        startedDateTime: now,
        timeStamp: now.to_timespec().sec,
        connect_time: connect_time,
        send_time: send_time,
        is_xhr: is_xhr,
    };
    let net_event = NetworkEvent::HttpRequest(request);
    ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event)
}
/// Forwards a prepared network event to the devtools thread.
/// Panics if the devtools channel has been closed (contrast with
/// `send_response_to_devtools`, which ignores send failures).
fn send_request_to_devtools(msg: ChromeToDevtoolsControlMsg,
                            devtools_chan: &Sender<DevtoolsControlMsg>) {
    devtools_chan.send(DevtoolsControlMsg::FromChrome(msg)).unwrap();
}
/// Notifies devtools of a received HTTP response (headers and status
/// only; the body is reported separately). Send failures are ignored,
/// as devtools may already have shut down.
fn send_response_to_devtools(devtools_chan: &Sender<DevtoolsControlMsg>,
                             request_id: String,
                             headers: Option<Headers>,
                             status: Option<(u16, Vec<u8>)>,
                             pipeline_id: PipelineId) {
    let response = DevtoolsHttpResponse {
        headers: headers,
        status: status,
        body: None,
        pipeline_id: pipeline_id,
    };
    let msg = ChromeToDevtoolsControlMsg::NetworkEvent(
        request_id,
        NetworkEvent::HttpResponse(response));
    let _ = devtools_chan.send(DevtoolsControlMsg::FromChrome(msg));
}
/// Looks up cached HTTP Basic credentials for `origin`, keyed by the
/// origin's ASCII serialization.
fn auth_from_cache(auth_cache: &RwLock<AuthCache>, origin: &ImmutableOrigin) -> Option<Basic> {
    let cache = auth_cache.read().unwrap();
    cache.entries
        .get(&origin.ascii_serialization())
        .map(|entry| Basic {
            username: entry.user_name.clone(),
            password: Some(entry.password.clone()),
        })
}
/// Sends one HTTP request and returns the raw hyper response together
/// with an optional devtools notification describing the request.
///
/// Retries in a loop when a pooled connection turns out to be stale
/// (ConnectionAborted/ConnectionReset on send). `iters` is the 1-based
/// attempt number across redirects: after the first iteration the
/// request body is deliberately dropped (see the TODO below).
fn obtain_response(request_factory: &NetworkHttpRequestFactory,
                   url: &ServoUrl,
                   method: &Method,
                   request_headers: &Headers,
                   data: &Option<Vec<u8>>,
                   load_data_method: &Method,
                   pipeline_id: &Option<PipelineId>,
                   iters: u32,
                   request_id: Option<&str>,
                   is_xhr: bool)
                   -> Result<(WrappedHttpResponse, Option<ChromeToDevtoolsControlMsg>), NetworkError> {
    let null_data = None;

    // loop trying connections in connection pool
    // they may have grown stale (disconnected), in which case we'll get
    // a ConnectionAborted error. this loop tries again with a new
    // connection.
    loop {
        let mut headers = request_headers.clone();

        // Avoid automatically sending request body if a redirect has occurred.
        //
        // TODO - This is the wrong behaviour according to the RFC. However, I'm not
        // sure how much "correctness" vs. real-world is important in this case.
        //
        // https://tools.ietf.org/html/rfc7231#section-6.4
        let is_redirected_request = iters != 1;
        let request_body;
        match data {
            &Some(ref d) if !is_redirected_request => {
                headers.set(ContentLength(d.len() as u64));
                request_body = data;
            }
            _ => {
                // Explicit Content-Length: 0 for bodyless non-GET/HEAD requests.
                if *load_data_method != Method::Get && *load_data_method != Method::Head {
                    headers.set(ContentLength(0))
                }
                request_body = &null_data;
            }
        }
        if log_enabled!(log::LogLevel::Info) {
            info!("{} {}", method, url);
            for header in headers.iter() {
                info!(" - {}", header);
            }
            info!("{:?}", data);
        }

        let connect_start = precise_time_ms();

        let request = try!(request_factory.create(url.clone(), method.clone(),
                                                  headers.clone()));

        let connect_end = precise_time_ms();

        let send_start = precise_time_ms();

        let mut request_writer = match request.start() {
            Ok(streaming) => streaming,
            Err(e) => return Err(NetworkError::Internal(e.description().to_owned())),
        };
        if let Some(ref data) = *request_body {
            if let Err(e) = request_writer.write_all(&data) {
                return Err(NetworkError::Internal(e.description().to_owned()))
            }
        }
        let response = match request_writer.send() {
            Ok(w) => w,
            Err(HttpError::Io(ref io_error))
                if io_error.kind() == io::ErrorKind::ConnectionAborted ||
                   io_error.kind() == io::ErrorKind::ConnectionReset => {
                // Stale pooled connection: retry with a fresh one.
                debug!("connection aborted ({:?}), possibly stale, trying new connection", io_error.description());
                continue;
            },
            Err(e) => return Err(NetworkError::Internal(e.description().to_owned())),
        };

        let send_end = precise_time_ms();

        // Only notify devtools when both a request id and a pipeline id exist.
        let msg = if let Some(request_id) = request_id {
            if let Some(pipeline_id) = *pipeline_id {
                Some(prepare_devtools_request(
                    request_id.into(),
                    url.clone(), method.clone(), headers,
                    request_body.clone(), pipeline_id, time::now(),
                    connect_end - connect_start, send_end - send_start, is_xhr))
            } else {
                debug!("Not notifying devtools (no pipeline_id)");
                None
            }
        } else {
            debug!("Not notifying devtools (no request_id)");
            None
        };
        return Ok((WrappedHttpResponse { response: response }, msg));
    }
}
/// [HTTP fetch](https://fetch.spec.whatwg.org#http-fetch)
///
/// Runs the HTTP-fetch algorithm for `request`: optionally performs a
/// CORS preflight first, then delegates to
/// `http_network_or_cache_fetch`, and handles redirects and 401/407
/// responses per the spec. `done_chan` is reset on entry; a later
/// network fetch may install a new body-streaming channel into it.
pub fn http_fetch(request: &mut Request,
                  cache: &mut CorsCache,
                  cors_flag: bool,
                  cors_preflight_flag: bool,
                  authentication_fetch_flag: bool,
                  target: Target,
                  done_chan: &mut DoneChannel,
                  context: &FetchContext)
                  -> Response {
    // This is a new async fetch, reset the channel we are waiting on
    *done_chan = None;
    // Step 1
    let mut response: Option<Response> = None;

    // Step 2
    // nothing to do, since actual_response is a function on response

    // Step 3
    if !request.skip_service_worker && !request.is_service_worker_global_scope {
        // Substep 1
        // TODO (handle fetch unimplemented)

        // NOTE: `response` is always None here until service-worker
        // handling (above) is implemented, so this block is currently inert.
        if let Some(ref res) = response {
            // Substep 2
            // nothing to do, since actual_response is a function on response

            // Substep 3
            if (res.response_type == ResponseType::Opaque &&
                request.mode != RequestMode::NoCors) ||
               (res.response_type == ResponseType::OpaqueRedirect &&
                request.redirect_mode != RedirectMode::Manual) ||
               (res.url_list.len() > 1 && request.redirect_mode != RedirectMode::Follow) ||
               res.is_network_error() {
                return Response::network_error(NetworkError::Internal("Request failed".into()));
            }

            // Substep 4
            // TODO: set response's CSP list on actual_response
        }
    }

    // Step 4
    let credentials = match request.credentials_mode {
        CredentialsMode::Include => true,
        CredentialsMode::CredentialsSameOrigin if request.response_tainting == ResponseTainting::Basic
            => true,
        _ => false
    };

    // Step 5
    if response.is_none() {
        // Substep 1
        if cors_preflight_flag {
            let method_cache_match = cache.match_method(&*request,
                                                        request.method.clone());

            let method_mismatch = !method_cache_match && (!is_simple_method(&request.method) ||
                                                          request.use_cors_preflight);
            let header_mismatch = request.headers.iter().any(|view|
                !cache.match_header(&*request, view.name()) && !is_simple_header(&view)
            );

            // Sub-substep 1
            if method_mismatch || header_mismatch {
                let preflight_result = cors_preflight_fetch(&request, cache, context);
                // Sub-substep 2
                if let Some(e) = preflight_result.get_network_error() {
                    return Response::network_error(e.clone());
                }
            }
        }

        // Substep 2
        request.skip_service_worker = true;

        // Substep 3
        let mut fetch_result = http_network_or_cache_fetch(
            request, authentication_fetch_flag, cors_flag, done_chan, context);

        // Substep 4
        if cors_flag && cors_check(&request, &fetch_result).is_err() {
            return Response::network_error(NetworkError::Internal("CORS check failed".into()));
        }

        fetch_result.return_internal = false;
        response = Some(fetch_result);
    }

    // response is guaranteed to be something by now
    let mut response = response.unwrap();

    // Step 5
    match response.actual_response().status {
        // Code 301, 302, 303, 307, 308
        status if status.map_or(false, is_redirect_status) => {
            response = match request.redirect_mode {
                RedirectMode::Error => Response::network_error(NetworkError::Internal("Redirect mode error".into())),
                RedirectMode::Manual => {
                    response.to_filtered(ResponseType::OpaqueRedirect)
                },
                RedirectMode::Follow => {
                    // set back to default
                    response.return_internal = true;
                    http_redirect_fetch(request, cache, response,
                                        cors_flag, target, done_chan, context)
                }
            }
        },

        // Code 401
        Some(StatusCode::Unauthorized) => {
            // Step 1
            // FIXME: Figure out what to do with request window objects
            if cors_flag || !credentials {
                return response;
            }

            // Step 2
            // TODO: Spec says requires testing on multiple WWW-Authenticate headers

            // Step 3
            if !request.use_url_credentials || authentication_fetch_flag {
                // TODO: Prompt the user for username and password from the window
                // Wrong, but will have to do until we are able to prompt the user
                // otherwise this creates an infinite loop
                // We basically pretend that the user declined to enter credentials
                return response;
            }

            // Step 4
            return http_fetch(request, cache, cors_flag, cors_preflight_flag,
                              true, target, done_chan, context);
        }

        // Code 407
        Some(StatusCode::ProxyAuthenticationRequired) => {
            // Step 1
            // TODO: Figure out what to do with request window objects

            // Step 2
            // TODO: Spec says requires testing on Proxy-Authenticate headers

            // Step 3
            // TODO: Prompt the user for proxy authentication credentials
            // Wrong, but will have to do until we are able to prompt the user
            // otherwise this creates an infinite loop
            // We basically pretend that the user declined to enter credentials
            return response;

            // Step 4
            // return http_fetch(request, cache,
            //                   cors_flag, cors_preflight_flag,
            //                   authentication_fetch_flag, target,
            //                   done_chan, context);
        }

        _ => { }
    }

    // Step 6
    if authentication_fetch_flag {
        // TODO: Create authentication entry for this request
    }

    // set back to default
    response.return_internal = true;
    // Step 7
    response
}
/// [HTTP redirect fetch](https://fetch.spec.whatwg.org#http-redirect-fetch)
///
/// Follows a single redirect of `response`: validates the Location URL,
/// enforces the 20-redirect cap and CORS credential rules, rewrites the
/// method/body for 301/302-POST and 303, then re-enters `main_fetch`.
fn http_redirect_fetch(request: &mut Request,
                       cache: &mut CorsCache,
                       response: Response,
                       cors_flag: bool,
                       target: Target,
                       done_chan: &mut DoneChannel,
                       context: &FetchContext)
                       -> Response {
    // Step 1
    assert!(response.return_internal);

    // Step 2
    if !response.actual_response().headers.has::<Location>() {
        return response;
    }

    // Step 3
    let location = match response.actual_response().headers.get::<Location>() {
        Some(&Location(ref location)) => location.clone(),
        _ => return Response::network_error(NetworkError::Internal("Location header parsing failure".into()))
    };

    // Location may be relative; resolve it against the response's URL.
    let response_url = response.actual_response().url().unwrap();
    let location_url = response_url.join(&*location);
    let location_url = match location_url {
        Ok(url) => url,
        _ => return Response::network_error(NetworkError::Internal("Location URL parsing failure".into()))
    };

    // Step 4
    match location_url.scheme() {
        "http" | "https" => { },
        _ => return Response::network_error(NetworkError::Internal("Not an HTTP(S) Scheme".into()))
    }

    // Step 5
    if request.redirect_count >= 20 {
        return Response::network_error(NetworkError::Internal("Too many redirects".into()));
    }

    // Step 6
    request.redirect_count += 1;

    // Step 7
    let same_origin = location_url.origin() == request.current_url().origin();
    let has_credentials = has_credentials(&location_url);

    if request.mode == RequestMode::CorsMode && !same_origin && has_credentials {
        return Response::network_error(NetworkError::Internal("Cross-origin credentials check failed".into()));
    }

    // Step 8
    if cors_flag && has_credentials {
        return Response::network_error(NetworkError::Internal("Credentials check failed".into()));
    }

    // Step 9
    if cors_flag && !same_origin {
        request.origin = Origin::Origin(ImmutableOrigin::new_opaque());
    }

    // Step 10: 301/302 + POST, or any 303, becomes a bodyless GET.
    let status_code = response.actual_response().status.unwrap();
    if ((status_code == StatusCode::MovedPermanently || status_code == StatusCode::Found) &&
        request.method == Method::Post) ||
       status_code == StatusCode::SeeOther {
        request.method = Method::Get;
        request.body = None;
    }

    // Step 11
    request.url_list.push(location_url);

    // Step 12
    // TODO implement referrer policy

    // Step 13
    main_fetch(request, cache, cors_flag, true, target, done_chan, context)
}
/// Converts a Servo origin into a hyper `Origin` header value.
/// Opaque origins yield `None` for now.
fn try_immutable_origin_to_hyper_origin(url_origin: &ImmutableOrigin) -> Option<HyperOrigin> {
    // TODO (servo/servo#15569) Set "Origin: null" when hyper supports it
    if let ImmutableOrigin::Tuple(ref scheme, ref host, ref port) = *url_origin {
        Some(HyperOrigin::new(scheme.clone(), host.to_string(), Some(port.clone())))
    } else {
        None
    }
}
/// [HTTP network or cache fetch](https://fetch.spec.whatwg.org#http-network-or-cache-fetch)
///
/// Prepares the final outgoing request (Content-Length, Referer,
/// Origin, User-Agent, cache directives, Host, Accept-Encoding, cookies
/// and cached/URL credentials), consults the (not yet implemented) HTTP
/// cache, performs the network fetch, and handles 401/407 responses.
fn http_network_or_cache_fetch(request: &mut Request,
                               authentication_fetch_flag: bool,
                               cors_flag: bool,
                               done_chan: &mut DoneChannel,
                               context: &FetchContext)
                               -> Response {
    // TODO: Implement Window enum for Request
    let request_has_no_window = true;

    // Step 2
    // When the request can be used directly, avoid cloning it; otherwise
    // work on a copy so the caller's request is left untouched.
    let mut http_request;
    let http_request = if request_has_no_window &&
        request.redirect_mode == RedirectMode::Error {
        request
    } else {
        // Step 3
        // TODO Implement body source
        http_request = request.clone();
        &mut http_request
    };

    // Step 4
    let credentials_flag = match http_request.credentials_mode {
        CredentialsMode::Include => true,
        CredentialsMode::CredentialsSameOrigin if http_request.response_tainting == ResponseTainting::Basic
            => true,
        _ => false
    };

    let content_length_value = match http_request.body {
        None =>
            match http_request.method {
                // Step 6
                Method::Post | Method::Put =>
                    Some(0),
                // Step 5
                _ => None
            },
        // Step 7
        Some(ref http_request_body) => Some(http_request_body.len() as u64)
    };

    // Step 8
    if let Some(content_length_value) = content_length_value {
        http_request.headers.set(ContentLength(content_length_value));
    }

    // Step 9 TODO: needs request's client object

    // Step 10
    match http_request.referrer {
        Referrer::NoReferrer => (),
        Referrer::ReferrerUrl(ref http_request_referrer) =>
            http_request.headers.set(Referer(http_request_referrer.to_string())),
        Referrer::Client =>
            // it should be impossible for referrer to be anything else during fetching
            // https://fetch.spec.whatwg.org/#concept-request-referrer
            unreachable!()
    };

    // Step 11
    if !http_request.omit_origin_header {
        let method = &http_request.method;
        if cors_flag || (*method != Method::Get && *method != Method::Head) {
            debug_assert!(http_request.origin != Origin::Client);
            if let Origin::Origin(ref url_origin) = http_request.origin {
                if let Some(hyper_origin) = try_immutable_origin_to_hyper_origin(url_origin) {
                    http_request.headers.set(hyper_origin)
                }
            }
        }
    }

    // Step 12
    if !http_request.headers.has::<UserAgent>() {
        let user_agent = context.user_agent.clone().into_owned();
        http_request.headers.set(UserAgent(user_agent));
    }

    match http_request.cache_mode {
        // Step 13
        CacheMode::Default if is_no_store_cache(&http_request.headers) => {
            http_request.cache_mode = CacheMode::NoStore;
        },

        // Step 14
        CacheMode::NoCache if !http_request.headers.has::<CacheControl>() => {
            http_request.headers.set(CacheControl(vec![CacheDirective::MaxAge(0)]));
        },

        // Step 15
        CacheMode::Reload | CacheMode::NoStore => {
            // Substep 1
            if !http_request.headers.has::<Pragma>() {
                http_request.headers.set(Pragma::NoCache);
            }

            // Substep 2
            if !http_request.headers.has::<CacheControl>() {
                http_request.headers.set(CacheControl(vec![CacheDirective::NoCache]));
            }
        },

        _ => {}
    }

    // Step 16
    let current_url = http_request.current_url();
    let host = Host {
        hostname: current_url.host_str().unwrap().to_owned(),
        port: current_url.port()
    };

    http_request.headers.set(host);
    // unlike http_loader, we should not set the accept header
    // here, according to the fetch spec
    set_default_accept_encoding(&mut http_request.headers);

    // Step 17
    // TODO some of this step can't be implemented yet
    if credentials_flag {
        // Substep 1
        // TODO http://mxr.mozilla.org/servo/source/components/net/http_loader.rs#504
        // XXXManishearth http_loader has block_cookies: support content blocking here too
        set_request_cookies(&current_url,
                            &mut http_request.headers,
                            &context.state.cookie_jar);

        // Substep 2
        if !http_request.headers.has::<Authorization<String>>() {
            // Substep 3
            let mut authorization_value = None;

            // Substep 4
            if let Some(basic) = auth_from_cache(&context.state.auth_cache, &current_url.origin()) {
                if !http_request.use_url_credentials || !has_credentials(&current_url) {
                    authorization_value = Some(basic);
                }
            }

            // Substep 5
            if authentication_fetch_flag && authorization_value.is_none() {
                if has_credentials(&current_url) {
                    authorization_value = Some(Basic {
                        username: current_url.username().to_owned(),
                        password: current_url.password().map(str::to_owned)
                    })
                }
            }

            // Substep 6
            if let Some(basic) = authorization_value {
                http_request.headers.set(Authorization(basic));
            }
        }
    }

    // Step 18
    // TODO If there’s a proxy-authentication entry, use it as appropriate.

    // Step 19
    let mut response: Option<Response> = None;

    // Step 20
    let mut revalidation_needed = false;

    // Step 21
    // TODO have a HTTP cache to check for a completed response
    let complete_http_response_from_cache: Option<Response> = None;
    if http_request.cache_mode != CacheMode::NoStore &&
        http_request.cache_mode != CacheMode::Reload &&
        complete_http_response_from_cache.is_some() {
        // TODO Substep 1 and 2. Select a response from HTTP cache.

        // Substep 3
        if let Some(ref response) = response {
            revalidation_needed = response_needs_revalidation(&response);
        };

        // Substep 4
        if http_request.cache_mode == CacheMode::ForceCache ||
            http_request.cache_mode == CacheMode::OnlyIfCached {
            // TODO pull response from HTTP cache
            // response = http_request
        }

        if revalidation_needed {
            // Substep 5
            // TODO set If-None-Match and If-Modified-Since according to cached
            // response headers.
        } else {
            // Substep 6
            // TODO pull response from HTTP cache
            // response = http_request
            // response.cache_state = CacheState::Local;
        }
    }

    // Step 22
    if response.is_none() {
        // Substep 1
        if http_request.cache_mode == CacheMode::OnlyIfCached {
            return Response::network_error(
                NetworkError::Internal("Couldn't find response in cache".into()))
        }

        // Substep 2
        let forward_response = http_network_fetch(http_request, credentials_flag,
                                                  done_chan, context);
        match forward_response.raw_status {
            // Substep 3
            Some((200...303, _)) |
            Some((305...399, _)) => {
                if !http_request.method.safe() {
                    // TODO Invalidate HTTP cache response
                }
            },
            // Substep 4
            Some((304, _)) => {
                if revalidation_needed {
                    // TODO update forward_response headers with cached response
                    // headers
                }
            },
            _ => {}
        }

        // Substep 5
        if response.is_none() {
            response = Some(forward_response);
        }
    }
    let response = response.unwrap();

    match response.status {
        Some(StatusCode::Unauthorized) => {
            // Step 23
            // FIXME: Figure out what to do with request window objects
            if cors_flag && !credentials_flag {
                return response;
            }

            // Substep 1
            // TODO: Spec says requires testing on multiple WWW-Authenticate headers

            // Substep 2
            if http_request.body.is_some() {
                // TODO Implement body source
            }

            // Substep 3
            if !http_request.use_url_credentials || authentication_fetch_flag {
                // TODO: Prompt the user for username and password from the window
                // Wrong, but will have to do until we are able to prompt the user
                // otherwise this creates an infinite loop
                // We basically pretend that the user declined to enter credentials
                return response;
            }

            // Substep 4
            return http_network_or_cache_fetch(http_request,
                                               true /* authentication flag */,
                                               cors_flag, done_chan, context);
        },
        Some(StatusCode::ProxyAuthenticationRequired) => {
            // Step 24
            // Step 1
            // TODO: Figure out what to do with request window objects

            // Step 2
            // TODO: Spec says requires testing on Proxy-Authenticate headers

            // Step 3
            // TODO: Prompt the user for proxy authentication credentials
            // Wrong, but will have to do until we are able to prompt the user
            // otherwise this creates an infinite loop
            // We basically pretend that the user declined to enter credentials
            return response;

            // Step 4
            // return http_network_or_cache_fetch(request, authentication_fetch_flag,
            //                                    cors_flag, done_chan, context);
        },
        _ => {}
    }

    // Step 25
    if authentication_fetch_flag {
        // TODO Create the authentication entry for request and the given realm
    }

    // Step 26
    response
}
/// [HTTP network fetch](https://fetch.spec.whatwg.org/#http-network-fetch)
///
/// Performs the actual network transaction: sends the request via
/// `obtain_response`, builds the `Response` from the hyper response,
/// installs a body-streaming channel into `done_chan`, and spawns a
/// worker thread that reads/decompresses the body, forwarding chunks to
/// both the shared response body and the channel. Devtools is notified
/// from that worker thread when a devtools channel is configured.
fn http_network_fetch(request: &Request,
                      credentials_flag: bool,
                      done_chan: &mut DoneChannel,
                      context: &FetchContext)
                      -> Response {
    // TODO: Implement HTTP network fetch spec

    // Step 1
    // nothing to do here, since credentials_flag is already a boolean

    // Step 2
    // TODO be able to create connection using current url's origin and credentials

    // Step 3
    // TODO be able to tell if the connection is a failure

    // Step 4
    let factory = NetworkHttpRequestFactory {
        connector: context.connector.clone(),
    };

    let url = request.current_url();

    // A request id is only minted when devtools is listening.
    let request_id = context.devtools_chan.as_ref().map(|_| {
        uuid::Uuid::new_v4().simple().to_string()
    });

    // XHR uses the default destination; other kinds of fetches (which haven't been implemented yet)
    // do not. Once we support other kinds of fetches we'll need to be more fine grained here
    // since things like image fetches are classified differently by devtools
    let is_xhr = request.destination == Destination::None;
    let wrapped_response = obtain_response(&factory, &url, &request.method,
                                           &request.headers,
                                           &request.body, &request.method,
                                           &request.pipeline_id, request.redirect_count + 1,
                                           request_id.as_ref().map(Deref::deref), is_xhr);

    let pipeline_id = request.pipeline_id;
    let (res, msg) = match wrapped_response {
        Ok(wrapped_response) => wrapped_response,
        Err(error) => return Response::network_error(error),
    };

    let mut response = Response::new(url.clone());
    response.status = Some(res.response.status);
    response.raw_status = Some((res.response.status_raw().0,
                                res.response.status_raw().1.as_bytes().to_vec()));
    response.headers = res.response.headers.clone();
    response.referrer = request.referrer.to_url().cloned();

    let res_body = response.body.clone();

    // We're about to spawn a thread to be waited on here
    let (done_sender, done_receiver) = channel();
    *done_chan = Some((done_sender.clone(), done_receiver));
    let meta = match response.metadata().expect("Response metadata should exist at this stage") {
        FetchMetadata::Unfiltered(m) => m,
        FetchMetadata::Filtered { unsafe_, .. } => unsafe_
    };
    let devtools_sender = context.devtools_chan.clone();
    let meta_status = meta.status.clone();
    let meta_headers = meta.headers.clone();
    thread::Builder::new().name(format!("fetch worker thread")).spawn(move || {
        match StreamedResponse::from_http_response(res) {
            Ok(mut res) => {
                *res_body.lock().unwrap() = ResponseBody::Receiving(vec![]);

                if let Some(ref sender) = devtools_sender {
                    if let Some(m) = msg {
                        send_request_to_devtools(m, &sender);
                    }

                    // --- Tell devtools that we got a response
                    // Send an HttpResponse message to devtools with the corresponding request_id
                    if let Some(pipeline_id) = pipeline_id {
                        send_response_to_devtools(
                            &sender, request_id.unwrap(),
                            meta_headers.map(Serde::into_inner),
                            meta_status,
                            pipeline_id);
                    }
                }

                // Stream the (decompressed) body: accumulate into the
                // shared response body and mirror each chunk to done_chan.
                loop {
                    match read_block(&mut res) {
                        Ok(Data::Payload(chunk)) => {
                            if let ResponseBody::Receiving(ref mut body) = *res_body.lock().unwrap() {
                                body.extend_from_slice(&chunk);
                                let _ = done_sender.send(Data::Payload(chunk));
                            }
                        },
                        Ok(Data::Done) | Err(_) => {
                            let mut body = res_body.lock().unwrap();
                            let completed_body = match *body {
                                ResponseBody::Receiving(ref mut body) => {
                                    mem::replace(body, vec![])
                                },
                                _ => vec![],
                            };
                            *body = ResponseBody::Done(completed_body);
                            let _ = done_sender.send(Data::Done);
                            break;
                        }
                    }
                }
            }
            Err(_) => {
                // XXXManishearth we should propagate this error somehow
                *res_body.lock().unwrap() = ResponseBody::Done(vec![]);
                let _ = done_sender.send(Data::Done);
            }
        }
    }).expect("Thread spawning failed");

    // TODO these substeps aren't possible yet
    // Substep 1

    // Substep 2

    // TODO Determine if response was retrieved over HTTPS
    // TODO Servo needs to decide what ciphers are to be treated as "deprecated"
    response.https_state = HttpsState::None;

    // TODO Read request

    // Step 5-9
    // (needs stream bodies)

    // Step 10
    // TODO when https://bugzilla.mozilla.org/show_bug.cgi?id=1030660
    // is resolved, this step will become uneccesary
    // TODO this step
    if let Some(encoding) = response.headers.get::<ContentEncoding>() {
        if encoding.contains(&Encoding::Gzip) {
        }

        else if encoding.contains(&Encoding::Compress) {
        }
    };

    // Step 11
    // TODO this step isn't possible yet (CSP)

    // Step 12
    if response.is_network_error() && request.cache_mode == CacheMode::NoStore {
        // TODO update response in the HTTP cache for request
    }

    // TODO this step isn't possible yet
    // Step 13

    // Step 14.
    if credentials_flag {
        set_cookies_from_headers(&url, &response.headers, &context.state.cookie_jar);
    }

    // TODO these steps
    // Step 15
    // Substep 1
    // Substep 2
    // Sub-substep 1
    // Sub-substep 2
    // Sub-substep 3
    // Sub-substep 4
    // Substep 3

    // Step 16
    response
}
/// [CORS preflight fetch](https://fetch.spec.whatwg.org#cors-preflight-fetch)
///
/// Issues an OPTIONS request mirroring `request`'s metadata. If the server's
/// response passes the CORS check and permits the real request's method and
/// headers, the allowances are recorded in `cache` and the preflight response
/// is returned; otherwise a network error is returned.
fn cors_preflight_fetch(request: &Request,
                        cache: &mut CorsCache,
                        context: &FetchContext)
                        -> Response {
    // Step 1: build the preflight request against the same URL and origin.
    let mut preflight = Request::new(request.current_url(), Some(request.origin.clone()),
                                     request.is_service_worker_global_scope, request.pipeline_id);
    preflight.method = Method::Options;
    preflight.initiator = request.initiator.clone();
    preflight.type_ = request.type_.clone();
    preflight.destination = request.destination.clone();
    preflight.referrer = request.referrer.clone();
    preflight.referrer_policy = request.referrer_policy;
    // Step 2: advertise the real request's method.
    preflight.headers.set::<AccessControlRequestMethod>(
        AccessControlRequestMethod(request.method.clone()));
    // Step 3, 4: collect the non-simple header names, case-insensitively, sorted.
    let mut value = request.headers
        .iter()
        .filter(|view| !is_simple_header(view))
        .map(|view| UniCase(view.name().to_owned()))
        .collect::<Vec<UniCase<String>>>();
    value.sort();
    // Step 5
    preflight.headers.set::<AccessControlRequestHeaders>(
        AccessControlRequestHeaders(value));
    // Step 6: send the preflight (no authentication, no CORS flag, no done channel).
    let response = http_network_or_cache_fetch(&mut preflight, false, false, &mut None, context);
    // Step 7: only a successful response that passes the CORS check is usable.
    if cors_check(&request, &response).is_ok() &&
       response.status.map_or(false, |status| status.is_success()) {
        // Substep 1: methods the server allows.
        let mut methods = if response.headers.has::<AccessControlAllowMethods>() {
            match response.headers.get::<AccessControlAllowMethods>() {
                Some(&AccessControlAllowMethods(ref m)) => m.clone(),
                // Substep 3
                None => return Response::network_error(NetworkError::Internal("CORS ACAM check failed".into()))
            }
        } else {
            vec![]
        };
        // Substep 2: header names the server allows.
        let header_names = if response.headers.has::<AccessControlAllowHeaders>() {
            match response.headers.get::<AccessControlAllowHeaders>() {
                Some(&AccessControlAllowHeaders(ref hn)) => hn.clone(),
                // Substep 3
                None => return Response::network_error(NetworkError::Internal("CORS ACAH check failed".into()))
            }
        } else {
            vec![]
        };
        // Substep 4: an empty allow-list defaults to the request's own method.
        if methods.is_empty() && request.use_cors_preflight {
            methods = vec![request.method.clone()];
        }
        // Substep 5: the real method must be allowed or simple.
        debug!("CORS check: Allowed methods: {:?}, current method: {:?}",
               methods, request.method);
        if methods.iter().all(|method| *method != request.method) &&
           !is_simple_method(&request.method) {
            return Response::network_error(NetworkError::Internal("CORS method check failed".into()));
        }
        // Substep 6: every non-simple request header must be in the allow-list.
        debug!("CORS check: Allowed headers: {:?}, current headers: {:?}", header_names, request.headers);
        let set: HashSet<&UniCase<String>> = HashSet::from_iter(header_names.iter());
        if request.headers.iter().any(|ref hv| !set.contains(&UniCase(hv.name().to_owned())) && !is_simple_header(hv)) {
            return Response::network_error(NetworkError::Internal("CORS headers check failed".into()));
        }
        // Substep 7, 8: cache lifetime from Access-Control-Max-Age (0 if absent).
        let max_age = response.headers.get::<AccessControlMaxAge>().map(|acma| acma.0).unwrap_or(0);
        // TODO: Substep 9 - Need to define what an imposed limit on max-age is
        // Substep 11, 12: record each allowed method in the preflight cache.
        for method in &methods {
            cache.match_method_and_update(&*request, method.clone(), max_age);
        }
        // Substep 13, 14: record each allowed header in the preflight cache.
        for header_name in &header_names {
            cache.match_header_and_update(&*request, &*header_name, max_age);
        }
        // Substep 15
        return response;
    }
    // Step 8
    Response::network_error(NetworkError::Internal("CORS check failed".into()))
}
/// [CORS check](https://fetch.spec.whatwg.org#concept-cors-check)
///
/// Verifies that `response`'s Access-Control-Allow-* headers permit
/// `request`'s origin and credentials mode. `Ok(())` means the check passed.
fn cors_check(request: &Request, response: &Response) -> Result<(), ()> {
    // Step 1: read the Access-Control-Allow-Origin header.
    let allow_origin = match response.headers.get::<AccessControlAllowOrigin>().cloned() {
        Some(header) => header,
        // Step 2: a missing header fails the check.
        None => return Err(()),
    };
    // Step 3: a wildcard is acceptable as long as credentials are not included.
    if allow_origin == AccessControlAllowOrigin::Any &&
       request.credentials_mode != CredentialsMode::Include {
        return Ok(());
    }
    // Step 4: extract the concrete allowed origin; Any or Null cannot match here.
    let allowed = match allow_origin {
        AccessControlAllowOrigin::Value(value) => value,
        _ => return Err(()),
    };
    let origin_matches = match request.origin {
        Origin::Origin(ref o) => o.ascii_serialization() == allowed,
        _ => false,
    };
    if !origin_matches {
        return Err(());
    }
    // Step 5: without credentials, a matching origin is sufficient.
    if request.credentials_mode != CredentialsMode::Include {
        return Ok(());
    }
    // Steps 6-8: credentials are in play, so the server must explicitly allow them.
    match response.headers.get::<AccessControlAllowCredentials>() {
        Some(_) => Ok(()),
        None => Err(()),
    }
}
/// Returns true if the URL embeds credentials (a non-empty username or
/// any password component).
fn has_credentials(url: &ServoUrl) -> bool {
    let has_username = !url.username().is_empty();
    let has_password = url.password().is_some();
    has_username || has_password
}
/// Returns true if the request carries any conditional-request header
/// (If-Modified-Since, If-None-Match, If-Unmodified-Since, If-Match,
/// If-Range), in which case the cache must be bypassed.
fn is_no_store_cache(headers: &Headers) -> bool {
    // Use short-circuiting `||` rather than bitwise `|`: once one
    // conditional header is found the remaining lookups are unnecessary,
    // and `||` is the idiomatic boolean operator here.
    headers.has::<IfModifiedSince>() || headers.has::<IfNoneMatch>() ||
    headers.has::<IfUnmodifiedSince>() || headers.has::<IfMatch>() ||
    headers.has::<IfRange>()
}
/// Whether a cached response must be revalidated with the origin server
/// before use.
fn response_needs_revalidation(_response: &Response) -> bool {
    // TODO this function: it should inspect the cached response's
    // freshness/validation headers once an HTTP cache exists; until then
    // nothing is ever considered to need revalidation.
    false
}
/// https://fetch.spec.whatwg.org/#redirect-status
///
/// The redirect statuses are 301, 302, 303, 307 and 308.
pub fn is_redirect_status(status: StatusCode) -> bool {
    status == StatusCode::MovedPermanently ||
        status == StatusCode::Found ||
        status == StatusCode::SeeOther ||
        status == StatusCode::TemporaryRedirect ||
        status == StatusCode::PermanentRedirect
}
// Remove unnecessary NetworkHttpRequestFactory abstraction.
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use brotli::Decompressor;
use connector::Connector;
use cookie;
use cookie_storage::CookieStorage;
use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg, HttpRequest as DevtoolsHttpRequest};
use devtools_traits::{HttpResponse as DevtoolsHttpResponse, NetworkEvent};
use fetch::cors_cache::CorsCache;
use fetch::methods::{Data, DoneChannel, FetchContext, Target, is_simple_header, is_simple_method, main_fetch};
use flate2::read::{DeflateDecoder, GzDecoder};
use hsts::HstsList;
use hyper::Error as HttpError;
use hyper::LanguageTag;
use hyper::client::{Pool, Request as HyperRequest, Response as HyperResponse};
use hyper::header::{Accept, AccessControlAllowCredentials, AccessControlAllowHeaders};
use hyper::header::{AccessControlAllowMethods, AccessControlAllowOrigin};
use hyper::header::{AccessControlMaxAge, AccessControlRequestHeaders};
use hyper::header::{AccessControlRequestMethod, AcceptEncoding, AcceptLanguage};
use hyper::header::{Authorization, Basic, CacheControl, CacheDirective};
use hyper::header::{ContentEncoding, ContentLength, Encoding, Header, Headers};
use hyper::header::{Host, Origin as HyperOrigin, IfMatch, IfRange};
use hyper::header::{IfUnmodifiedSince, IfModifiedSince, IfNoneMatch, Location};
use hyper::header::{Pragma, Quality, QualityItem, Referer, SetCookie};
use hyper::header::{UserAgent, q, qitem};
use hyper::method::Method;
use hyper::status::StatusCode;
use hyper_serde::Serde;
use log;
use msg::constellation_msg::PipelineId;
use net_traits::{CookieSource, FetchMetadata, NetworkError, ReferrerPolicy};
use net_traits::request::{CacheMode, CredentialsMode, Destination, Origin};
use net_traits::request::{RedirectMode, Referrer, Request, RequestMode};
use net_traits::request::{ResponseTainting, Type};
use net_traits::response::{HttpsState, Response, ResponseBody, ResponseType};
use resource_thread::AuthCache;
use servo_url::{ImmutableOrigin, ServoUrl};
use std::collections::HashSet;
use std::error::Error;
use std::io::{self, Read, Write};
use std::iter::FromIterator;
use std::mem;
use std::ops::Deref;
use std::sync::{Arc, RwLock};
use std::sync::mpsc::{channel, Sender};
use std::thread;
use time;
use time::Tm;
use unicase::UniCase;
use uuid;
/// Reads up to 1024 bytes from `reader`, classifying the outcome as a
/// payload chunk, end-of-stream (`Data::Done` on a zero-length read), or
/// an error.
fn read_block<R: Read>(reader: &mut R) -> Result<Data, ()> {
    let mut chunk = vec![0; 1024];
    let bytes_read = match reader.read(&mut chunk) {
        Ok(n) => n,
        Err(_) => return Err(()),
    };
    if bytes_read == 0 {
        Ok(Data::Done)
    } else {
        // Trim the buffer to the bytes actually read before handing it out.
        chunk.truncate(bytes_read);
        Ok(Data::Payload(chunk))
    }
}
/// Mutable state shared between HTTP requests: HSTS entries, the cookie
/// jar, and cached authentication credentials. Each piece is individually
/// lockable so independent requests only contend on what they touch.
pub struct HttpState {
    pub hsts_list: RwLock<HstsList>,
    pub cookie_jar: RwLock<CookieStorage>,
    pub auth_cache: RwLock<AuthCache>,
}

impl HttpState {
    /// Creates an empty state. The cookie jar is capped at 150 cookies.
    pub fn new() -> HttpState {
        HttpState {
            hsts_list: RwLock::new(HstsList::new()),
            cookie_jar: RwLock::new(CookieStorage::new(150)),
            auth_cache: RwLock::new(AuthCache::new()),
        }
    }
}

impl Default for HttpState {
    // Types with a zero-argument `new` should also implement `Default`
    // (clippy: new_without_default); this simply delegates.
    fn default() -> HttpState {
        HttpState::new()
    }
}
/// High-resolution timestamp in milliseconds, derived from
/// `time::precise_time_ns`.
fn precise_time_ms() -> u64 {
    const NS_PER_MS: u64 = 1000 * 1000;
    time::precise_time_ns() / NS_PER_MS
}
/// Thin wrapper around a hyper response so that the decoding layer can
/// own a `Read` implementor.
pub struct WrappedHttpResponse {
    pub response: HyperResponse
}

impl Read for WrappedHttpResponse {
    // Delegates directly to the underlying hyper response body.
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.response.read(buf)
    }
}
impl WrappedHttpResponse {
    /// Borrows the response headers.
    fn headers(&self) -> &Headers {
        &self.response.headers
    }

    /// Determines which supported content coding applies to the body.
    /// Checked in priority order: gzip, then deflate, then brotli ("br").
    /// Returns `None` when the header is absent or no coding is recognized.
    fn content_encoding(&self) -> Option<Encoding> {
        let encodings = match self.headers().get::<ContentEncoding>() {
            Some(&ContentEncoding(ref list)) => list,
            None => return None,
        };
        let candidates = [
            Encoding::Gzip,
            Encoding::Deflate,
            Encoding::EncodingExt("br".to_owned()),
        ];
        candidates.iter()
                  .find(|candidate| encodings.contains(*candidate))
                  .cloned()
    }
}
// Step 3 of https://fetch.spec.whatwg.org/#concept-fetch.
/// Fills in a default `Accept` header based on the request type and
/// destination, unless the caller already provided one.
pub fn set_default_accept(type_: Type, destination: Destination, headers: &mut Headers) {
    // An explicitly supplied Accept header always wins.
    if headers.has::<Accept>() {
        return;
    }
    let value = match (type_, destination) {
        // Step 3.2. Document navigations prefer HTML.
        (_, Destination::Document) => {
            vec![
                qitem(mime!(Text / Html)),
                qitem(mime!(Application / ("xhtml+xml"))),
                QualityItem::new(mime!(Application / Xml), q(0.9)),
                QualityItem::new(mime!(_ / _), q(0.8)),
            ]
        },
        // Step 3.3. Image loads prefer image types.
        (Type::Image, _) => {
            vec![
                qitem(mime!(Image / Png)),
                qitem(mime!(Image / ("svg+xml") )),
                QualityItem::new(mime!(Image / _), q(0.8)),
                QualityItem::new(mime!(_ / _), q(0.5)),
            ]
        },
        // Step 3.3. Stylesheet loads prefer CSS.
        (Type::Style, _) => {
            vec![
                qitem(mime!(Text / Css)),
                QualityItem::new(mime!(_ / _), q(0.1))
            ]
        },
        // Step 3.1. Everything else accepts anything.
        _ => {
            vec![qitem(mime!(_ / _))]
        },
    };
    // Step 3.4.
    headers.set(Accept(value));
}
/// Adds a default `Accept-Encoding` header advertising gzip, deflate and
/// brotli, unless the caller already set one.
fn set_default_accept_encoding(headers: &mut Headers) {
    if !headers.has::<AcceptEncoding>() {
        let supported = vec![
            qitem(Encoding::Gzip),
            qitem(Encoding::Deflate),
            qitem(Encoding::EncodingExt("br".to_owned())),
        ];
        headers.set(AcceptEncoding(supported));
    }
}
pub fn set_default_accept_language(headers: &mut Headers) {
if headers.has::<AcceptLanguage>() {
return;
}
let mut en_us: LanguageTag = Default::default();
en_us.language = Some("en".to_owned());
en_us.region = Some("US".to_owned());
let mut en: LanguageTag = Default::default();
en.language = Some("en".to_owned());
headers.set(AcceptLanguage(vec![
qitem(en_us),
QualityItem::new(en, Quality(500)),
]));
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-state-no-referrer-when-downgrade
///
/// Omits the referrer entirely when navigating from an HTTPS referrer to a
/// non-HTTPS target (a "downgrade"); otherwise sends the full referrer URL
/// stripped of credentials and fragment.
fn no_referrer_when_downgrade_header(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
    if referrer_url.scheme() == "https" && url.scheme() != "https" {
        return None;
    }
    // Idiom fix: the tail expression replaces the previous needless
    // `return` statement (clippy: needless_return).
    strip_url(referrer_url, false)
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-strict-origin
///
/// Sends only the referrer's origin, and nothing at all on an
/// https -> non-https downgrade.
fn strict_origin(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
    let is_downgrade = referrer_url.scheme() == "https" && url.scheme() != "https";
    if is_downgrade {
        None
    } else {
        strip_url(referrer_url, true)
    }
}
/// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-strict-origin-when-cross-origin
///
/// Sends the full referrer same-origin, only the origin cross-origin, and
/// nothing on an https -> non-https downgrade.
fn strict_origin_when_cross_origin(referrer_url: ServoUrl, url: ServoUrl) -> Option<ServoUrl> {
    if referrer_url.scheme() == "https" && url.scheme() != "https" {
        return None;
    }
    let same_origin = referrer_url.origin() == url.origin();
    strip_url(referrer_url, !same_origin)
}
/// https://w3c.github.io/webappsec-referrer-policy/#strip-url
///
/// Removes credentials and fragment from an http(s) referrer URL; with
/// `origin_only` it also drops the path and query. Non-http(s) schemes
/// yield `None`.
fn strip_url(mut referrer_url: ServoUrl, origin_only: bool) -> Option<ServoUrl> {
    let scheme_ok = referrer_url.scheme() == "https" || referrer_url.scheme() == "http";
    if !scheme_ok {
        return None;
    }
    {
        // Mutate in an inner scope so the `&mut` borrow ends before
        // `referrer_url` is moved into the return value.
        let referrer = referrer_url.as_mut_url();
        referrer.set_username("").unwrap();
        referrer.set_password(None).unwrap();
        referrer.set_fragment(None);
        if origin_only {
            referrer.set_path("");
            referrer.set_query(None);
        }
    }
    Some(referrer_url)
}
/// https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer
/// Steps 4-6.
///
/// Computes the `Referer` value to use (if any) from the policy, the
/// referrer source URL, and the URL being requested. Returns `None` when
/// no referrer should be sent.
pub fn determine_request_referrer(headers: &mut Headers,
                                  referrer_policy: ReferrerPolicy,
                                  referrer_source: ServoUrl,
                                  current_url: ServoUrl)
                                  -> Option<ServoUrl> {
    // The caller must not have set a Referer header already.
    assert!(!headers.has::<Referer>());
    // FIXME(#14505): this does not seem to be the correct way of checking for
    // same-origin requests.
    let cross_origin = referrer_source.origin() != current_url.origin();
    // FIXME(#14506): some of these cases are expected to consider whether the
    // request's client is "TLS-protected", whatever that means.
    match referrer_policy {
        ReferrerPolicy::NoReferrer => None,
        ReferrerPolicy::Origin => strip_url(referrer_source, true),
        ReferrerPolicy::SameOrigin => if cross_origin { None } else { strip_url(referrer_source, false) },
        ReferrerPolicy::UnsafeUrl => strip_url(referrer_source, false),
        ReferrerPolicy::OriginWhenCrossOrigin => strip_url(referrer_source, cross_origin),
        ReferrerPolicy::StrictOrigin => strict_origin(referrer_source, current_url),
        ReferrerPolicy::StrictOriginWhenCrossOrigin => strict_origin_when_cross_origin(referrer_source, current_url),
        ReferrerPolicy::NoReferrerWhenDowngrade => no_referrer_when_downgrade_header(referrer_source, current_url),
    }
}
/// Sets the raw `Cookie` request header for `url` from the shared cookie
/// jar, if any stored cookies apply to this URL for HTTP requests.
pub fn set_request_cookies(url: &ServoUrl, headers: &mut Headers, cookie_jar: &RwLock<CookieStorage>) {
    let mut cookie_jar = cookie_jar.write().unwrap();
    if let Some(cookie_list) = cookie_jar.cookies_for_url(url, CookieSource::HTTP) {
        // `set_raw` takes one byte-vector per header line; `vec![..]`
        // replaces the previous manual `Vec::new()` + `push` pair.
        headers.set_raw("Cookie".to_owned(), vec![cookie_list.into_bytes()]);
    }
}
/// Parses `cookie_val` as a Set-Cookie header value and stores every
/// cookie that is valid for `request` in the jar. Unparseable headers and
/// rejected cookies are silently skipped.
fn set_cookie_for_url(cookie_jar: &RwLock<CookieStorage>,
                      request: &ServoUrl,
                      cookie_val: String) {
    let mut jar = cookie_jar.write().unwrap();
    let header = Header::parse_header(&[cookie_val.into_bytes()]);
    if let Ok(SetCookie(cookies)) = header {
        for cookie_str in cookies {
            let parsed = cookie::Cookie::from_cookie_string(cookie_str, request,
                                                            CookieSource::HTTP);
            if let Some(cookie) = parsed {
                jar.push(cookie, request, CookieSource::HTTP);
            }
        }
    }
}
/// Feeds every `Set-Cookie` header in `headers` into the cookie jar for
/// `url`. Header lines that are not valid UTF-8 are ignored.
fn set_cookies_from_headers(url: &ServoUrl, headers: &Headers, cookie_jar: &RwLock<CookieStorage>) {
    let raw_cookies = match headers.get_raw("set-cookie") {
        Some(raw) => raw,
        None => return,
    };
    for raw in raw_cookies.iter() {
        if let Ok(cookie_value) = String::from_utf8(raw.clone()) {
            set_cookie_for_url(cookie_jar, url, cookie_value);
        }
    }
}
/// A response body stream with transparent content-decoding.
struct StreamedResponse {
    // Holds whichever decoder the response's Content-Encoding requires.
    decoder: Decoder,
}

impl Read for StreamedResponse {
    #[inline]
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        // Delegate to the active decoder variant.
        match self.decoder {
            Decoder::Gzip(ref mut d) => d.read(buf),
            Decoder::Deflate(ref mut d) => d.read(buf),
            Decoder::Brotli(ref mut d) => d.read(buf),
            Decoder::Plain(ref mut d) => d.read(buf)
        }
    }
}

impl StreamedResponse {
    /// Chooses a decoder based on the response's Content-Encoding header
    /// and wraps the response in it. Errors only if constructing the gzip
    /// decoder fails (its constructor is fallible in this flate2 version).
    fn from_http_response(response: WrappedHttpResponse) -> io::Result<StreamedResponse> {
        let decoder = match response.content_encoding() {
            Some(Encoding::Gzip) => {
                Decoder::Gzip(try!(GzDecoder::new(response)))
            }
            Some(Encoding::Deflate) => {
                Decoder::Deflate(DeflateDecoder::new(response))
            }
            Some(Encoding::EncodingExt(ref ext)) if ext == "br" => {
                Decoder::Brotli(Decompressor::new(response, 1024))
            }
            _ => {
                // No (or unrecognized) encoding: pass bytes through untouched.
                Decoder::Plain(response)
            }
        };
        Ok(StreamedResponse { decoder: decoder })
    }
}

/// The supported content-decoding backends, each owning the raw response.
enum Decoder {
    Gzip(GzDecoder<WrappedHttpResponse>),
    Deflate(DeflateDecoder<WrappedHttpResponse>),
    Brotli(Decompressor<WrappedHttpResponse>),
    Plain(WrappedHttpResponse)
}
/// Builds the devtools control message describing an outgoing HTTP
/// request. `connect_time` and `send_time` are the elapsed milliseconds
/// measured by the caller; `now` is the request start time.
fn prepare_devtools_request(request_id: String,
                            url: ServoUrl,
                            method: Method,
                            headers: Headers,
                            body: Option<Vec<u8>>,
                            pipeline_id: PipelineId,
                            now: Tm,
                            connect_time: u64,
                            send_time: u64,
                            is_xhr: bool) -> ChromeToDevtoolsControlMsg {
    let request = DevtoolsHttpRequest {
        url: url,
        method: method,
        headers: headers,
        body: body,
        pipeline_id: pipeline_id,
        startedDateTime: now,
        // Devtools wants the start time both as a Tm and as epoch seconds.
        timeStamp: now.to_timespec().sec,
        connect_time: connect_time,
        send_time: send_time,
        is_xhr: is_xhr,
    };
    let net_event = NetworkEvent::HttpRequest(request);
    ChromeToDevtoolsControlMsg::NetworkEvent(request_id, net_event)
}
/// Forwards a network event message to the devtools server.
///
/// Panics if the devtools channel has been disconnected; the `expect`
/// message makes that failure diagnosable (previously this was a bare
/// `unwrap`, which panicked without context).
fn send_request_to_devtools(msg: ChromeToDevtoolsControlMsg,
                            devtools_chan: &Sender<DevtoolsControlMsg>) {
    devtools_chan.send(DevtoolsControlMsg::FromChrome(msg))
                 .expect("Devtools channel disconnected");
}
/// Forwards an HTTP response event to the devtools server. Delivery
/// failures are deliberately ignored (`let _`), matching the original
/// best-effort behaviour.
fn send_response_to_devtools(devtools_chan: &Sender<DevtoolsControlMsg>,
                             request_id: String,
                             headers: Option<Headers>,
                             status: Option<(u16, Vec<u8>)>,
                             pipeline_id: PipelineId) {
    let http_response = DevtoolsHttpResponse {
        headers: headers,
        status: status,
        body: None,
        pipeline_id: pipeline_id,
    };
    let event = NetworkEvent::HttpResponse(http_response);
    let msg = ChromeToDevtoolsControlMsg::NetworkEvent(request_id, event);
    let _ = devtools_chan.send(DevtoolsControlMsg::FromChrome(msg));
}
/// Looks up stored HTTP Basic credentials for the given origin, returning
/// a ready-to-send `Basic` header value if an entry exists.
fn auth_from_cache(auth_cache: &RwLock<AuthCache>, origin: &ImmutableOrigin) -> Option<Basic> {
    let serialized_origin = origin.ascii_serialization();
    let cache = auth_cache.read().unwrap();
    cache.entries.get(&serialized_origin).map(|entry| Basic {
        username: entry.user_name.clone(),
        password: Some(entry.password.clone()),
    })
}
/// Performs the actual hyper network request, retrying when a pooled
/// connection turns out to be stale.
///
/// Returns the wrapped response plus, when both `request_id` and
/// `pipeline_id` are present, a devtools message describing the request.
/// `iters` is the attempt count across redirects (1 for the first
/// request); request bodies are not re-sent after a redirect.
fn obtain_response(connector: Arc<Pool<Connector>>,
                   url: &ServoUrl,
                   method: &Method,
                   request_headers: &Headers,
                   data: &Option<Vec<u8>>,
                   load_data_method: &Method,
                   pipeline_id: &Option<PipelineId>,
                   iters: u32,
                   request_id: Option<&str>,
                   is_xhr: bool)
                   -> Result<(WrappedHttpResponse, Option<ChromeToDevtoolsControlMsg>), NetworkError> {
    let null_data = None;
    // loop trying connections in connection pool
    // they may have grown stale (disconnected), in which case we'll get
    // a ConnectionAborted error. this loop tries again with a new
    // connection.
    loop {
        let mut headers = request_headers.clone();
        // Avoid automatically sending request body if a redirect has occurred.
        //
        // TODO - This is the wrong behaviour according to the RFC. However, I'm not
        // sure how much "correctness" vs. real-world is important in this case.
        //
        // https://tools.ietf.org/html/rfc7231#section-6.4
        let is_redirected_request = iters != 1;
        let request_body;
        match data {
            &Some(ref d) if !is_redirected_request => {
                headers.set(ContentLength(d.len() as u64));
                request_body = data;
            }
            _ => {
                // Send an explicit Content-Length: 0 for bodyless requests
                // other than GET and HEAD.
                if *load_data_method != Method::Get && *load_data_method != Method::Head {
                    headers.set(ContentLength(0))
                }
                request_body = &null_data;
            }
        }
        if log_enabled!(log::LogLevel::Info) {
            info!("{} {}", method, url);
            for header in headers.iter() {
                info!(" - {}", header);
            }
            info!("{:?}", data);
        }
        // Timing checkpoints (milliseconds) reported to devtools below.
        let connect_start = precise_time_ms();
        let request = HyperRequest::with_connector(method.clone(),
                                                   url.clone().into_url(),
                                                   &*connector);
        let mut request = match request {
            Ok(request) => request,
            Err(e) => return Err(NetworkError::from_hyper_error(&url, e)),
        };
        *request.headers_mut() = headers.clone();
        let connect_end = precise_time_ms();
        let send_start = precise_time_ms();
        // Switch the request into streaming mode so the body can be written.
        let mut request_writer = match request.start() {
            Ok(streaming) => streaming,
            Err(e) => return Err(NetworkError::Internal(e.description().to_owned())),
        };
        if let Some(ref data) = *request_body {
            if let Err(e) = request_writer.write_all(&data) {
                return Err(NetworkError::Internal(e.description().to_owned()))
            }
        }
        let response = match request_writer.send() {
            Ok(w) => w,
            // A stale pooled connection aborted/reset: retry with a fresh one.
            Err(HttpError::Io(ref io_error))
                if io_error.kind() == io::ErrorKind::ConnectionAborted ||
                   io_error.kind() == io::ErrorKind::ConnectionReset => {
                debug!("connection aborted ({:?}), possibly stale, trying new connection", io_error.description());
                continue;
            },
            Err(e) => return Err(NetworkError::Internal(e.description().to_owned())),
        };
        let send_end = precise_time_ms();
        // Only build a devtools message when both identifiers are available.
        let msg = if let Some(request_id) = request_id {
            if let Some(pipeline_id) = *pipeline_id {
                Some(prepare_devtools_request(
                    request_id.into(),
                    url.clone(), method.clone(), headers,
                    request_body.clone(), pipeline_id, time::now(),
                    connect_end - connect_start, send_end - send_start, is_xhr))
            } else {
                debug!("Not notifying devtools (no pipeline_id)");
                None
            }
        } else {
            debug!("Not notifying devtools (no request_id)");
            None
        };
        return Ok((WrappedHttpResponse { response: response }, msg));
    }
}
/// [HTTP fetch](https://fetch.spec.whatwg.org#http-fetch)
///
/// Drives an HTTP request through the service-worker / preflight / network
/// layers, then handles redirect (3xx) and authentication (401/407)
/// statuses, recursing with `authentication_fetch_flag` set for a single
/// credentialed retry on 401.
pub fn http_fetch(request: &mut Request,
                  cache: &mut CorsCache,
                  cors_flag: bool,
                  cors_preflight_flag: bool,
                  authentication_fetch_flag: bool,
                  target: Target,
                  done_chan: &mut DoneChannel,
                  context: &FetchContext)
                  -> Response {
    // This is a new async fetch, reset the channel we are waiting on
    *done_chan = None;
    // Step 1
    let mut response: Option<Response> = None;
    // Step 2
    // nothing to do, since actual_response is a function on response
    // Step 3
    if !request.skip_service_worker && !request.is_service_worker_global_scope {
        // Substep 1
        // TODO (handle fetch unimplemented)
        if let Some(ref res) = response {
            // Substep 2
            // nothing to do, since actual_response is a function on response
            // Substep 3
            if (res.response_type == ResponseType::Opaque &&
                request.mode != RequestMode::NoCors) ||
               (res.response_type == ResponseType::OpaqueRedirect &&
                request.redirect_mode != RedirectMode::Manual) ||
               (res.url_list.len() > 1 && request.redirect_mode != RedirectMode::Follow) ||
               res.is_network_error() {
                return Response::network_error(NetworkError::Internal("Request failed".into()));
            }
            // Substep 4
            // TODO: set response's CSP list on actual_response
        }
    }
    // Step 4: whether credentials apply to this request.
    let credentials = match request.credentials_mode {
        CredentialsMode::Include => true,
        CredentialsMode::CredentialsSameOrigin if request.response_tainting == ResponseTainting::Basic
            => true,
        _ => false
    };
    // Step 5
    if response.is_none() {
        // Substep 1: run a CORS preflight when the method or any header is
        // neither simple nor already recorded in the preflight cache.
        if cors_preflight_flag {
            let method_cache_match = cache.match_method(&*request,
                                                        request.method.clone());
            let method_mismatch = !method_cache_match && (!is_simple_method(&request.method) ||
                                                          request.use_cors_preflight);
            let header_mismatch = request.headers.iter().any(|view|
                !cache.match_header(&*request, view.name()) && !is_simple_header(&view)
            );
            // Sub-substep 1
            if method_mismatch || header_mismatch {
                let preflight_result = cors_preflight_fetch(&request, cache, context);
                // Sub-substep 2: a failed preflight fails the whole fetch.
                if let Some(e) = preflight_result.get_network_error() {
                    return Response::network_error(e.clone());
                }
            }
        }
        // Substep 2
        request.skip_service_worker = true;
        // Substep 3
        let mut fetch_result = http_network_or_cache_fetch(
            request, authentication_fetch_flag, cors_flag, done_chan, context);
        // Substep 4
        if cors_flag && cors_check(&request, &fetch_result).is_err() {
            return Response::network_error(NetworkError::Internal("CORS check failed".into()));
        }
        fetch_result.return_internal = false;
        response = Some(fetch_result);
    }
    // response is guaranteed to be something by now
    let mut response = response.unwrap();
    // Step 5
    match response.actual_response().status {
        // Code 301, 302, 303, 307, 308
        status if status.map_or(false, is_redirect_status) => {
            response = match request.redirect_mode {
                RedirectMode::Error => Response::network_error(NetworkError::Internal("Redirect mode error".into())),
                RedirectMode::Manual => {
                    response.to_filtered(ResponseType::OpaqueRedirect)
                },
                RedirectMode::Follow => {
                    // set back to default
                    response.return_internal = true;
                    http_redirect_fetch(request, cache, response,
                                        cors_flag, target, done_chan, context)
                }
            }
        },
        // Code 401
        Some(StatusCode::Unauthorized) => {
            // Step 1
            // FIXME: Figure out what to do with request window objects
            if cors_flag || !credentials {
                return response;
            }
            // Step 2
            // TODO: Spec says requires testing on multiple WWW-Authenticate headers
            // Step 3
            if !request.use_url_credentials || authentication_fetch_flag {
                // TODO: Prompt the user for username and password from the window
                // Wrong, but will have to do until we are able to prompt the user
                // otherwise this creates an infinite loop
                // We basically pretend that the user declined to enter credentials
                return response;
            }
            // Step 4: retry exactly once with the authentication fetch flag set.
            return http_fetch(request, cache, cors_flag, cors_preflight_flag,
                              true, target, done_chan, context);
        }
        // Code 407
        Some(StatusCode::ProxyAuthenticationRequired) => {
            // Step 1
            // TODO: Figure out what to do with request window objects
            // Step 2
            // TODO: Spec says requires testing on Proxy-Authenticate headers
            // Step 3
            // TODO: Prompt the user for proxy authentication credentials
            // Wrong, but will have to do until we are able to prompt the user
            // otherwise this creates an infinite loop
            // We basically pretend that the user declined to enter credentials
            return response;
            // Step 4
            // return http_fetch(request, cache,
            // cors_flag, cors_preflight_flag,
            // authentication_fetch_flag, target,
            // done_chan, context);
        }
        _ => { }
    }
    // Step 6
    if authentication_fetch_flag {
        // TODO: Create authentication entry for this request
    }
    // set back to default
    response.return_internal = true;
    // Step 7
    response
}
/// [HTTP redirect fetch](https://fetch.spec.whatwg.org#http-redirect-fetch)
///
/// Follows the `Location` header of `response`, applying the spec's
/// origin/credentials checks, then re-enters `main_fetch` for the new URL.
fn http_redirect_fetch(request: &mut Request,
                       cache: &mut CorsCache,
                       response: Response,
                       cors_flag: bool,
                       target: Target,
                       done_chan: &mut DoneChannel,
                       context: &FetchContext)
                       -> Response {
    // Step 1
    assert!(response.return_internal);
    // Step 2: no Location header means there is nothing to follow.
    if !response.actual_response().headers.has::<Location>() {
        return response;
    }
    // Step 3: resolve the Location value against the response's URL.
    let location = match response.actual_response().headers.get::<Location>() {
        Some(&Location(ref location)) => location.clone(),
        _ => return Response::network_error(NetworkError::Internal("Location header parsing failure".into()))
    };
    let response_url = response.actual_response().url().unwrap();
    let location_url = response_url.join(&*location);
    let location_url = match location_url {
        Ok(url) => url,
        _ => return Response::network_error(NetworkError::Internal("Location URL parsing failure".into()))
    };
    // Step 4: only http(s) redirect targets are followed.
    match location_url.scheme() {
        "http" | "https" => { },
        _ => return Response::network_error(NetworkError::Internal("Not an HTTP(S) Scheme".into()))
    }
    // Step 5: cap redirect chains at 20 hops.
    if request.redirect_count >= 20 {
        return Response::network_error(NetworkError::Internal("Too many redirects".into()));
    }
    // Step 6
    request.redirect_count += 1;
    // Step 7: CORS requests may not redirect cross-origin to a URL that
    // embeds credentials.
    let same_origin = location_url.origin() == request.current_url().origin();
    let has_credentials = has_credentials(&location_url);
    if request.mode == RequestMode::CorsMode && !same_origin && has_credentials {
        return Response::network_error(NetworkError::Internal("Cross-origin credentials check failed".into()));
    }
    // Step 8
    if cors_flag && has_credentials {
        return Response::network_error(NetworkError::Internal("Credentials check failed".into()));
    }
    // Step 9: a cross-origin CORS redirect makes the request's origin opaque.
    if cors_flag && !same_origin {
        request.origin = Origin::Origin(ImmutableOrigin::new_opaque());
    }
    // Step 10: 301/302 on POST, and 303 on anything, become a bodyless GET.
    // NOTE(review): the `unwrap` relies on callers only invoking this for
    // redirect statuses (http_fetch checks `is_redirect_status` first).
    let status_code = response.actual_response().status.unwrap();
    if ((status_code == StatusCode::MovedPermanently || status_code == StatusCode::Found) &&
        request.method == Method::Post) ||
       status_code == StatusCode::SeeOther {
        request.method = Method::Get;
        request.body = None;
    }
    // Step 11
    request.url_list.push(location_url);
    // Step 12
    // TODO implement referrer policy
    // Step 13
    main_fetch(request, cache, cors_flag, true, target, done_chan, context)
}
/// Converts a Servo origin into hyper's `Origin` header value, if possible.
fn try_immutable_origin_to_hyper_origin(url_origin: &ImmutableOrigin) -> Option<HyperOrigin> {
    match *url_origin {
        ImmutableOrigin::Tuple(ref scheme, ref host, ref port) => {
            Some(HyperOrigin::new(scheme.clone(), host.to_string(), Some(port.clone())))
        },
        // TODO (servo/servo#15569) Set "Origin: null" when hyper supports it
        ImmutableOrigin::Opaque(_) => None,
    }
}
/// [HTTP network or cache fetch](https://fetch.spec.whatwg.org#http-network-or-cache-fetch)
fn http_network_or_cache_fetch(request: &mut Request,
authentication_fetch_flag: bool,
cors_flag: bool,
done_chan: &mut DoneChannel,
context: &FetchContext)
-> Response {
// TODO: Implement Window enum for Request
let request_has_no_window = true;
// Step 2
let mut http_request;
let http_request = if request_has_no_window &&
request.redirect_mode == RedirectMode::Error {
request
} else {
// Step 3
// TODO Implement body source
http_request = request.clone();
&mut http_request
};
// Step 4
let credentials_flag = match http_request.credentials_mode {
CredentialsMode::Include => true,
CredentialsMode::CredentialsSameOrigin if http_request.response_tainting == ResponseTainting::Basic
=> true,
_ => false
};
let content_length_value = match http_request.body {
None =>
match http_request.method {
// Step 6
Method::Post | Method::Put =>
Some(0),
// Step 5
_ => None
},
// Step 7
Some(ref http_request_body) => Some(http_request_body.len() as u64)
};
// Step 8
if let Some(content_length_value) = content_length_value {
http_request.headers.set(ContentLength(content_length_value));
}
// Step 9 TODO: needs request's client object
// Step 10
match http_request.referrer {
Referrer::NoReferrer => (),
Referrer::ReferrerUrl(ref http_request_referrer) =>
http_request.headers.set(Referer(http_request_referrer.to_string())),
Referrer::Client =>
// it should be impossible for referrer to be anything else during fetching
// https://fetch.spec.whatwg.org/#concept-request-referrer
unreachable!()
};
// Step 11
if !http_request.omit_origin_header {
let method = &http_request.method;
if cors_flag || (*method != Method::Get && *method != Method::Head) {
debug_assert!(http_request.origin != Origin::Client);
if let Origin::Origin(ref url_origin) = http_request.origin {
if let Some(hyper_origin) = try_immutable_origin_to_hyper_origin(url_origin) {
http_request.headers.set(hyper_origin)
}
}
}
}
// Step 12
if !http_request.headers.has::<UserAgent>() {
let user_agent = context.user_agent.clone().into_owned();
http_request.headers.set(UserAgent(user_agent));
}
match http_request.cache_mode {
// Step 13
CacheMode::Default if is_no_store_cache(&http_request.headers) => {
http_request.cache_mode = CacheMode::NoStore;
},
// Step 14
CacheMode::NoCache if !http_request.headers.has::<CacheControl>() => {
http_request.headers.set(CacheControl(vec![CacheDirective::MaxAge(0)]));
},
// Step 15
CacheMode::Reload | CacheMode::NoStore => {
// Substep 1
if !http_request.headers.has::<Pragma>() {
http_request.headers.set(Pragma::NoCache);
}
// Substep 2
if !http_request.headers.has::<CacheControl>() {
http_request.headers.set(CacheControl(vec![CacheDirective::NoCache]));
}
},
_ => {}
}
// Step 16
let current_url = http_request.current_url();
let host = Host {
hostname: current_url.host_str().unwrap().to_owned(),
port: current_url.port()
};
http_request.headers.set(host);
// unlike http_loader, we should not set the accept header
// here, according to the fetch spec
set_default_accept_encoding(&mut http_request.headers);
// Step 17
// TODO some of this step can't be implemented yet
if credentials_flag {
// Substep 1
// TODO http://mxr.mozilla.org/servo/source/components/net/http_loader.rs#504
// XXXManishearth http_loader has block_cookies: support content blocking here too
set_request_cookies(¤t_url,
&mut http_request.headers,
&context.state.cookie_jar);
// Substep 2
if !http_request.headers.has::<Authorization<String>>() {
// Substep 3
let mut authorization_value = None;
// Substep 4
if let Some(basic) = auth_from_cache(&context.state.auth_cache, ¤t_url.origin()) {
if !http_request.use_url_credentials || !has_credentials(¤t_url) {
authorization_value = Some(basic);
}
}
// Substep 5
if authentication_fetch_flag && authorization_value.is_none() {
if has_credentials(¤t_url) {
authorization_value = Some(Basic {
username: current_url.username().to_owned(),
password: current_url.password().map(str::to_owned)
})
}
}
// Substep 6
if let Some(basic) = authorization_value {
http_request.headers.set(Authorization(basic));
}
}
}
// Step 18
// TODO If there’s a proxy-authentication entry, use it as appropriate.
// Step 19
let mut response: Option<Response> = None;
// Step 20
let mut revalidation_needed = false;
// Step 21
// TODO have a HTTP cache to check for a completed response
let complete_http_response_from_cache: Option<Response> = None;
if http_request.cache_mode != CacheMode::NoStore &&
http_request.cache_mode != CacheMode::Reload &&
complete_http_response_from_cache.is_some() {
// TODO Substep 1 and 2. Select a response from HTTP cache.
// Substep 3
if let Some(ref response) = response {
revalidation_needed = response_needs_revalidation(&response);
};
// Substep 4
if http_request.cache_mode == CacheMode::ForceCache ||
http_request.cache_mode == CacheMode::OnlyIfCached {
// TODO pull response from HTTP cache
// response = http_request
}
if revalidation_needed {
// Substep 5
// TODO set If-None-Match and If-Modified-Since according to cached
// response headers.
} else {
// Substep 6
// TODO pull response from HTTP cache
// response = http_request
// response.cache_state = CacheState::Local;
}
}
// Step 22
if response.is_none() {
// Substep 1
if http_request.cache_mode == CacheMode::OnlyIfCached {
return Response::network_error(
NetworkError::Internal("Couldn't find response in cache".into()))
}
// Substep 2
let forward_response = http_network_fetch(http_request, credentials_flag,
done_chan, context);
match forward_response.raw_status {
// Substep 3
Some((200...303, _)) |
Some((305...399, _)) => {
if !http_request.method.safe() {
// TODO Invalidate HTTP cache response
}
},
// Substep 4
Some((304, _)) => {
if revalidation_needed {
// TODO update forward_response headers with cached response
// headers
}
},
_ => {}
}
// Substep 5
if response.is_none() {
response = Some(forward_response);
}
}
let response = response.unwrap();
match response.status {
Some(StatusCode::Unauthorized) => {
// Step 23
// FIXME: Figure out what to do with request window objects
if cors_flag && !credentials_flag {
return response;
}
// Substep 1
// TODO: Spec says requires testing on multiple WWW-Authenticate headers
// Substep 2
if http_request.body.is_some() {
// TODO Implement body source
}
// Substep 3
if !http_request.use_url_credentials || authentication_fetch_flag {
// TODO: Prompt the user for username and password from the window
// Wrong, but will have to do until we are able to prompt the user
// otherwise this creates an infinite loop
// We basically pretend that the user declined to enter credentials
return response;
}
// Substep 4
return http_network_or_cache_fetch(http_request,
true /* authentication flag */,
cors_flag, done_chan, context);
},
Some(StatusCode::ProxyAuthenticationRequired) => {
// Step 24
// Step 1
// TODO: Figure out what to do with request window objects
// Step 2
// TODO: Spec says requires testing on Proxy-Authenticate headers
// Step 3
// TODO: Prompt the user for proxy authentication credentials
// Wrong, but will have to do until we are able to prompt the user
// otherwise this creates an infinite loop
// We basically pretend that the user declined to enter credentials
return response;
// Step 4
// return http_network_or_cache_fetch(request, authentication_fetch_flag,
// cors_flag, done_chan, context);
},
_ => {}
}
// Step 25
if authentication_fetch_flag {
// TODO Create the authentication entry for request and the given realm
}
// Step 26
response
}
/// [HTTP network fetch](https://fetch.spec.whatwg.org/#http-network-fetch)
///
/// Performs the real network request for `request` and returns the `Response`
/// shell immediately; the body is streamed on a spawned worker thread, with
/// progress reported through `done_chan` as `Data::Payload`/`Data::Done`.
/// `credentials_flag` controls whether `Set-Cookie` headers are honored.
/// NOTE(review): many spec steps are still TODO, as marked inline.
fn http_network_fetch(request: &Request,
                      credentials_flag: bool,
                      done_chan: &mut DoneChannel,
                      context: &FetchContext)
                      -> Response {
    // TODO: Implement HTTP network fetch spec
    // Step 1
    // nothing to do here, since credentials_flag is already a boolean
    // Step 2
    // TODO be able to create connection using current url's origin and credentials
    // Step 3
    // TODO be able to tell if the connection is a failure
    // Step 4
    let url = request.current_url();
    // A request id is only minted when a devtools client is attached.
    let request_id = context.devtools_chan.as_ref().map(|_| {
        uuid::Uuid::new_v4().simple().to_string()
    });
    // XHR uses the default destination; other kinds of fetches (which haven't been implemented yet)
    // do not. Once we support other kinds of fetches we'll need to be more fine grained here
    // since things like image fetches are classified differently by devtools
    let is_xhr = request.destination == Destination::None;
    let wrapped_response = obtain_response(context.connector.clone(), &url, &request.method,
                                           &request.headers,
                                           &request.body, &request.method,
                                           &request.pipeline_id, request.redirect_count + 1,
                                           request_id.as_ref().map(Deref::deref), is_xhr);
    let pipeline_id = request.pipeline_id;
    // A failed connection short-circuits into a network-error response.
    let (res, msg) = match wrapped_response {
        Ok(wrapped_response) => wrapped_response,
        Err(error) => return Response::network_error(error),
    };
    // Copy status/headers from the hyper response into our Response shell.
    let mut response = Response::new(url.clone());
    response.status = Some(res.response.status);
    response.raw_status = Some((res.response.status_raw().0,
                                res.response.status_raw().1.as_bytes().to_vec()));
    response.headers = res.response.headers.clone();
    response.referrer = request.referrer.to_url().cloned();
    // Shared handle to the body; the worker thread fills it in below.
    let res_body = response.body.clone();
    // We're about to spawn a thread to be waited on here
    let (done_sender, done_receiver) = channel();
    *done_chan = Some((done_sender.clone(), done_receiver));
    let meta = match response.metadata().expect("Response metadata should exist at this stage") {
        FetchMetadata::Unfiltered(m) => m,
        FetchMetadata::Filtered { unsafe_, .. } => unsafe_
    };
    // Clone everything the worker thread needs, since it outlives this frame.
    let devtools_sender = context.devtools_chan.clone();
    let meta_status = meta.status.clone();
    let meta_headers = meta.headers.clone();
    thread::Builder::new().name(format!("fetch worker thread")).spawn(move || {
        match StreamedResponse::from_http_response(res) {
            Ok(mut res) => {
                *res_body.lock().unwrap() = ResponseBody::Receiving(vec![]);
                if let Some(ref sender) = devtools_sender {
                    if let Some(m) = msg {
                        send_request_to_devtools(m, &sender);
                    }
                    // --- Tell devtools that we got a response
                    // Send an HttpResponse message to devtools with the corresponding request_id
                    if let Some(pipeline_id) = pipeline_id {
                        send_response_to_devtools(
                            &sender, request_id.unwrap(),
                            meta_headers.map(Serde::into_inner),
                            meta_status,
                            pipeline_id);
                    }
                }
                // Pump the body: append each chunk to the shared buffer and
                // mirror it onto `done_chan` for the waiting fetch driver.
                loop {
                    match read_block(&mut res) {
                        Ok(Data::Payload(chunk)) => {
                            if let ResponseBody::Receiving(ref mut body) = *res_body.lock().unwrap() {
                                body.extend_from_slice(&chunk);
                                let _ = done_sender.send(Data::Payload(chunk));
                            }
                        },
                        Ok(Data::Done) | Err(_) => {
                            // End of stream (or read error): seal the body as
                            // Done and notify the waiter.
                            let mut body = res_body.lock().unwrap();
                            let completed_body = match *body {
                                ResponseBody::Receiving(ref mut body) => {
                                    mem::replace(body, vec![])
                                },
                                _ => vec![],
                            };
                            *body = ResponseBody::Done(completed_body);
                            let _ = done_sender.send(Data::Done);
                            break;
                        }
                    }
                }
            }
            Err(_) => {
                // XXXManishearth we should propagate this error somehow
                *res_body.lock().unwrap() = ResponseBody::Done(vec![]);
                let _ = done_sender.send(Data::Done);
            }
        }
    }).expect("Thread spawning failed");
    // TODO these substeps aren't possible yet
    // Substep 1
    // Substep 2
    // TODO Determine if response was retrieved over HTTPS
    // TODO Servo needs to decide what ciphers are to be treated as "deprecated"
    response.https_state = HttpsState::None;
    // TODO Read request
    // Step 5-9
    // (needs stream bodies)
    // Step 10
    // TODO when https://bugzilla.mozilla.org/show_bug.cgi?id=1030660
    // is resolved, this step will become uneccesary
    // TODO this step
    // Content decoding is not implemented yet; both branches are empty stubs.
    if let Some(encoding) = response.headers.get::<ContentEncoding>() {
        if encoding.contains(&Encoding::Gzip) {
        }
        else if encoding.contains(&Encoding::Compress) {
        }
    };
    // Step 11
    // TODO this step isn't possible yet (CSP)
    // Step 12
    if response.is_network_error() && request.cache_mode == CacheMode::NoStore {
        // TODO update response in the HTTP cache for request
    }
    // TODO this step isn't possible yet
    // Step 13
    // Step 14.
    if credentials_flag {
        set_cookies_from_headers(&url, &response.headers, &context.state.cookie_jar);
    }
    // TODO these steps
    // Step 15
    // Substep 1
    // Substep 2
    // Sub-substep 1
    // Sub-substep 2
    // Sub-substep 3
    // Sub-substep 4
    // Substep 3
    // Step 16
    response
}
/// [CORS preflight fetch](https://fetch.spec.whatwg.org#cors-preflight-fetch)
///
/// Issues an OPTIONS preflight mirroring `request`, validates the
/// Access-Control-Allow-* response headers, populates the CORS `cache`, and
/// returns either the preflight response (success) or a network error.
fn cors_preflight_fetch(request: &Request,
                        cache: &mut CorsCache,
                        context: &FetchContext)
                        -> Response {
    // Step 1
    // Build the OPTIONS request, copying over classification fields so it is
    // routed/reported like the original request.
    let mut preflight = Request::new(request.current_url(), Some(request.origin.clone()),
                                     request.is_service_worker_global_scope, request.pipeline_id);
    preflight.method = Method::Options;
    preflight.initiator = request.initiator.clone();
    preflight.type_ = request.type_.clone();
    preflight.destination = request.destination.clone();
    preflight.referrer = request.referrer.clone();
    preflight.referrer_policy = request.referrer_policy;
    // Step 2
    preflight.headers.set::<AccessControlRequestMethod>(
        AccessControlRequestMethod(request.method.clone()));
    // Step 3, 4
    // Advertise every non-simple header, case-insensitively, in sorted order.
    let mut value = request.headers
        .iter()
        .filter(|view| !is_simple_header(view))
        .map(|view| UniCase(view.name().to_owned()))
        .collect::<Vec<UniCase<String>>>();
    value.sort();
    // Step 5
    preflight.headers.set::<AccessControlRequestHeaders>(
        AccessControlRequestHeaders(value));
    // Step 6
    let response = http_network_or_cache_fetch(&mut preflight, false, false, &mut None, context);
    // Step 7
    if cors_check(&request, &response).is_ok() &&
       response.status.map_or(false, |status| status.is_success()) {
        // Substep 1
        // `has` + `get` returning None means the header was present but
        // unparsable — that is treated as a failed check (Substep 3).
        let mut methods = if response.headers.has::<AccessControlAllowMethods>() {
            match response.headers.get::<AccessControlAllowMethods>() {
                Some(&AccessControlAllowMethods(ref m)) => m.clone(),
                // Substep 3
                None => return Response::network_error(NetworkError::Internal("CORS ACAM check failed".into()))
            }
        } else {
            vec![]
        };
        // Substep 2
        let header_names = if response.headers.has::<AccessControlAllowHeaders>() {
            match response.headers.get::<AccessControlAllowHeaders>() {
                Some(&AccessControlAllowHeaders(ref hn)) => hn.clone(),
                // Substep 3
                None => return Response::network_error(NetworkError::Internal("CORS ACAH check failed".into()))
            }
        } else {
            vec![]
        };
        // Substep 4
        if methods.is_empty() && request.use_cors_preflight {
            methods = vec![request.method.clone()];
        }
        // Substep 5
        debug!("CORS check: Allowed methods: {:?}, current method: {:?}",
               methods, request.method);
        if methods.iter().all(|method| *method != request.method) &&
           !is_simple_method(&request.method) {
            return Response::network_error(NetworkError::Internal("CORS method check failed".into()));
        }
        // Substep 6
        debug!("CORS check: Allowed headers: {:?}, current headers: {:?}", header_names, request.headers);
        let set: HashSet<&UniCase<String>> = HashSet::from_iter(header_names.iter());
        if request.headers.iter().any(|ref hv| !set.contains(&UniCase(hv.name().to_owned())) && !is_simple_header(hv)) {
            return Response::network_error(NetworkError::Internal("CORS headers check failed".into()));
        }
        // Substep 7, 8
        let max_age = response.headers.get::<AccessControlMaxAge>().map(|acma| acma.0).unwrap_or(0);
        // TODO: Substep 9 - Need to define what an imposed limit on max-age is
        // Substep 11, 12
        // Record every allowed method/header in the CORS cache for max_age.
        for method in &methods {
            cache.match_method_and_update(&*request, method.clone(), max_age);
        }
        // Substep 13, 14
        for header_name in &header_names {
            cache.match_header_and_update(&*request, &*header_name, max_age);
        }
        // Substep 15
        return response;
    }
    // Step 8
    Response::network_error(NetworkError::Internal("CORS check failed".into()))
}
/// [CORS check](https://fetch.spec.whatwg.org#concept-cors-check)
///
/// Validates the Access-Control-Allow-Origin (and, for credentialed
/// requests, Access-Control-Allow-Credentials) headers of `response`
/// against `request`. `Ok(())` means the response may be shared.
fn cors_check(request: &Request, response: &Response) -> Result<(), ()> {
    // Step 1-2: a missing ACAO header fails the check outright.
    let allow_origin = match response.headers.get::<AccessControlAllowOrigin>().cloned() {
        Some(header) => header,
        None => return Err(()),
    };
    let include_credentials = request.credentials_mode == CredentialsMode::Include;
    // Step 3: the wildcard is acceptable whenever credentials are not in play.
    if !include_credentials && allow_origin == AccessControlAllowOrigin::Any {
        return Ok(());
    }
    // Step 4: otherwise the header must name a concrete origin...
    let serialized = match allow_origin {
        AccessControlAllowOrigin::Value(value) => value,
        // Any or Null at this point can only fail.
        _ => return Err(()),
    };
    // ...and it must match the request's origin exactly.
    let origins_match = match request.origin {
        Origin::Origin(ref origin) => origin.ascii_serialization() == serialized,
        _ => false,
    };
    if !origins_match {
        return Err(());
    }
    // Step 5: without credentials an origin match suffices.
    if !include_credentials {
        return Ok(());
    }
    // Step 6-8: a credentialed request additionally needs ACAC to be present.
    match response.headers.get::<AccessControlAllowCredentials>() {
        Some(_) => Ok(()),
        None => Err(()),
    }
}
/// True when the URL embeds userinfo: a non-empty username or any password.
fn has_credentials(url: &ServoUrl) -> bool {
    let named = !url.username().is_empty();
    let with_password = url.password().is_some();
    named || with_password
}
/// Returns true when the request carries any conditional header
/// (If-Modified-Since, If-None-Match, If-Unmodified-Since, If-Match,
/// If-Range) that makes its response unsuitable for storing in the cache.
fn is_no_store_cache(headers: &Headers) -> bool {
    // `||` short-circuits; the previous bitwise `|` evaluated every header
    // lookup even after one had already matched.
    headers.has::<IfModifiedSince>() || headers.has::<IfNoneMatch>() ||
    headers.has::<IfUnmodifiedSince>() || headers.has::<IfMatch>() ||
    headers.has::<IfRange>()
}
/// Whether a cached response must be revalidated with the origin server
/// before it may be used.
fn response_needs_revalidation(_response: &Response) -> bool {
    // TODO this function
    // Stub until the HTTP cache lands: `false` means callers treat any
    // cached entry as usable as-is.
    false
}
/// https://fetch.spec.whatwg.org/#redirect-status
pub fn is_redirect_status(status: StatusCode) -> bool {
    // The five redirect statuses enumerated by the Fetch spec:
    // 301, 302, 303, 307 and 308.
    let redirects = [StatusCode::MovedPermanently,
                     StatusCode::Found,
                     StatusCode::SeeOther,
                     StatusCode::TemporaryRedirect,
                     StatusCode::PermanentRedirect];
    redirects.contains(&status)
}
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::CSSStyleDeclarationBinding::CSSStyleDeclarationMethods;
use crate::dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use crate::dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
use crate::dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use crate::dom::bindings::conversions::{jsstring_to_str, ConversionResult, FromJSValConvertible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::AnimationFrameCallback;
use crate::dom::element::Element;
use crate::dom::globalscope::GlobalScope;
use crate::dom::node::{window_from_node, Node, ShadowIncluding};
use crate::dom::window::Window;
use crate::script_thread::Documents;
use devtools_traits::TimelineMarkerType;
use devtools_traits::{AutoMargins, CachedConsoleMessage, CachedConsoleMessageTypes};
use devtools_traits::{ComputedNodeLayout, ConsoleAPI, PageError};
use devtools_traits::{EvaluateJSReply, Modification, NodeInfo, TimelineMarker};
use ipc_channel::ipc::IpcSender;
use js::jsapi::JSAutoCompartment;
use js::jsval::UndefinedValue;
use js::rust::wrappers::ObjectClassName;
use msg::constellation_msg::PipelineId;
use std::ffi::CStr;
use std::str;
use uuid::Uuid;
#[allow(unsafe_code)]
/// Evaluates a devtools console expression on `global` and replies with a
/// representation of the JS result, mapped to an `EvaluateJSReply` variant by
/// its JS type (void/bool/number/string/null/object).
pub fn handle_evaluate_js(global: &GlobalScope, eval: String, reply: IpcSender<EvaluateJSReply>) {
    // global.get_cx() returns a valid `JSContext` pointer, so this is safe.
    let result = unsafe {
        let cx = global.get_cx();
        let globalhandle = global.reflector().get_jsobject();
        // Enter the global's compartment for the duration of the evaluation.
        let _ac = JSAutoCompartment::new(cx, globalhandle.get());
        rooted!(in(cx) let mut rval = UndefinedValue());
        global.evaluate_js_on_global_with_result(&eval, rval.handle_mut());
        if rval.is_undefined() {
            EvaluateJSReply::VoidValue
        } else if rval.is_boolean() {
            EvaluateJSReply::BooleanValue(rval.to_boolean())
        } else if rval.is_double() || rval.is_int32() {
            // Conversion of a known-numeric jsval cannot fail.
            EvaluateJSReply::NumberValue(
                match FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
                    Ok(ConversionResult::Success(v)) => v,
                    _ => unreachable!(),
                },
            )
        } else if rval.is_string() {
            EvaluateJSReply::StringValue(String::from(jsstring_to_str(cx, rval.to_string())))
        } else if rval.is_null() {
            EvaluateJSReply::NullValue
        } else {
            // Objects are reported by class name plus a fresh actor uuid.
            assert!(rval.is_object());
            rooted!(in(cx) let obj = rval.to_object());
            let class_name = CStr::from_ptr(ObjectClassName(cx, obj.handle()));
            let class_name = str::from_utf8(class_name.to_bytes()).unwrap();
            EvaluateJSReply::ActorValue {
                class: class_name.to_owned(),
                uuid: Uuid::new_v4().to_string(),
            }
        }
    };
    reply.send(result).unwrap();
}
/// Replies with a summary of the document's root node, or `None` when the
/// pipeline has no document.
pub fn handle_get_root_node(
    documents: &Documents,
    pipeline: PipelineId,
    reply: IpcSender<Option<NodeInfo>>,
) {
    let summary = match documents.find_document(pipeline) {
        Some(document) => Some(document.upcast::<Node>().summarize()),
        None => None,
    };
    reply.send(summary).unwrap();
}
/// Replies with a summary of the document element (e.g. `<html>`), or `None`
/// when the pipeline has no document or the document has no root element.
pub fn handle_get_document_element(
    documents: &Documents,
    pipeline: PipelineId,
    reply: IpcSender<Option<NodeInfo>>,
) {
    let summary = documents.find_document(pipeline).and_then(|document| {
        let element = document.GetDocumentElement()?;
        Some(element.upcast::<Node>().summarize())
    });
    reply.send(summary).unwrap();
}
/// Walks the document tree in preorder (shadow trees excluded) looking for
/// the node carrying the given devtools unique id.
fn find_node_by_unique_id(
    documents: &Documents,
    pipeline: PipelineId,
    node_id: &str,
) -> Option<DomRoot<Node>> {
    let document = documents.find_document(pipeline)?;
    document
        .upcast::<Node>()
        .traverse_preorder(ShadowIncluding::No)
        .find(|node| node.unique_id() == node_id)
}
/// Replies with summaries of the children of the node identified by
/// `node_id`, or `None` when no such node exists in the pipeline.
pub fn handle_get_children(
    documents: &Documents,
    pipeline: PipelineId,
    node_id: String,
    reply: IpcSender<Option<Vec<NodeInfo>>>,
) {
    let answer = find_node_by_unique_id(documents, pipeline, &*node_id).map(|parent| {
        parent
            .children()
            .map(|child| child.summarize())
            .collect::<Vec<_>>()
    });
    reply.send(answer).unwrap();
}
/// Replies with computed layout information (bounding-box size, computed
/// style strings and auto-margin flags) for the node identified by
/// `node_id`, or `None` when the node is not found.
pub fn handle_get_layout(
    documents: &Documents,
    pipeline: PipelineId,
    node_id: String,
    reply: IpcSender<Option<ComputedNodeLayout>>,
) {
    let node = match find_node_by_unique_id(documents, pipeline, &*node_id) {
        None => return reply.send(None).unwrap(),
        Some(found_node) => found_node,
    };
    let elem = node
        .downcast::<Element>()
        .expect("should be getting layout of element");
    let rect = elem.GetBoundingClientRect();
    let width = rect.Width() as f32;
    let height = rect.Height() as f32;
    let window = window_from_node(&*node);
    // The original code re-did the identical `downcast::<Element>()` here;
    // the first binding is reused instead.
    let computed_style = window.GetComputedStyle(elem, None);
    reply
        .send(Some(ComputedNodeLayout {
            display: String::from(computed_style.Display()),
            position: String::from(computed_style.Position()),
            zIndex: String::from(computed_style.ZIndex()),
            boxSizing: String::from(computed_style.BoxSizing()),
            autoMargins: determine_auto_margins(&window, &*node),
            marginTop: String::from(computed_style.MarginTop()),
            marginRight: String::from(computed_style.MarginRight()),
            marginBottom: String::from(computed_style.MarginBottom()),
            marginLeft: String::from(computed_style.MarginLeft()),
            borderTopWidth: String::from(computed_style.BorderTopWidth()),
            borderRightWidth: String::from(computed_style.BorderRightWidth()),
            borderBottomWidth: String::from(computed_style.BorderBottomWidth()),
            borderLeftWidth: String::from(computed_style.BorderLeftWidth()),
            paddingTop: String::from(computed_style.PaddingTop()),
            paddingRight: String::from(computed_style.PaddingRight()),
            paddingBottom: String::from(computed_style.PaddingBottom()),
            paddingLeft: String::from(computed_style.PaddingLeft()),
            width: width,
            height: height,
        }))
        .unwrap();
}
/// Reads the computed margins of `node` and reports, per side, whether the
/// margin was specified as `auto`.
fn determine_auto_margins(window: &Window, node: &Node) -> AutoMargins {
    // Resolve computed style through a layout query; assumed to succeed for
    // any node the devtools inspector can reference — TODO confirm.
    let style = window.style_query(node.to_trusted_node_address()).unwrap();
    let margin = style.get_margin();
    AutoMargins {
        top: margin.margin_top.is_auto(),
        right: margin.margin_right.is_auto(),
        bottom: margin.margin_bottom.is_auto(),
        left: margin.margin_left.is_auto(),
    }
}
/// Collects cached console output for the devtools console, filtered by
/// `message_types`.
///
/// NOTE(review): both branches currently push hard-coded placeholder
/// messages; real caching of page errors and console API calls is TODO.
pub fn handle_get_cached_messages(
    _pipeline_id: PipelineId,
    message_types: CachedConsoleMessageTypes,
    reply: IpcSender<Vec<CachedConsoleMessage>>,
) {
    // TODO: check the messageTypes against a global Cache for console messages and page exceptions
    let mut messages = Vec::new();
    if message_types.contains(CachedConsoleMessageTypes::PAGE_ERROR) {
        // TODO: make script error reporter pass all reported errors
        // to devtools and cache them for returning here.
        let msg = PageError {
            type_: "PageError".to_owned(),
            errorMessage: "page error test".to_owned(),
            sourceName: String::new(),
            lineText: String::new(),
            lineNumber: 0,
            columnNumber: 0,
            category: String::new(),
            timeStamp: 0,
            error: false,
            warning: false,
            exception: false,
            strict: false,
            private: false,
        };
        messages.push(CachedConsoleMessage::PageError(msg));
    }
    if message_types.contains(CachedConsoleMessageTypes::CONSOLE_API) {
        // TODO: do for real
        let msg = ConsoleAPI {
            type_: "ConsoleAPI".to_owned(),
            level: "error".to_owned(),
            filename: "http://localhost/~mihai/mozilla/test.html".to_owned(),
            lineNumber: 0,
            functionName: String::new(),
            timeStamp: 0,
            private: false,
            arguments: vec!["console error test".to_owned()],
        };
        messages.push(CachedConsoleMessage::ConsoleAPI(msg));
    }
    reply.send(messages).unwrap();
}
/// Applies a batch of attribute modifications from the devtools inspector to
/// the element identified by `node_id`.
///
/// A modification carrying a new value sets the attribute; one without a
/// value removes it.
pub fn handle_modify_attribute(
    documents: &Documents,
    pipeline: PipelineId,
    node_id: String,
    modifications: Vec<Modification>,
) {
    let node = match find_node_by_unique_id(documents, pipeline, &*node_id) {
        None => {
            return warn!(
                "node id {} for pipeline id {} is not found",
                &node_id, &pipeline
            );
        },
        Some(found_node) => found_node,
    };
    let elem = node
        .downcast::<Element>()
        // Fixed message: it was copy-pasted from handle_get_layout.
        .expect("should be modifying an element");
    for modification in modifications {
        match modification.newValue {
            Some(string) => {
                let _ = elem.SetAttribute(
                    DOMString::from(modification.attributeName),
                    DOMString::from(string),
                );
            },
            None => elem.RemoveAttribute(DOMString::from(modification.attributeName)),
        }
    }
}
/// Toggles whether this global forwards live devtools update notifications.
pub fn handle_wants_live_notifications(global: &GlobalScope, send_notifications: bool) {
    global.set_devtools_wants_updates(send_notifications);
}
/// Starts recording the requested timeline marker types for the pipeline's
/// window; markers are streamed back through `reply`.
pub fn handle_set_timeline_markers(
    documents: &Documents,
    pipeline: PipelineId,
    marker_types: Vec<TimelineMarkerType>,
    reply: IpcSender<Option<TimelineMarker>>,
) {
    // A missing window still gets an answer so the devtools client is not
    // left waiting on the channel.
    if let Some(window) = documents.find_window(pipeline) {
        window.set_devtools_timeline_markers(marker_types, reply);
    } else {
        reply.send(None).unwrap();
    }
}
/// Stops recording the given timeline marker types; a missing window is a
/// no-op.
pub fn handle_drop_timeline_markers(
    documents: &Documents,
    pipeline: PipelineId,
    marker_types: Vec<TimelineMarkerType>,
) {
    match documents.find_window(pipeline) {
        Some(window) => window.drop_devtools_timeline_markers(marker_types),
        None => {},
    }
}
/// Schedules an animation-frame callback that reports a framerate tick back
/// to the named devtools actor.
pub fn handle_request_animation_frame(documents: &Documents, id: PipelineId, actor_name: String) {
    let callback = AnimationFrameCallback::DevtoolsFramerateTick { actor_name };
    match documents.find_document(id) {
        Some(doc) => doc.request_animation_frame(callback),
        None => {},
    }
}
/// Reloads the pipeline's page on behalf of devtools, skipping the
/// same-origin check.
pub fn handle_reload(documents: &Documents, id: PipelineId) {
    match documents.find_window(id) {
        Some(win) => win.Location().reload_without_origin_check(),
        None => {},
    }
}
Make the devtools find_node_by_unique_id function include shadow trees
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::CSSStyleDeclarationBinding::CSSStyleDeclarationMethods;
use crate::dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use crate::dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
use crate::dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use crate::dom::bindings::conversions::{jsstring_to_str, ConversionResult, FromJSValConvertible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::AnimationFrameCallback;
use crate::dom::element::Element;
use crate::dom::globalscope::GlobalScope;
use crate::dom::node::{window_from_node, Node, ShadowIncluding};
use crate::dom::window::Window;
use crate::script_thread::Documents;
use devtools_traits::TimelineMarkerType;
use devtools_traits::{AutoMargins, CachedConsoleMessage, CachedConsoleMessageTypes};
use devtools_traits::{ComputedNodeLayout, ConsoleAPI, PageError};
use devtools_traits::{EvaluateJSReply, Modification, NodeInfo, TimelineMarker};
use ipc_channel::ipc::IpcSender;
use js::jsapi::JSAutoCompartment;
use js::jsval::UndefinedValue;
use js::rust::wrappers::ObjectClassName;
use msg::constellation_msg::PipelineId;
use std::ffi::CStr;
use std::str;
use uuid::Uuid;
#[allow(unsafe_code)]
/// Evaluates a devtools console expression on `global` and replies with a
/// representation of the JS result, mapped to an `EvaluateJSReply` variant by
/// its JS type (void/bool/number/string/null/object).
pub fn handle_evaluate_js(global: &GlobalScope, eval: String, reply: IpcSender<EvaluateJSReply>) {
    // global.get_cx() returns a valid `JSContext` pointer, so this is safe.
    let result = unsafe {
        let cx = global.get_cx();
        let globalhandle = global.reflector().get_jsobject();
        // Enter the global's compartment for the duration of the evaluation.
        let _ac = JSAutoCompartment::new(cx, globalhandle.get());
        rooted!(in(cx) let mut rval = UndefinedValue());
        global.evaluate_js_on_global_with_result(&eval, rval.handle_mut());
        if rval.is_undefined() {
            EvaluateJSReply::VoidValue
        } else if rval.is_boolean() {
            EvaluateJSReply::BooleanValue(rval.to_boolean())
        } else if rval.is_double() || rval.is_int32() {
            // Conversion of a known-numeric jsval cannot fail.
            EvaluateJSReply::NumberValue(
                match FromJSValConvertible::from_jsval(cx, rval.handle(), ()) {
                    Ok(ConversionResult::Success(v)) => v,
                    _ => unreachable!(),
                },
            )
        } else if rval.is_string() {
            EvaluateJSReply::StringValue(String::from(jsstring_to_str(cx, rval.to_string())))
        } else if rval.is_null() {
            EvaluateJSReply::NullValue
        } else {
            // Objects are reported by class name plus a fresh actor uuid.
            assert!(rval.is_object());
            rooted!(in(cx) let obj = rval.to_object());
            let class_name = CStr::from_ptr(ObjectClassName(cx, obj.handle()));
            let class_name = str::from_utf8(class_name.to_bytes()).unwrap();
            EvaluateJSReply::ActorValue {
                class: class_name.to_owned(),
                uuid: Uuid::new_v4().to_string(),
            }
        }
    };
    reply.send(result).unwrap();
}
/// Replies with a summary of the document's root node, or `None` when the
/// pipeline has no document.
pub fn handle_get_root_node(
    documents: &Documents,
    pipeline: PipelineId,
    reply: IpcSender<Option<NodeInfo>>,
) {
    let summary = match documents.find_document(pipeline) {
        Some(document) => Some(document.upcast::<Node>().summarize()),
        None => None,
    };
    reply.send(summary).unwrap();
}
/// Replies with a summary of the document element (e.g. `<html>`), or `None`
/// when the pipeline has no document or the document has no root element.
pub fn handle_get_document_element(
    documents: &Documents,
    pipeline: PipelineId,
    reply: IpcSender<Option<NodeInfo>>,
) {
    let summary = documents.find_document(pipeline).and_then(|document| {
        let element = document.GetDocumentElement()?;
        Some(element.upcast::<Node>().summarize())
    });
    reply.send(summary).unwrap();
}
/// Walks the document tree in preorder — shadow trees included — looking for
/// the node carrying the given devtools unique id.
fn find_node_by_unique_id(
    documents: &Documents,
    pipeline: PipelineId,
    node_id: &str,
) -> Option<DomRoot<Node>> {
    let document = documents.find_document(pipeline)?;
    document
        .upcast::<Node>()
        .traverse_preorder(ShadowIncluding::Yes)
        .find(|node| node.unique_id() == node_id)
}
/// Replies with summaries of the children of the node identified by
/// `node_id`, or `None` when no such node exists in the pipeline.
pub fn handle_get_children(
    documents: &Documents,
    pipeline: PipelineId,
    node_id: String,
    reply: IpcSender<Option<Vec<NodeInfo>>>,
) {
    let answer = find_node_by_unique_id(documents, pipeline, &*node_id).map(|parent| {
        parent
            .children()
            .map(|child| child.summarize())
            .collect::<Vec<_>>()
    });
    reply.send(answer).unwrap();
}
/// Replies with computed layout information (bounding-box size, computed
/// style strings and auto-margin flags) for the node identified by
/// `node_id`, or `None` when the node is not found.
pub fn handle_get_layout(
    documents: &Documents,
    pipeline: PipelineId,
    node_id: String,
    reply: IpcSender<Option<ComputedNodeLayout>>,
) {
    let node = match find_node_by_unique_id(documents, pipeline, &*node_id) {
        None => return reply.send(None).unwrap(),
        Some(found_node) => found_node,
    };
    let elem = node
        .downcast::<Element>()
        .expect("should be getting layout of element");
    let rect = elem.GetBoundingClientRect();
    let width = rect.Width() as f32;
    let height = rect.Height() as f32;
    let window = window_from_node(&*node);
    // The original code re-did the identical `downcast::<Element>()` here;
    // the first binding is reused instead.
    let computed_style = window.GetComputedStyle(elem, None);
    reply
        .send(Some(ComputedNodeLayout {
            display: String::from(computed_style.Display()),
            position: String::from(computed_style.Position()),
            zIndex: String::from(computed_style.ZIndex()),
            boxSizing: String::from(computed_style.BoxSizing()),
            autoMargins: determine_auto_margins(&window, &*node),
            marginTop: String::from(computed_style.MarginTop()),
            marginRight: String::from(computed_style.MarginRight()),
            marginBottom: String::from(computed_style.MarginBottom()),
            marginLeft: String::from(computed_style.MarginLeft()),
            borderTopWidth: String::from(computed_style.BorderTopWidth()),
            borderRightWidth: String::from(computed_style.BorderRightWidth()),
            borderBottomWidth: String::from(computed_style.BorderBottomWidth()),
            borderLeftWidth: String::from(computed_style.BorderLeftWidth()),
            paddingTop: String::from(computed_style.PaddingTop()),
            paddingRight: String::from(computed_style.PaddingRight()),
            paddingBottom: String::from(computed_style.PaddingBottom()),
            paddingLeft: String::from(computed_style.PaddingLeft()),
            width: width,
            height: height,
        }))
        .unwrap();
}
/// Reads the computed margins of `node` and reports, per side, whether the
/// margin was specified as `auto`.
fn determine_auto_margins(window: &Window, node: &Node) -> AutoMargins {
    // Resolve computed style through a layout query; assumed to succeed for
    // any node the devtools inspector can reference — TODO confirm.
    let style = window.style_query(node.to_trusted_node_address()).unwrap();
    let margin = style.get_margin();
    AutoMargins {
        top: margin.margin_top.is_auto(),
        right: margin.margin_right.is_auto(),
        bottom: margin.margin_bottom.is_auto(),
        left: margin.margin_left.is_auto(),
    }
}
/// Collects cached console output for the devtools console, filtered by
/// `message_types`.
///
/// NOTE(review): both branches currently push hard-coded placeholder
/// messages; real caching of page errors and console API calls is TODO.
pub fn handle_get_cached_messages(
    _pipeline_id: PipelineId,
    message_types: CachedConsoleMessageTypes,
    reply: IpcSender<Vec<CachedConsoleMessage>>,
) {
    // TODO: check the messageTypes against a global Cache for console messages and page exceptions
    let mut messages = Vec::new();
    if message_types.contains(CachedConsoleMessageTypes::PAGE_ERROR) {
        // TODO: make script error reporter pass all reported errors
        // to devtools and cache them for returning here.
        let msg = PageError {
            type_: "PageError".to_owned(),
            errorMessage: "page error test".to_owned(),
            sourceName: String::new(),
            lineText: String::new(),
            lineNumber: 0,
            columnNumber: 0,
            category: String::new(),
            timeStamp: 0,
            error: false,
            warning: false,
            exception: false,
            strict: false,
            private: false,
        };
        messages.push(CachedConsoleMessage::PageError(msg));
    }
    if message_types.contains(CachedConsoleMessageTypes::CONSOLE_API) {
        // TODO: do for real
        let msg = ConsoleAPI {
            type_: "ConsoleAPI".to_owned(),
            level: "error".to_owned(),
            filename: "http://localhost/~mihai/mozilla/test.html".to_owned(),
            lineNumber: 0,
            functionName: String::new(),
            timeStamp: 0,
            private: false,
            arguments: vec!["console error test".to_owned()],
        };
        messages.push(CachedConsoleMessage::ConsoleAPI(msg));
    }
    reply.send(messages).unwrap();
}
/// Applies a batch of attribute modifications from the devtools inspector to
/// the element identified by `node_id`.
///
/// A modification carrying a new value sets the attribute; one without a
/// value removes it.
pub fn handle_modify_attribute(
    documents: &Documents,
    pipeline: PipelineId,
    node_id: String,
    modifications: Vec<Modification>,
) {
    let node = match find_node_by_unique_id(documents, pipeline, &*node_id) {
        None => {
            return warn!(
                "node id {} for pipeline id {} is not found",
                &node_id, &pipeline
            );
        },
        Some(found_node) => found_node,
    };
    let elem = node
        .downcast::<Element>()
        // Fixed message: it was copy-pasted from handle_get_layout.
        .expect("should be modifying an element");
    for modification in modifications {
        match modification.newValue {
            Some(string) => {
                let _ = elem.SetAttribute(
                    DOMString::from(modification.attributeName),
                    DOMString::from(string),
                );
            },
            None => elem.RemoveAttribute(DOMString::from(modification.attributeName)),
        }
    }
}
/// Toggles whether this global forwards live devtools update notifications.
pub fn handle_wants_live_notifications(global: &GlobalScope, send_notifications: bool) {
    global.set_devtools_wants_updates(send_notifications);
}
/// Starts recording the requested timeline marker types for the pipeline's
/// window; markers are streamed back through `reply`.
pub fn handle_set_timeline_markers(
    documents: &Documents,
    pipeline: PipelineId,
    marker_types: Vec<TimelineMarkerType>,
    reply: IpcSender<Option<TimelineMarker>>,
) {
    // A missing window still gets an answer so the devtools client is not
    // left waiting on the channel.
    if let Some(window) = documents.find_window(pipeline) {
        window.set_devtools_timeline_markers(marker_types, reply);
    } else {
        reply.send(None).unwrap();
    }
}
/// Stops recording the given timeline marker types; a missing window is a
/// no-op.
pub fn handle_drop_timeline_markers(
    documents: &Documents,
    pipeline: PipelineId,
    marker_types: Vec<TimelineMarkerType>,
) {
    match documents.find_window(pipeline) {
        Some(window) => window.drop_devtools_timeline_markers(marker_types),
        None => {},
    }
}
/// Schedules an animation-frame callback that reports a framerate tick back
/// to the named devtools actor.
pub fn handle_request_animation_frame(documents: &Documents, id: PipelineId, actor_name: String) {
    let callback = AnimationFrameCallback::DevtoolsFramerateTick { actor_name };
    match documents.find_document(id) {
        Some(doc) => doc.request_animation_frame(callback),
        None => {},
    }
}
/// Reloads the pipeline's page on behalf of devtools, skipping the
/// same-origin check.
pub fn handle_reload(documents: &Documents, id: PipelineId) {
    match documents.find_window(id) {
        Some(win) => win.Location().reload_without_origin_check(),
        None => {},
    }
}
|
//! Implements the ODBC Environment
mod list_data_sources;
pub use self::list_data_sources::{DataSourceInfo, DriverInfo};
use super::{ffi, into_result, safe, try_into_option, DiagnosticRecord, GetDiagRec, Handle, Result};
use std;
/// Environment state used to represent that environment has been set to odbc version 3
///
/// Used as the type parameter of `Environment<V>`; see `create_environment_v3`.
pub type Version3 = safe::Odbc3;
/// Handle to an ODBC Environment
///
/// Creating an instance of this type is the first thing you do when using ODBC. The environment
/// must outlive all connections created with it.
pub struct Environment<V> {
    // Wrapped RAII handle from `odbc-safe`; the `V` parameter records which
    // ODBC specification version was declared for this environment.
    safe: safe::Environment<V>,
}
impl<V> Handle for Environment<V> {
    type To = ffi::Env;
    /// Returns the raw ODBC environment handle.
    ///
    /// # Safety
    /// The returned handle must not be freed or otherwise invalidated by the
    /// caller; ownership remains with this `Environment`.
    unsafe fn handle(&self) -> ffi::SQLHENV {
        self.safe.as_raw()
    }
}
impl<V: safe::Version> Environment<V> {
    /// Creates an ODBC Environment and declares specification of `V` are used
    ///
    /// # Example
    /// ```
    /// use odbc::*;
    /// fn do_database_stuff() -> std::result::Result<(), Option<DiagnosticRecord>> {
    ///     let env : Environment<Version3> = Environment::new()?; // first thing to do
    ///     // ...
    ///     Ok(())
    /// }
    /// ```
    ///
    /// You can use the shorthand `create_environment_v3()` instead.
    ///
    /// # Errors
    ///
    /// `Err(None)` signals that the environment handle itself could not be
    /// allocated. Diagnostic records are retrieved *through* a handle, so
    /// no `DiagnosticRecord` can exist at that point. A later failure while
    /// declaring the version presumably carries a record via `into_result`
    /// — confirm against `into_result`'s definition.
    pub fn new() -> std::result::Result<Environment<V>, Option<DiagnosticRecord>> {
        let safe = match safe::Environment::new() {
            safe::Success(v) => v,
            safe::Info(v) => {
                // The environment was created, but with a diagnostic worth
                // surfacing in the log.
                warn!("{}", v.get_diag_rec(1).unwrap());
                v
            }
            // Allocation failed entirely; no handle means no diagnostics.
            safe::Error(()) => return Err(None),
        };
        // Declare which ODBC specification version (`V`) this environment obeys.
        let safe = into_result(safe.declare_version())?;
        Ok(Environment { safe })
    }
}
// SAFETY: `Environment` owns a valid ODBC environment handle for its whole
// lifetime, and `handle_type` correctly reports it as `SQL_HANDLE_ENV`.
unsafe impl<V> safe::Handle for Environment<V> {
    fn handle(&self) -> ffi::SQLHANDLE {
        self.safe.as_raw() as ffi::SQLHANDLE
    }
    fn handle_type() -> ffi::HandleType {
        ffi::SQL_HANDLE_ENV
    }
}
/// Creates an ODBC Environment and declares that the specification of version 3.0 is used
///
/// # Example
/// ```
/// use odbc::*;
/// fn do_database_stuff() -> std::result::Result<(), Option<DiagnosticRecord>> {
///     let env = create_environment_v3()?; // first thing to do
///     // ...
///     Ok(())
/// }
/// ```
pub fn create_environment_v3()
    -> std::result::Result<Environment<Version3>, Option<DiagnosticRecord>>
{
    // Shorthand for `Environment::<Version3>::new()`.
    Environment::new()
}
Explain the `Result<_, Option<DiagnosticRecord>>` return type of environment creation: a `DiagnosticRecord` can only be obtained through an allocated environment handle, so when the allocation itself fails no diagnostics are available and the error is `None`.
//! Implements the ODBC Environment
mod list_data_sources;
pub use self::list_data_sources::{DataSourceInfo, DriverInfo};
use super::{ffi, into_result, safe, try_into_option, DiagnosticRecord, GetDiagRec, Handle, Result};
use std;
/// Environment state used to represent that the environment has been set to ODBC version 3
pub type Version3 = safe::Odbc3;
/// Handle to an ODBC Environment
///
/// Creating an instance of this type is the first thing you do when using ODBC. The environment
/// must outlive all connections created with it.
pub struct Environment<V> {
    // Owning wrapper around the raw ODBC environment, provided by the `safe`
    // abstraction layer.
    safe: safe::Environment<V>,
}
impl<V> Handle for Environment<V> {
    type To = ffi::Env;
    // Returns the raw ODBC environment handle. Unsafe: the raw handle must not
    // outlive or be freed independently of this `Environment`.
    unsafe fn handle(&self) -> ffi::SQLHENV {
        self.safe.as_raw()
    }
}
impl<V: safe::Version> Environment<V> {
    /// Creates an ODBC Environment and declares that the specification of `V` is used. You can
    /// use the shorthand `create_environment_v3()` instead.
    ///
    /// # Example
    /// ```
    /// use odbc::*;
    /// fn do_database_stuff() -> std::result::Result<(), Option<DiagnosticRecord>> {
    ///     let env : Environment<Version3> = Environment::new()?; // first thing to do
    ///     // ...
    ///     Ok(())
    /// }
    /// ```
    ///
    /// # Return
    ///
    /// While most functions in this crate return a `DiagnosticRecord` in the event of an Error the
    /// creation of an environment is special. Since `DiagnosticRecord`s are created using the
    /// environment, at least its allocation has to be successful to obtain one. If the allocation
    /// fails it is sadly not possible to receive further Diagnostics. Setting an unsupported version
    /// may however result in an ordinary `Some(DiagnosticRecord)`.
    pub fn new() -> std::result::Result<Environment<V>, Option<DiagnosticRecord>> {
        let safe = match safe::Environment::new() {
            safe::Success(v) => v,
            // Allocation succeeded with a diagnostic attached; log and continue.
            safe::Info(v) => {
                warn!("{}", v.get_diag_rec(1).unwrap());
                v
            }
            // Allocation failed; no handle exists to read diagnostics from.
            safe::Error(()) => return Err(None),
        };
        // Declaring the version can still fail with a regular diagnostic record.
        let safe = into_result(safe.declare_version())?;
        Ok(Environment { safe })
    }
}
// SAFETY: the wrapped `safe::Environment` keeps the handle valid for the whole
// lifetime of `self`, and its type is always `SQL_HANDLE_ENV`.
unsafe impl<V> safe::Handle for Environment<V> {
    fn handle(&self) -> ffi::SQLHANDLE {
        self.safe.as_raw() as ffi::SQLHANDLE
    }
    fn handle_type() -> ffi::HandleType {
        ffi::SQL_HANDLE_ENV
    }
}
/// Creates an ODBC Environment and declares that the specification of version 3.0 is used
///
/// # Example
/// ```
/// use odbc::*;
/// fn do_database_stuff() -> std::result::Result<(), Option<DiagnosticRecord>> {
///     let env = create_environment_v3()?; // first thing to do
///     // ...
///     Ok(())
/// }
/// ```
///
/// # Return
///
/// While most functions in this crate return a `DiagnosticRecord` in the event of an Error the
/// creation of an environment is special. Since `DiagnosticRecord`s are created using the
/// environment, at least its allocation has to be successful to obtain one. If the allocation
/// fails it is sadly not possible to receive further Diagnostics. Setting an unsupported version
/// may however result in an ordinary `Some(DiagnosticRecord)`.
pub fn create_environment_v3()
    -> std::result::Result<Environment<Version3>, Option<DiagnosticRecord>>
{
    // Shorthand for `Environment::<Version3>::new()`.
    Environment::new()
}
|
use std::fmt;
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use crate::runtime::Handle;
use crate::task::{JoinError, JoinHandle, LocalSet};
use crate::util::IdleNotifiedSet;
/// A collection of tasks spawned on a Tokio runtime.
///
/// A `JoinSet` can be used to await the completion of some or all of the tasks
/// in the set. The set is not ordered, and the tasks will be returned in the
/// order they complete.
///
/// All of the tasks must have the same return type `T`.
///
/// When the `JoinSet` is dropped, all tasks in the `JoinSet` are immediately aborted.
///
/// **Note**: This is an [unstable API][unstable]. The public API of this type
/// may break in 1.x releases. See [the documentation on unstable
/// features][unstable] for details.
///
/// # Examples
///
/// Spawn multiple tasks and wait for them.
///
/// ```
/// use tokio::task::JoinSet;
///
/// #[tokio::main]
/// async fn main() {
/// let mut set = JoinSet::new();
///
/// for i in 0..10 {
/// set.spawn(async move { i });
/// }
///
/// let mut seen = [false; 10];
/// while let Some(res) = set.join_one().await.unwrap() {
/// seen[res] = true;
/// }
///
/// for i in 0..10 {
/// assert!(seen[i]);
/// }
/// }
/// ```
///
/// [unstable]: crate#unstable-features
// `doc(cfg(...))` on the re-export does not attach the cfgs to the type
// itself, so the attribute must live here for docs.rs to show that `JoinSet`
// requires the `rt` feature and unstable features.
#[cfg_attr(docsrs, doc(cfg(all(feature = "rt", tokio_unstable))))]
pub struct JoinSet<T> {
    // Join handles of every task in the set, with completion-wakeup tracking.
    inner: IdleNotifiedSet<JoinHandle<T>>,
}
impl<T> JoinSet<T> {
    /// Create a new `JoinSet`.
    ///
    /// The set starts empty; tasks are added with the `spawn` family of methods.
    pub fn new() -> Self {
        Self {
            inner: IdleNotifiedSet::new(),
        }
    }
    /// Returns the number of tasks currently in the `JoinSet`.
    pub fn len(&self) -> usize {
        self.inner.len()
    }
    /// Returns whether the `JoinSet` is empty.
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
}
impl<T: 'static> JoinSet<T> {
    /// Spawn the provided task on the `JoinSet`.
    ///
    /// # Panics
    ///
    /// This method panics if called outside of a Tokio runtime.
    pub fn spawn<F>(&mut self, task: F)
    where
        F: Future<Output = T>,
        F: Send + 'static,
        T: Send,
    {
        self.insert(crate::spawn(task));
    }
    /// Spawn the provided task on the provided runtime and store it in this `JoinSet`.
    pub fn spawn_on<F>(&mut self, task: F, handle: &Handle)
    where
        F: Future<Output = T>,
        F: Send + 'static,
        T: Send,
    {
        self.insert(handle.spawn(task));
    }
    /// Spawn the provided task on the current [`LocalSet`] and store it in this `JoinSet`.
    ///
    /// # Panics
    ///
    /// This method panics if it is called outside of a `LocalSet`.
    ///
    /// [`LocalSet`]: crate::task::LocalSet
    pub fn spawn_local<F>(&mut self, task: F)
    where
        F: Future<Output = T>,
        F: 'static,
    {
        self.insert(crate::task::spawn_local(task));
    }
    /// Spawn the provided task on the provided [`LocalSet`] and store it in this `JoinSet`.
    ///
    /// [`LocalSet`]: crate::task::LocalSet
    pub fn spawn_local_on<F>(&mut self, task: F, local_set: &LocalSet)
    where
        F: Future<Output = T>,
        F: 'static,
    {
        self.insert(local_set.spawn_local(task));
    }
    /// Stores a spawned task's handle in the set, initially on the idle list.
    fn insert(&mut self, jh: JoinHandle<T>) {
        let mut entry = self.inner.insert_idle(jh);
        // Set the waker that is notified when the task completes.
        entry.with_value_and_context(|jh, ctx| jh.set_join_waker(ctx.waker()));
    }
    /// Waits until one of the tasks in the set completes and returns its output.
    ///
    /// Returns `None` if the set is empty.
    ///
    /// # Cancel Safety
    ///
    /// This method is cancel safe. If `join_one` is used as the event in a `tokio::select!`
    /// statement and some other branch completes first, it is guaranteed that no tasks were
    /// removed from this `JoinSet`.
    pub async fn join_one(&mut self) -> Result<Option<T>, JoinError> {
        crate::future::poll_fn(|cx| self.poll_join_one(cx)).await
    }
    /// Aborts all tasks and waits for them to finish shutting down.
    ///
    /// Calling this method is equivalent to calling [`abort_all`] and then calling [`join_one`] in
    /// a loop until it returns `Ok(None)`.
    ///
    /// This method ignores any panics in the tasks shutting down. When this call returns, the
    /// `JoinSet` will be empty.
    ///
    /// [`abort_all`]: fn@Self::abort_all
    /// [`join_one`]: fn@Self::join_one
    pub async fn shutdown(&mut self) {
        self.abort_all();
        // `transpose` maps `Ok(None)` (set drained) to `None`, ending the loop;
        // both `Ok(Some(_))` and `Err(_)` (panicked/aborted task) keep looping.
        while self.join_one().await.transpose().is_some() {}
    }
    /// Aborts all tasks on this `JoinSet`.
    ///
    /// This does not remove the tasks from the `JoinSet`. To wait for the tasks to complete
    /// cancellation, you should call `join_one` in a loop until the `JoinSet` is empty.
    pub fn abort_all(&mut self) {
        self.inner.for_each(|jh| jh.abort());
    }
    /// Removes all tasks from this `JoinSet` without aborting them.
    ///
    /// The tasks removed by this call will continue to run in the background even if the `JoinSet`
    /// is dropped.
    pub fn detach_all(&mut self) {
        // Dropping a `JoinHandle` detaches its task rather than aborting it.
        self.inner.drain(drop);
    }
    /// Polls for one of the tasks in the set to complete.
    ///
    /// If this returns `Poll::Ready(Ok(Some(_)))` or `Poll::Ready(Err(_))`, then the task that
    /// completed is removed from the set.
    ///
    /// When the method returns `Poll::Pending`, the `Waker` in the provided `Context` is scheduled
    /// to receive a wakeup when a task in the `JoinSet` completes. Note that on multiple calls to
    /// `poll_join_one`, only the `Waker` from the `Context` passed to the most recent call is
    /// scheduled to receive a wakeup.
    ///
    /// # Returns
    ///
    /// This function returns:
    ///
    /// * `Poll::Pending` if the `JoinSet` is not empty but there is no task whose output is
    ///    available right now.
    /// * `Poll::Ready(Ok(Some(value)))` if one of the tasks in this `JoinSet` has completed. The
    ///    `value` is the return value of one of the tasks that completed.
    /// * `Poll::Ready(Err(err))` if one of the tasks in this `JoinSet` has panicked or been
    ///    aborted.
    /// * `Poll::Ready(Ok(None))` if the `JoinSet` is empty.
    ///
    /// Note that this method may return `Poll::Pending` even if one of the tasks has completed.
    /// This can happen if the [coop budget] is reached.
    ///
    /// [coop budget]: crate::task#cooperative-scheduling
    fn poll_join_one(&mut self, cx: &mut Context<'_>) -> Poll<Result<Option<T>, JoinError>> {
        // The call to `pop_notified` moves the entry to the `idle` list. It is moved back to
        // the `notified` list if the waker is notified in the `poll` call below.
        let mut entry = match self.inner.pop_notified(cx.waker()) {
            Some(entry) => entry,
            None => {
                if self.is_empty() {
                    return Poll::Ready(Ok(None));
                } else {
                    // The waker was set by `pop_notified`.
                    return Poll::Pending;
                }
            }
        };
        let res = entry.with_value_and_context(|jh, ctx| Pin::new(jh).poll(ctx));
        if let Poll::Ready(res) = res {
            entry.remove();
            // `Some(Ok(v)).transpose()` is `Ok(Some(v))`; `Some(Err(e))` becomes `Err(e)`.
            Poll::Ready(Some(res).transpose())
        } else {
            // A JoinHandle generally won't emit a wakeup without being ready unless
            // the coop limit has been reached. We yield to the executor in this
            // case.
            cx.waker().wake_by_ref();
            Poll::Pending
        }
    }
}
impl<T> Drop for JoinSet<T> {
    // Dropping the set aborts every remaining task, matching the contract in
    // the type-level documentation.
    fn drop(&mut self) {
        self.inner.drain(|join_handle| join_handle.abort());
    }
}
impl<T> fmt::Debug for JoinSet<T> {
    // Only the task count is shown; task outputs are not required to be `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("JoinSet").field("len", &self.len()).finish()
    }
}
impl<T> Default for JoinSet<T> {
    // Equivalent to `JoinSet::new()`: an empty set.
    fn default() -> Self {
        Self::new()
    }
}
task: fix missing doc(cfg(...)) attributes for `JoinSet` (#4531)
## Motivation
The `JoinSet` type is currently missing the `tokio_unstable` and
`feature = "rt"` `doc(cfg(...))` attributes, making it erroneously
appear to be available without the required feature and without unstable
features enabled. This is incorrect.
I believe this is because `doc(cfg(...))` on a re-export doesn't
actually add the required cfgs to the type itself, and the
`cfg_unstable!` is currently only guarding a re-export and module.
## Solution
This PR fixes the missing attributes.
use std::fmt;
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use crate::runtime::Handle;
use crate::task::{JoinError, JoinHandle, LocalSet};
use crate::util::IdleNotifiedSet;
/// A collection of tasks spawned on a Tokio runtime.
///
/// A `JoinSet` can be used to await the completion of some or all of the tasks
/// in the set. The set is not ordered, and the tasks will be returned in the
/// order they complete.
///
/// All of the tasks must have the same return type `T`.
///
/// When the `JoinSet` is dropped, all tasks in the `JoinSet` are immediately aborted.
///
/// **Note**: This is an [unstable API][unstable]. The public API of this type
/// may break in 1.x releases. See [the documentation on unstable
/// features][unstable] for details.
///
/// # Examples
///
/// Spawn multiple tasks and wait for them.
///
/// ```
/// use tokio::task::JoinSet;
///
/// #[tokio::main]
/// async fn main() {
/// let mut set = JoinSet::new();
///
/// for i in 0..10 {
/// set.spawn(async move { i });
/// }
///
/// let mut seen = [false; 10];
/// while let Some(res) = set.join_one().await.unwrap() {
/// seen[res] = true;
/// }
///
/// for i in 0..10 {
/// assert!(seen[i]);
/// }
/// }
/// ```
///
/// [unstable]: crate#unstable-features
#[cfg_attr(docsrs, doc(cfg(all(feature = "rt", tokio_unstable))))]
pub struct JoinSet<T> {
    // Join handles for all tasks in the set; completion wakeups move entries
    // between the internal idle and notified lists.
    inner: IdleNotifiedSet<JoinHandle<T>>,
}
impl<T> JoinSet<T> {
    /// Create a new `JoinSet`.
    ///
    /// The new set contains no tasks; use the `spawn` methods to add some.
    pub fn new() -> Self {
        Self {
            inner: IdleNotifiedSet::new(),
        }
    }
    /// Returns the number of tasks currently in the `JoinSet`.
    pub fn len(&self) -> usize {
        self.inner.len()
    }
    /// Returns whether the `JoinSet` is empty.
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
}
impl<T: 'static> JoinSet<T> {
    /// Spawn the provided task on the `JoinSet`.
    ///
    /// # Panics
    ///
    /// This method panics if called outside of a Tokio runtime.
    pub fn spawn<F>(&mut self, task: F)
    where
        F: Future<Output = T>,
        F: Send + 'static,
        T: Send,
    {
        self.insert(crate::spawn(task));
    }
    /// Spawn the provided task on the provided runtime and store it in this `JoinSet`.
    pub fn spawn_on<F>(&mut self, task: F, handle: &Handle)
    where
        F: Future<Output = T>,
        F: Send + 'static,
        T: Send,
    {
        self.insert(handle.spawn(task));
    }
    /// Spawn the provided task on the current [`LocalSet`] and store it in this `JoinSet`.
    ///
    /// # Panics
    ///
    /// This method panics if it is called outside of a `LocalSet`.
    ///
    /// [`LocalSet`]: crate::task::LocalSet
    pub fn spawn_local<F>(&mut self, task: F)
    where
        F: Future<Output = T>,
        F: 'static,
    {
        self.insert(crate::task::spawn_local(task));
    }
    /// Spawn the provided task on the provided [`LocalSet`] and store it in this `JoinSet`.
    ///
    /// [`LocalSet`]: crate::task::LocalSet
    pub fn spawn_local_on<F>(&mut self, task: F, local_set: &LocalSet)
    where
        F: Future<Output = T>,
        F: 'static,
    {
        self.insert(local_set.spawn_local(task));
    }
    /// Adds the handle of a freshly spawned task to the set's idle list.
    fn insert(&mut self, jh: JoinHandle<T>) {
        let mut entry = self.inner.insert_idle(jh);
        // Set the waker that is notified when the task completes.
        entry.with_value_and_context(|jh, ctx| jh.set_join_waker(ctx.waker()));
    }
    /// Waits until one of the tasks in the set completes and returns its output.
    ///
    /// Returns `None` if the set is empty.
    ///
    /// # Cancel Safety
    ///
    /// This method is cancel safe. If `join_one` is used as the event in a `tokio::select!`
    /// statement and some other branch completes first, it is guaranteed that no tasks were
    /// removed from this `JoinSet`.
    pub async fn join_one(&mut self) -> Result<Option<T>, JoinError> {
        crate::future::poll_fn(|cx| self.poll_join_one(cx)).await
    }
    /// Aborts all tasks and waits for them to finish shutting down.
    ///
    /// Calling this method is equivalent to calling [`abort_all`] and then calling [`join_one`] in
    /// a loop until it returns `Ok(None)`.
    ///
    /// This method ignores any panics in the tasks shutting down. When this call returns, the
    /// `JoinSet` will be empty.
    ///
    /// [`abort_all`]: fn@Self::abort_all
    /// [`join_one`]: fn@Self::join_one
    pub async fn shutdown(&mut self) {
        self.abort_all();
        // Keep joining while `transpose` yields `Some` (`Ok(Some(_))` or
        // `Err(_)`); `Ok(None)` means the set is empty and ends the loop.
        while self.join_one().await.transpose().is_some() {}
    }
    /// Aborts all tasks on this `JoinSet`.
    ///
    /// This does not remove the tasks from the `JoinSet`. To wait for the tasks to complete
    /// cancellation, you should call `join_one` in a loop until the `JoinSet` is empty.
    pub fn abort_all(&mut self) {
        self.inner.for_each(|jh| jh.abort());
    }
    /// Removes all tasks from this `JoinSet` without aborting them.
    ///
    /// The tasks removed by this call will continue to run in the background even if the `JoinSet`
    /// is dropped.
    pub fn detach_all(&mut self) {
        // Dropping a `JoinHandle` detaches the task instead of aborting it.
        self.inner.drain(drop);
    }
    /// Polls for one of the tasks in the set to complete.
    ///
    /// If this returns `Poll::Ready(Ok(Some(_)))` or `Poll::Ready(Err(_))`, then the task that
    /// completed is removed from the set.
    ///
    /// When the method returns `Poll::Pending`, the `Waker` in the provided `Context` is scheduled
    /// to receive a wakeup when a task in the `JoinSet` completes. Note that on multiple calls to
    /// `poll_join_one`, only the `Waker` from the `Context` passed to the most recent call is
    /// scheduled to receive a wakeup.
    ///
    /// # Returns
    ///
    /// This function returns:
    ///
    /// * `Poll::Pending` if the `JoinSet` is not empty but there is no task whose output is
    ///    available right now.
    /// * `Poll::Ready(Ok(Some(value)))` if one of the tasks in this `JoinSet` has completed. The
    ///    `value` is the return value of one of the tasks that completed.
    /// * `Poll::Ready(Err(err))` if one of the tasks in this `JoinSet` has panicked or been
    ///    aborted.
    /// * `Poll::Ready(Ok(None))` if the `JoinSet` is empty.
    ///
    /// Note that this method may return `Poll::Pending` even if one of the tasks has completed.
    /// This can happen if the [coop budget] is reached.
    ///
    /// [coop budget]: crate::task#cooperative-scheduling
    fn poll_join_one(&mut self, cx: &mut Context<'_>) -> Poll<Result<Option<T>, JoinError>> {
        // The call to `pop_notified` moves the entry to the `idle` list. It is moved back to
        // the `notified` list if the waker is notified in the `poll` call below.
        let mut entry = match self.inner.pop_notified(cx.waker()) {
            Some(entry) => entry,
            None => {
                if self.is_empty() {
                    return Poll::Ready(Ok(None));
                } else {
                    // The waker was set by `pop_notified`.
                    return Poll::Pending;
                }
            }
        };
        let res = entry.with_value_and_context(|jh, ctx| Pin::new(jh).poll(ctx));
        if let Poll::Ready(res) = res {
            entry.remove();
            // `Some(Ok(v)).transpose()` -> `Ok(Some(v))`; `Some(Err(e))` -> `Err(e)`.
            Poll::Ready(Some(res).transpose())
        } else {
            // A JoinHandle generally won't emit a wakeup without being ready unless
            // the coop limit has been reached. We yield to the executor in this
            // case.
            cx.waker().wake_by_ref();
            Poll::Pending
        }
    }
}
impl<T> Drop for JoinSet<T> {
    // All remaining tasks are aborted on drop, as promised by the type docs.
    fn drop(&mut self) {
        self.inner.drain(|join_handle| join_handle.abort());
    }
}
impl<T> fmt::Debug for JoinSet<T> {
    // Shows only the number of tasks; `T` need not implement `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("JoinSet").field("len", &self.len()).finish()
    }
}
impl<T> Default for JoinSet<T> {
    // Same as `JoinSet::new()`: an empty set.
    fn default() -> Self {
        Self::new()
    }
}
|
//! Definition of the `JoinAll` combinator, waiting for all of a list of futures
//! to finish.
use std::prelude::v1::*;
use std::fmt;
use std::mem;
use {Future, IntoFuture, Poll, Async};
// Per-future slot: either still running (`Pending`), or finished with its
// output parked in place (`Done`) until every future has completed.
#[derive(Debug)]
enum ElemState<T> where T: Future {
    Pending(T),
    Done(T::Item),
}
/// A future which takes a list of futures and resolves with a vector of the
/// completed values.
///
/// This future is created with the `join_all` function.
#[must_use = "futures do nothing unless polled"]
pub struct JoinAll<I>
    where I: IntoIterator,
          I::Item: IntoFuture,
{
    // One slot per input future; results are stored back into the same slot,
    // so the output `Vec` preserves the input order.
    elems: Vec<ElemState<<I::Item as IntoFuture>::Future>>,
}
// `Debug` is implemented manually so the bounds can be stated on the stored
// future and item types rather than on `I` itself.
impl<I> fmt::Debug for JoinAll<I>
    where I: IntoIterator,
          I::Item: IntoFuture,
          <<I as IntoIterator>::Item as IntoFuture>::Future: fmt::Debug,
          <<I as IntoIterator>::Item as IntoFuture>::Item: fmt::Debug,
{
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("JoinAll")
            .field("elems", &self.elems)
            .finish()
    }
}
/// Creates a future which represents a collection of the results of the futures
/// given.
///
/// The returned future will drive execution for all of its underlying futures,
/// collecting the results into a destination `Vec<T>` in the same order as they
/// were provided. If any future returns an error then all other futures will be
/// canceled and an error will be returned immediately. If all futures complete
/// successfully, however, then the returned future will succeed with a `Vec` of
/// all the successful results.
///
/// # Examples
///
/// ```
/// use futures::future::*;
///
/// let f = join_all(vec![
///     ok::<u32, u32>(1),
///     ok::<u32, u32>(2),
///     ok::<u32, u32>(3),
/// ]);
/// let f = f.map(|x| {
///     assert_eq!(x, [1, 2, 3]);
/// });
///
/// let f = join_all(vec![
///     ok::<u32, u32>(1).boxed(),
///     err::<u32, u32>(2).boxed(),
///     ok::<u32, u32>(3).boxed(),
/// ]);
/// let f = f.then(|x| {
///     assert_eq!(x, Err(2));
///     x
/// });
/// ```
pub fn join_all<I>(i: I) -> JoinAll<I>
    where I: IntoIterator,
          I::Item: IntoFuture,
{
    // Every input starts in the `Pending` state; slots keep the input order.
    let elems = i.into_iter().map(|f| {
        ElemState::Pending(f.into_future())
    }).collect();
    JoinAll { elems: elems }
}
impl<I> Future for JoinAll<I>
    where I: IntoIterator,
          I::Item: IntoFuture,
{
    type Item = Vec<<I::Item as IntoFuture>::Item>;
    type Error = <I::Item as IntoFuture>::Error;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        let mut all_done = true;
        // Poll every still-pending element; a ready value is stored back into
        // its own slot, preserving the input order of the results.
        for idx in 0 .. self.elems.len() {
            let done_val = match self.elems[idx] {
                ElemState::Pending(ref mut t) => {
                    match t.poll() {
                        Ok(Async::Ready(v)) => Ok(v),
                        Ok(Async::NotReady) => {
                            all_done = false;
                            continue
                        }
                        Err(e) => Err(e),
                    }
                }
                // Finished on an earlier poll; nothing to do for this slot.
                ElemState::Done(ref mut _v) => continue,
            };
            match done_val {
                Ok(v) => self.elems[idx] = ElemState::Done(v),
                Err(e) => {
                    // On completion drop all our associated resources
                    // ASAP.
                    self.elems = Vec::new();
                    return Err(e)
                }
            }
        }
        if all_done {
            // Move the slots out and unwrap each stored value.
            let elems = mem::replace(&mut self.elems, Vec::new());
            let result = elems.into_iter().map(|e| {
                match e {
                    ElemState::Done(t) => t,
                    // `all_done` guarantees every slot is `Done` here.
                    _ => unreachable!(),
                }
            }).collect();
            Ok(Async::Ready(result))
        } else {
            Ok(Async::NotReady)
        }
    }
}
Noted ordering guarantee in join_all documentation
This addresses issue https://github.com/alexcrichton/futures-rs/issues/524
//! Definition of the `JoinAll` combinator, waiting for all of a list of futures
//! to finish.
use std::prelude::v1::*;
use std::fmt;
use std::mem;
use {Future, IntoFuture, Poll, Async};
// State of one input future: running (`Pending`) or completed with its value
// held in place (`Done`) until the whole collection resolves.
#[derive(Debug)]
enum ElemState<T> where T: Future {
    Pending(T),
    Done(T::Item),
}
/// A future which takes a list of futures and resolves with a vector of the
/// completed values.
///
/// This future is created with the `join_all` function.
#[must_use = "futures do nothing unless polled"]
pub struct JoinAll<I>
    where I: IntoIterator,
          I::Item: IntoFuture,
{
    // One slot per input future, in input order; completed values replace
    // their futures in place.
    elems: Vec<ElemState<<I::Item as IntoFuture>::Future>>,
}
// Manual `Debug` impl: the bounds are placed on the stored future and item
// types, which is what actually needs to be formatted.
impl<I> fmt::Debug for JoinAll<I>
    where I: IntoIterator,
          I::Item: IntoFuture,
          <<I as IntoIterator>::Item as IntoFuture>::Future: fmt::Debug,
          <<I as IntoIterator>::Item as IntoFuture>::Item: fmt::Debug,
{
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("JoinAll")
            .field("elems", &self.elems)
            .finish()
    }
}
/// Creates a future which represents a collection of the results of the futures
/// given.
///
/// The returned future will drive execution for all of its underlying futures,
/// collecting the results into a destination `Vec<T>` in the same order as they
/// were provided. If any future returns an error then all other futures will be
/// canceled and an error will be returned immediately. If all futures complete
/// successfully, however, then the returned future will succeed with a `Vec` of
/// all the successful results.
///
/// # Examples
///
/// ```
/// use futures::future::*;
///
/// let f = join_all(vec![
///     ok::<u32, u32>(1),
///     ok::<u32, u32>(2),
///     ok::<u32, u32>(3),
/// ]);
/// let f = f.map(|x| {
///     assert_eq!(x, [1, 2, 3]);
/// });
///
/// let f = join_all(vec![
///     ok::<u32, u32>(1).boxed(),
///     err::<u32, u32>(2).boxed(),
///     ok::<u32, u32>(3).boxed(),
/// ]);
/// let f = f.then(|x| {
///     assert_eq!(x, Err(2));
///     x
/// });
/// ```
pub fn join_all<I>(i: I) -> JoinAll<I>
    where I: IntoIterator,
          I::Item: IntoFuture,
{
    // Wrap every input in a `Pending` slot; slot order equals result order.
    let elems = i.into_iter().map(|f| {
        ElemState::Pending(f.into_future())
    }).collect();
    JoinAll { elems: elems }
}
impl<I> Future for JoinAll<I>
    where I: IntoIterator,
          I::Item: IntoFuture,
{
    type Item = Vec<<I::Item as IntoFuture>::Item>;
    type Error = <I::Item as IntoFuture>::Error;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        let mut all_done = true;
        // Drive each pending element; ready values replace their futures in
        // place, so the final vector keeps the input order.
        for idx in 0 .. self.elems.len() {
            let done_val = match self.elems[idx] {
                ElemState::Pending(ref mut t) => {
                    match t.poll() {
                        Ok(Async::Ready(v)) => Ok(v),
                        Ok(Async::NotReady) => {
                            all_done = false;
                            continue
                        }
                        Err(e) => Err(e),
                    }
                }
                // This slot finished on a previous poll.
                ElemState::Done(ref mut _v) => continue,
            };
            match done_val {
                Ok(v) => self.elems[idx] = ElemState::Done(v),
                Err(e) => {
                    // On completion drop all our associated resources
                    // ASAP.
                    self.elems = Vec::new();
                    return Err(e)
                }
            }
        }
        if all_done {
            // All slots are `Done`; move them out and unwrap the values.
            let elems = mem::replace(&mut self.elems, Vec::new());
            let result = elems.into_iter().map(|e| {
                match e {
                    ElemState::Done(t) => t,
                    // Unreachable: `all_done` implies every slot is `Done`.
                    _ => unreachable!(),
                }
            }).collect();
            Ok(Async::Ready(result))
        } else {
            Ok(Async::NotReady)
        }
    }
}
|
// (C)opyleft 2013 Frank Denis
/*!
* HyperLogLog implementation for Rust
*/
#[desc = "A hyperloglog implementation."];
#[license = "BSD"];
#[crate_id = "hyperloglog#0.1"];
#[crate_type = "rlib"];
#[warn(non_camel_case_types,
non_uppercase_statics,
unnecessary_qualification,
managed_heap_memory)];
extern crate extra;
use std::num;
use std::rand;
use std::vec;
// Threshold table with one entry per supported precision.
// NOTE(review): appears to be the HyperLogLog++ bias-correction threshold
// data — confirm against the paper's appendix. The historical "TRESHOLD"
// spelling is kept as-is; renaming would break references elsewhere.
static TRESHOLD_DATA: [f64, ..15] =
    [10.0, 20.0, 40.0, 80.0, 220.0, 400.0, 900.0, 1800.0, 3100.0, 6500.0,
     11500.0, 20000.0, 50000.0, 120000.0, 350000.0];
static RAW_ESTIMATE_DATA: &'static [&'static [f64]] =
&[&[11.0, 11.717, 12.207, 12.7896, 13.2882, 13.8204, 14.3772, 14.9342,
15.5202, 16.161, 16.7722, 17.4636, 18.0396, 18.6766, 19.3566, 20.0454,
20.7936, 21.4856, 22.2666, 22.9946, 23.766, 24.4692, 25.3638, 26.0764,
26.7864, 27.7602, 28.4814, 29.433, 30.2926, 31.0664, 31.9996, 32.7956,
33.5366, 34.5894, 35.5738, 36.2698, 37.3682, 38.0544, 39.2342,
40.0108, 40.7966, 41.9298, 42.8704, 43.6358, 44.5194, 45.773, 46.6772,
47.6174, 48.4888, 49.3304, 50.2506, 51.4996, 52.3824, 53.3078,
54.3984, 55.5838, 56.6618, 57.2174, 58.3514, 59.0802, 60.1482,
61.0376, 62.3598, 62.8078, 63.9744, 64.914, 65.781, 67.1806, 68.0594,
68.8446, 69.7928, 70.8248, 71.8324, 72.8598, 73.6246, 74.7014, 75.393,
76.6708, 77.2394],
&[23.0, 23.1194, 23.8208, 24.2318, 24.77, 25.2436, 25.7774, 26.2848,
26.8224, 27.3742, 27.9336, 28.503, 29.0494, 29.6292, 30.2124, 30.798,
31.367, 31.9728, 32.5944, 33.217, 33.8438, 34.3696, 35.0956, 35.7044,
36.324, 37.0668, 37.6698, 38.3644, 39.049, 39.6918, 40.4146, 41.082,
41.687, 42.5398, 43.2462, 43.857, 44.6606, 45.4168, 46.1248, 46.9222,
47.6804, 48.447, 49.3454, 49.9594, 50.7636, 51.5776, 52.331, 53.19,
53.9676, 54.7564, 55.5314, 56.4442, 57.3708, 57.9774, 58.9624,
59.8796, 60.755, 61.472, 62.2076, 63.1024, 63.8908, 64.7338, 65.7728,
66.629, 67.413, 68.3266, 69.1524, 70.2642, 71.1806, 72.0566, 72.9192,
73.7598, 74.3516, 75.5802, 76.4386, 77.4916, 78.1524, 79.1892,
79.8414, 80.8798, 81.8376, 82.4698, 83.7656, 84.331, 85.5914, 86.6012,
87.7016, 88.5582, 89.3394, 90.3544, 91.4912, 92.308, 93.3552, 93.9746,
95.2052, 95.727, 97.1322, 98.3944, 98.7588, 100.242, 101.1914,
102.2538, 102.8776, 103.6292, 105.1932, 105.9152, 107.0868, 107.6728,
108.7144, 110.3114, 110.8716, 111.245, 112.7908, 113.7064, 114.636,
115.7464, 116.1788, 117.7464, 118.4896, 119.6166, 120.5082, 121.7798,
122.9028, 123.4426, 124.8854, 125.705, 126.4652, 128.3464, 128.3462,
130.0398, 131.0342, 131.0042, 132.4766, 133.511, 134.7252, 135.425,
136.5172, 138.0572, 138.6694, 139.3712, 140.8598, 141.4594, 142.554,
143.4006, 144.7374, 146.1634, 146.8994, 147.605, 147.9304, 149.1636,
150.2468, 151.5876, 152.2096, 153.7032, 154.7146, 155.807, 156.9228,
157.0372, 158.5852],
&[46.0, 46.1902, 47.271, 47.8358, 48.8142, 49.2854, 50.317, 51.354,
51.8924, 52.9436, 53.4596, 54.5262, 55.6248, 56.1574, 57.2822, 57.837,
58.9636, 60.074, 60.7042, 61.7976, 62.4772, 63.6564, 64.7942, 65.5004,
66.686, 67.291, 68.5672, 69.8556, 70.4982, 71.8204, 72.4252, 73.7744,
75.0786, 75.8344, 77.0294, 77.8098, 79.0794, 80.5732, 81.1878,
82.5648, 83.2902, 84.6784, 85.3352, 86.8946, 88.3712, 89.0852, 90.499,
91.2686, 92.6844, 94.2234, 94.9732, 96.3356, 97.2286, 98.7262,
100.3284, 101.1048, 102.5962, 103.3562, 105.1272, 106.4184, 107.4974,
109.0822, 109.856, 111.48, 113.2834, 114.0208, 115.637, 116.5174,
118.0576, 119.7476, 120.427, 122.1326, 123.2372, 125.2788, 126.6776,
127.7926, 129.1952, 129.9564, 131.6454, 133.87, 134.5428, 136.2,
137.0294, 138.6278, 139.6782, 141.792, 143.3516, 144.2832, 146.0394,
147.0748, 148.4912, 150.849, 151.696, 153.5404, 154.073, 156.3714,
157.7216, 158.7328, 160.4208, 161.4184, 163.9424, 165.2772, 166.411,
168.1308, 168.769, 170.9258, 172.6828, 173.7502, 175.706, 176.3886,
179.0186, 180.4518, 181.927, 183.4172, 184.4114, 186.033, 188.5124,
189.5564, 191.6008, 192.4172, 193.8044, 194.997, 197.4548, 198.8948,
200.2346, 202.3086, 203.1548, 204.8842, 206.6508, 206.6772, 209.7254,
210.4752, 212.7228, 214.6614, 215.1676, 217.793, 218.0006, 219.9052,
221.66, 223.5588, 225.1636, 225.6882, 227.7126, 229.4502, 231.1978,
232.9756, 233.1654, 236.727, 238.1974, 237.7474, 241.1346, 242.3048,
244.1948, 245.3134, 246.879, 249.1204, 249.853, 252.6792, 253.857,
254.4486, 257.2362, 257.9534, 260.0286, 260.5632, 262.663, 264.723,
265.7566, 267.2566, 267.1624, 270.62, 272.8216, 273.2166, 275.2056,
276.2202, 278.3726, 280.3344, 281.9284, 283.9728, 284.1924, 286.4872,
287.587, 289.807, 291.1206, 292.769, 294.8708, 296.665, 297.1182,
299.4012, 300.6352, 302.1354, 304.1756, 306.1606, 307.3462, 308.5214,
309.4134, 310.8352, 313.9684, 315.837, 316.7796, 318.9858],
&[92.0, 93.4934, 94.9758, 96.4574, 97.9718, 99.4954, 101.5302, 103.0756,
104.6374, 106.1782, 107.7888, 109.9522, 111.592, 113.2532, 114.9086,
116.5938, 118.9474, 120.6796, 122.4394, 124.2176, 125.9768, 128.4214,
130.2528, 132.0102, 133.8658, 135.7278, 138.3044, 140.1316, 142.093,
144.0032, 145.9092, 148.6306, 150.5294, 152.5756, 154.6508, 156.662,
159.552, 161.3724, 163.617, 165.5754, 167.7872, 169.8444, 172.7988,
174.8606, 177.2118, 179.3566, 181.4476, 184.5882, 186.6816, 189.0824,
191.0258, 193.6048, 196.4436, 198.7274, 200.957, 203.147, 205.4364,
208.7592, 211.3386, 213.781, 215.8028, 218.656, 221.6544, 223.996,
226.4718, 229.1544, 231.6098, 234.5956, 237.0616, 239.5758, 242.4878,
244.5244, 248.2146, 250.724, 252.8722, 255.5198, 258.0414, 261.941,
264.9048, 266.87, 269.4304, 272.028, 274.4708, 278.37, 281.0624,
283.4668, 286.5532, 289.4352, 293.2564, 295.2744, 298.2118, 300.7472,
304.1456, 307.2928, 309.7504, 312.5528, 315.979, 318.2102, 322.1834,
324.3494, 327.325, 330.6614, 332.903, 337.2544, 339.9042, 343.215,
345.2864, 348.0814, 352.6764, 355.301, 357.139, 360.658, 363.1732,
366.5902, 369.9538, 373.0828, 375.922, 378.9902, 382.7328, 386.4538,
388.1136, 391.2234, 394.0878, 396.708, 401.1556, 404.1852, 406.6372,
409.6822, 412.7796, 416.6078, 418.4916, 422.131, 424.5376, 428.1988,
432.211, 434.4502, 438.5282, 440.912, 444.0448, 447.7432, 450.8524,
453.7988, 456.7858, 458.8868, 463.9886, 466.5064, 468.9124, 472.6616,
475.4682, 478.582, 481.304, 485.2738, 488.6894, 490.329, 496.106,
497.6908, 501.1374, 504.5322, 506.8848, 510.3324, 513.4512, 516.179,
520.4412, 522.6066, 526.167, 528.7794, 533.379, 536.067, 538.46,
542.9116, 545.692, 547.9546, 552.493, 555.2722, 557.335, 562.449,
564.2014, 569.0738, 571.0974, 574.8564, 578.2996, 581.409, 583.9704,
585.8098, 589.6528, 594.5998, 595.958, 600.068, 603.3278, 608.2016,
609.9632, 612.864, 615.43, 620.7794, 621.272, 625.8644, 629.206,
633.219, 634.5154, 638.6102],
&[184.2152, 187.2454, 190.2096, 193.6652, 196.6312, 199.6822, 203.249,
206.3296, 210.0038, 213.2074, 216.4612, 220.27, 223.5178, 227.4412,
230.8032, 234.1634, 238.1688, 241.6074, 245.6946, 249.2664, 252.8228,
257.0432, 260.6824, 264.9464, 268.6268, 272.2626, 276.8376, 280.4034,
284.8956, 288.8522, 292.7638, 297.3552, 301.3556, 305.7526, 309.9292,
313.8954, 318.8198, 322.7668, 327.298, 331.6688, 335.9466, 340.9746,
345.1672, 349.3474, 354.3028, 358.8912, 364.114, 368.4646, 372.9744,
378.4092, 382.6022, 387.843, 392.5684, 397.1652, 402.5426, 407.4152,
412.5388, 417.3592, 422.1366, 427.486, 432.3918, 437.5076, 442.509,
447.3834, 453.3498, 458.0668, 463.7346, 469.1228, 473.4528, 479.7,
484.644, 491.0518, 495.5774, 500.9068, 506.432, 512.1666, 517.434,
522.6644, 527.4894, 533.6312, 538.3804, 544.292, 550.5496, 556.0234,
562.8206, 566.6146, 572.4188, 579.117, 583.6762, 590.6576, 595.7864,
601.509, 607.5334, 612.9204, 619.772, 624.2924, 630.8654, 636.1836,
642.745, 649.1316, 655.0386, 660.0136, 666.6342, 671.6196, 678.1866,
684.4282, 689.3324, 695.4794, 702.5038, 708.129, 713.528, 720.3204,
726.463, 732.7928, 739.123, 744.7418, 751.2192, 756.5102, 762.6066,
769.0184, 775.2224, 781.4014, 787.7618, 794.1436, 798.6506, 805.6378,
811.766, 819.7514, 824.5776, 828.7322, 837.8048, 843.6302, 849.9336,
854.4798, 861.3388, 867.9894, 873.8196, 880.3136, 886.2308, 892.4588,
899.0816, 905.4076, 912.0064, 917.3878, 923.619, 929.998, 937.3482,
943.9506, 947.991, 955.1144, 962.203, 968.8222, 975.7324, 981.7826,
988.7666, 994.2648, 1000.3128, 1007.4082, 1013.7536, 1020.3376,
1026.7156, 1031.7478, 1037.4292, 1045.393, 1051.2278, 1058.3434,
1062.8726, 1071.884, 1076.806, 1082.9176, 1089.1678, 1095.5032,
1102.525, 1107.2264, 1115.315, 1120.93, 1127.252, 1134.1496,
1139.0408, 1147.5448, 1153.3296, 1158.1974, 1166.5262, 1174.3328,
1175.657, 1184.4222, 1190.9172, 1197.1292, 1204.4606, 1210.4578,
1218.8728, 1225.3336, 1226.6592, 1236.5768, 1241.363, 1249.4074,
1254.6566, 1260.8014, 1266.5454, 1274.5192],
&[369.0, 374.8294, 381.2452, 387.6698, 394.1464, 400.2024, 406.8782,
413.6598, 420.462, 427.2826, 433.7102, 440.7416, 447.9366, 455.1046,
462.285, 469.0668, 476.306, 483.8448, 491.301, 498.9886, 506.2422,
513.8138, 521.7074, 529.7428, 537.8402, 545.1664, 553.3534, 561.594,
569.6886, 577.7876, 585.65, 594.228, 602.8036, 611.1666, 620.0818,
628.0824, 637.2574, 646.302, 655.1644, 664.0056, 672.3802, 681.7192,
690.5234, 700.2084, 708.831, 718.485, 728.1112, 737.4764, 746.76,
756.3368, 766.5538, 775.5058, 785.2646, 795.5902, 804.3818, 814.8998,
824.9532, 835.2062, 845.2798, 854.4728, 864.9582, 875.3292, 886.171,
896.781, 906.5716, 916.7048, 927.5322, 937.875, 949.3972, 958.3464,
969.7274, 980.2834, 992.1444, 1003.4264, 1013.0166, 1024.018,
1035.0438, 1046.34, 1057.6856, 1068.9836, 1079.0312, 1091.677,
1102.3188, 1113.4846, 1124.4424, 1135.739, 1147.1488, 1158.9202,
1169.406, 1181.5342, 1193.2834, 1203.8954, 1216.3286, 1226.2146,
1239.6684, 1251.9946, 1262.123, 1275.4338, 1285.7378, 1296.076,
1308.9692, 1320.4964, 1333.0998, 1343.9864, 1357.7754, 1368.3208,
1380.4838, 1392.7388, 1406.0758, 1416.9098, 1428.9728, 1440.9228,
1453.9292, 1462.617, 1476.05, 1490.2996, 1500.6128, 1513.7392,
1524.5174, 1536.6322, 1548.2584, 1562.3766, 1572.423, 1587.1232,
1596.5164, 1610.5938, 1622.5972, 1633.1222, 1647.7674, 1658.5044,
1671.57, 1683.7044, 1695.4142, 1708.7102, 1720.6094, 1732.6522,
1747.841, 1756.4072, 1769.9786, 1782.3276, 1797.5216, 1808.3186,
1819.0694, 1834.354, 1844.575, 1856.2808, 1871.1288, 1880.7852,
1893.9622, 1906.3418, 1920.6548, 1932.9302, 1945.8584, 1955.473,
1968.8248, 1980.6446, 1995.9598, 2008.349, 2019.8556, 2033.0334,
2044.0206, 2059.3956, 2069.9174, 2082.6084, 2093.7036, 2106.6108,
2118.9124, 2132.301, 2144.7628, 2159.8422, 2171.0212, 2183.101,
2193.5112, 2208.052, 2221.3194, 2233.3282, 2247.295, 2257.7222,
2273.342, 2286.5638, 2299.6786, 2310.8114, 2322.3312, 2335.516,
2349.874, 2363.5968, 2373.865, 2387.1918, 2401.8328, 2414.8496,
2424.544, 2436.7592, 2447.1682, 2464.1958, 2474.3438, 2489.0006,
2497.4526, 2513.6586, 2527.19, 2540.7028, 2553.768],
&[738.1256, 750.4234, 763.1064, 775.4732, 788.4636, 801.0644, 814.488,
827.9654, 841.0832, 854.7864, 868.1992, 882.2176, 896.5228, 910.1716,
924.7752, 938.899, 953.6126, 968.6492, 982.9474, 998.5214, 1013.1064,
1028.6364, 1044.2468, 1059.4588, 1075.3832, 1091.0584, 1106.8606,
1123.3868, 1139.5062, 1156.1862, 1172.463, 1189.339, 1206.1936,
1223.1292, 1240.1854, 1257.2908, 1275.3324, 1292.8518, 1310.5204,
1328.4854, 1345.9318, 1364.552, 1381.4658, 1400.4256, 1419.849,
1438.152, 1456.8956, 1474.8792, 1494.118, 1513.62, 1532.5132,
1551.9322, 1570.7726, 1590.6086, 1610.5332, 1630.5918, 1650.4294,
1669.7662, 1690.4106, 1710.7338, 1730.9012, 1750.4486, 1770.1556,
1791.6338, 1812.7312, 1833.6264, 1853.9526, 1874.8742, 1896.8326,
1918.1966, 1939.5594, 1961.07, 1983.037, 2003.1804, 2026.071,
2047.4884, 2070.0848, 2091.2944, 2114.333, 2135.9626, 2158.2902,
2181.0814, 2202.0334, 2224.4832, 2246.39, 2269.7202, 2292.1714,
2314.2358, 2338.9346, 2360.891, 2384.0264, 2408.3834, 2430.1544,
2454.8684, 2476.9896, 2501.4368, 2522.8702, 2548.0408, 2570.6738,
2593.5208, 2617.0158, 2640.2302, 2664.0962, 2687.4986, 2714.2588,
2735.3914, 2759.6244, 2781.8378, 2808.0072, 2830.6516, 2856.2454,
2877.2136, 2903.4546, 2926.785, 2951.2294, 2976.468, 3000.867,
3023.6508, 3049.91, 3073.5984, 3098.162, 3121.5564, 3146.2328,
3170.9484, 3195.5902, 3221.3346, 3242.7032, 3271.6112, 3296.5546,
3317.7376, 3345.072, 3369.9518, 3394.326, 3418.1818, 3444.6926,
3469.086, 3494.2754, 3517.8698, 3544.248, 3565.3768, 3588.7234,
3616.979, 3643.7504, 3668.6812, 3695.72, 3719.7392, 3742.6224,
3770.4456, 3795.6602, 3819.9058, 3844.002, 3869.517, 3895.6824,
3920.8622, 3947.1364, 3973.985, 3995.4772, 4021.62, 4046.628, 4074.65,
4096.2256, 4121.831, 4146.6406, 4173.276, 4195.0744, 4223.9696,
4251.3708, 4272.9966, 4300.8046, 4326.302, 4353.1248, 4374.312,
4403.0322, 4426.819, 4450.0598, 4478.5206, 4504.8116, 4528.8928,
4553.9584, 4578.8712, 4603.8384, 4632.3872, 4655.5128, 4675.821,
4704.6222, 4731.9862, 4755.4174, 4781.2628, 4804.332, 4832.3048,
4862.8752, 4883.4148, 4906.9544, 4935.3516, 4954.3532, 4984.0248,
5011.217, 5035.3258, 5057.3672, 5084.1828],
&[1477.0, 1501.6014, 1526.5802, 1551.7942, 1577.3042, 1603.2062,
1629.8402, 1656.2292, 1682.9462, 1709.9926, 1737.3026, 1765.4252,
1793.0578, 1821.6092, 1849.626, 1878.5568, 1908.527, 1937.5154,
1967.1874, 1997.3878, 2027.37, 2058.1972, 2089.5728, 2120.1012,
2151.9668, 2183.292, 2216.0772, 2247.8578, 2280.6562, 2313.041,
2345.714, 2380.3112, 2414.1806, 2447.9854, 2481.656, 2516.346,
2551.5154, 2586.8378, 2621.7448, 2656.6722, 2693.5722, 2729.1462,
2765.4124, 2802.8728, 2838.898, 2876.408, 2913.4926, 2951.4938,
2989.6776, 3026.282, 3065.7704, 3104.1012, 3143.7388, 3181.6876,
3221.1872, 3261.5048, 3300.0214, 3339.806, 3381.409, 3421.4144,
3461.4294, 3502.2286, 3544.651, 3586.6156, 3627.337, 3670.083,
3711.1538, 3753.5094, 3797.01, 3838.6686, 3882.1678, 3922.8116,
3967.9978, 4009.9204, 4054.3286, 4097.5706, 4140.6014, 4185.544,
4229.5976, 4274.583, 4316.9438, 4361.672, 4406.2786, 4451.8628,
4496.1834, 4543.505, 4589.1816, 4632.5188, 4678.2294, 4724.8908,
4769.0194, 4817.052, 4861.4588, 4910.1596, 4956.4344, 5002.5238,
5048.13, 5093.6374, 5142.8162, 5187.7894, 5237.3984, 5285.6078,
5331.0858, 5379.1036, 5428.6258, 5474.6018, 5522.7618, 5571.5822,
5618.59, 5667.9992, 5714.88, 5763.454, 5808.6982, 5860.3644,
5910.2914, 5953.571, 6005.9232, 6055.1914, 6104.5882, 6154.5702,
6199.7036, 6251.1764, 6298.7596, 6350.0302, 6398.061, 6448.4694,
6495.933, 6548.0474, 6597.7166, 6646.9416, 6695.9208, 6742.6328,
6793.5276, 6842.1934, 6894.2372, 6945.3864, 6996.9228, 7044.2372,
7094.1374, 7142.2272, 7192.2942, 7238.8338, 7288.9006, 7344.0908,
7394.8544, 7443.5176, 7490.4148, 7542.9314, 7595.6738, 7641.9878,
7694.3688, 7743.0448, 7797.522, 7845.53, 7899.594, 7950.3132,
7996.455, 8050.9442, 8092.9114, 8153.1374, 8197.4472, 8252.8278,
8301.8728, 8348.6776, 8401.4698, 8453.551, 8504.6598, 8553.8944,
8604.1276, 8657.6514, 8710.3062, 8758.908, 8807.8706, 8862.1702,
8910.4668, 8960.77, 9007.2766, 9063.164, 9121.0534, 9164.1354,
9218.1594, 9267.767, 9319.0594, 9372.155, 9419.7126, 9474.3722,
9520.1338, 9572.368, 9622.7702, 9675.8448, 9726.5396, 9778.7378,
9827.6554, 9878.1922, 9928.7782, 9978.3984, 10026.578, 10076.5626,
10137.1618, 10177.5244, 10229.9176],
&[2954.0, 3003.4782, 3053.3568, 3104.3666, 3155.324, 3206.9598,
3259.648, 3312.539, 3366.1474, 3420.2576, 3474.8376, 3530.6076,
3586.451, 3643.38, 3700.4104, 3757.5638, 3815.9676, 3875.193,
3934.838, 3994.8548, 4055.018, 4117.1742, 4178.4482, 4241.1294,
4304.4776, 4367.4044, 4431.8724, 4496.3732, 4561.4304, 4627.5326,
4693.949, 4761.5532, 4828.7256, 4897.6182, 4965.5186, 5034.4528,
5104.865, 5174.7164, 5244.6828, 5316.6708, 5387.8312, 5459.9036,
5532.476, 5604.8652, 5679.6718, 5753.757, 5830.2072, 5905.2828,
5980.0434, 6056.6264, 6134.3192, 6211.5746, 6290.0816, 6367.1176,
6447.9796, 6526.5576, 6606.1858, 6686.9144, 6766.1142, 6847.0818,
6927.9664, 7010.9096, 7091.0816, 7175.3962, 7260.3454, 7344.018,
7426.4214, 7511.3106, 7596.0686, 7679.8094, 7765.818, 7852.4248,
7936.834, 8022.363, 8109.5066, 8200.4554, 8288.5832, 8373.366,
8463.4808, 8549.7682, 8642.0522, 8728.3288, 8820.9528, 8907.727,
9001.0794, 9091.2522, 9179.988, 9269.852, 9362.6394, 9453.642,
9546.9024, 9640.6616, 9732.6622, 9824.3254, 9917.7484, 10007.9392,
10106.7508, 10196.2152, 10289.8114, 10383.5494, 10482.3064,
10576.8734, 10668.7872, 10764.7156, 10862.0196, 10952.793, 11049.9748,
11146.0702, 11241.4492, 11339.2772, 11434.2336, 11530.741, 11627.6136,
11726.311, 11821.5964, 11918.837, 12015.3724, 12113.0162, 12213.0424,
12306.9804, 12408.4518, 12504.8968, 12604.586, 12700.9332, 12798.705,
12898.5142, 12997.0488, 13094.788, 13198.475, 13292.7764, 13392.9698,
13486.8574, 13590.1616, 13686.5838, 13783.6264, 13887.2638,
13992.0978, 14081.0844, 14189.9956, 14280.0912, 14382.4956,
14486.4384, 14588.1082, 14686.2392, 14782.276, 14888.0284, 14985.1864,
15088.8596, 15187.0998, 15285.027, 15383.6694, 15495.8266, 15591.3736,
15694.2008, 15790.3246, 15898.4116, 15997.4522, 16095.5014,
16198.8514, 16291.7492, 16402.6424, 16499.1266, 16606.2436,
16697.7186, 16796.3946, 16902.3376, 17005.7672, 17100.814, 17206.8282,
17305.8262, 17416.0744, 17508.4092, 17617.0178, 17715.4554, 17816.758,
17920.1748, 18012.9236, 18119.7984, 18223.2248, 18324.2482,
18426.6276, 18525.0932, 18629.8976, 18733.2588, 18831.0466,
18940.1366, 19032.2696, 19131.729, 19243.4864, 19349.6932, 19442.866,
19547.9448, 19653.2798, 19754.4034, 19854.0692, 19965.1224,
20065.1774, 20158.2212, 20253.353, 20366.3264, 20463.22],
&[5908.5052, 6007.2672, 6107.347, 6208.5794, 6311.2622, 6414.5514,
6519.3376, 6625.6952, 6732.5988, 6841.3552, 6950.5972, 7061.3082,
7173.5646, 7287.109, 7401.8216, 7516.4344, 7633.3802, 7751.2962,
7870.3784, 7990.292, 8110.79, 8233.4574, 8356.6036, 8482.2712,
8607.7708, 8735.099, 8863.1858, 8993.4746, 9123.8496, 9255.6794,
9388.5448, 9522.7516, 9657.3106, 9792.6094, 9930.5642, 10068.794,
10206.7256, 10347.81, 10490.3196, 10632.0778, 10775.9916, 10920.4662,
11066.124, 11213.073, 11358.0362, 11508.1006, 11659.1716, 11808.7514,
11959.4884, 12112.1314, 12265.037, 12420.3756, 12578.933, 12734.311,
12890.0006, 13047.2144, 13207.3096, 13368.5144, 13528.024, 13689.847,
13852.7528, 14018.3168, 14180.5372, 14346.9668, 14513.5074, 14677.867,
14846.2186, 15017.4186, 15184.9716, 15356.339, 15529.2972, 15697.3578,
15871.8686, 16042.187, 16216.4094, 16389.4188, 16565.9126, 16742.3272,
16919.0042, 17094.7592, 17273.965, 17451.8342, 17634.4254, 17810.5984,
17988.9242, 18171.051, 18354.7938, 18539.466, 18721.0408, 18904.9972,
19081.867, 19271.9118, 19451.8694, 19637.9816, 19821.2922, 20013.1292,
20199.3858, 20387.8726, 20572.9514, 20770.7764, 20955.1714, 21144.751,
21329.9952, 21520.709, 21712.7016, 21906.3868, 22096.2626, 22286.0524,
22475.051, 22665.5098, 22862.8492, 23055.5294, 23249.6138, 23437.848,
23636.273, 23826.093, 24020.3296, 24213.3896, 24411.7392, 24602.9614,
24805.7952, 24998.1552, 25193.9588, 25389.0166, 25585.8392,
25780.6976, 25981.2728, 26175.977, 26376.5252, 26570.1964, 26773.387,
26962.9812, 27163.0586, 27368.164, 27565.0534, 27758.7428, 27961.1276,
28163.2324, 28362.3816, 28565.7668, 28758.644, 28956.9768, 29163.4722,
29354.7026, 29561.1186, 29767.9948, 29959.9986, 30164.0492,
30366.9818, 30562.5338, 30762.9928, 30976.1592, 31166.274, 31376.722,
31570.3734, 31770.809, 31974.8934, 32179.5286, 32387.5442, 32582.3504,
32794.076, 32989.9528, 33191.842, 33392.4684, 33595.659, 33801.8672,
34000.3414, 34200.0922, 34402.6792, 34610.0638, 34804.0084, 35011.13,
35218.669, 35418.6634, 35619.0792, 35830.6534, 36028.4966, 36229.7902,
36438.6422, 36630.7764, 36833.3102, 37048.6728, 37247.3916,
37453.5904, 37669.3614, 37854.5526, 38059.305, 38268.0936, 38470.2516,
38674.7064, 38876.167, 39068.3794, 39281.9144, 39492.8566, 39684.8628,
39898.4108, 40093.1836, 40297.6858, 40489.7086, 40717.2424],
&[11817.475, 12015.0046, 12215.3792, 12417.7504, 12623.1814, 12830.0086,
13040.0072, 13252.503, 13466.178, 13683.2738, 13902.0344, 14123.9798,
14347.394, 14573.7784, 14802.6894, 15033.6824, 15266.9134, 15502.8624,
15741.4944, 15980.7956, 16223.8916, 16468.6316, 16715.733, 16965.5726,
17217.204, 17470.666, 17727.8516, 17986.7886, 18247.6902, 18510.9632,
18775.304, 19044.7486, 19314.4408, 19587.202, 19862.2576, 20135.924,
20417.0324, 20697.9788, 20979.6112, 21265.0274, 21550.723, 21841.6906,
22132.162, 22428.1406, 22722.127, 23020.5606, 23319.7394, 23620.4014,
23925.2728, 24226.9224, 24535.581, 24845.505, 25155.9618, 25470.3828,
25785.9702, 26103.7764, 26420.4132, 26742.0186, 27062.8852, 27388.415,
27714.6024, 28042.296, 28365.4494, 28701.1526, 29031.8008, 29364.2156,
29704.497, 30037.1458, 30380.111, 30723.8168, 31059.5114, 31404.9498,
31751.6752, 32095.2686, 32444.7792, 32794.767, 33145.204, 33498.4226,
33847.6502, 34209.006, 34560.849, 34919.4838, 35274.9778, 35635.1322,
35996.3266, 36359.1394, 36722.8266, 37082.8516, 37447.7354,
37815.9606, 38191.0692, 38559.4106, 38924.8112, 39294.6726, 39663.973,
40042.261, 40416.2036, 40779.2036, 41161.6436, 41540.9014, 41921.1998,
42294.7698, 42678.5264, 43061.3464, 43432.375, 43818.432, 44198.6598,
44583.0138, 44970.4794, 45353.924, 45729.858, 46118.2224, 46511.5724,
46900.7386, 47280.6964, 47668.1472, 48055.6796, 48446.9436,
48838.7146, 49217.7296, 49613.7796, 50010.7508, 50410.0208,
50793.7886, 51190.2456, 51583.1882, 51971.0796, 52376.5338, 52763.319,
53165.5534, 53556.5594, 53948.2702, 54346.352, 54748.7914, 55138.577,
55543.4824, 55941.1748, 56333.7746, 56745.1552, 57142.7944,
57545.2236, 57935.9956, 58348.5268, 58737.5474, 59158.5962,
59542.6896, 59958.8004, 60349.3788, 60755.0212, 61147.6144, 61548.194,
61946.0696, 62348.6042, 62763.603, 63162.781, 63560.635, 63974.3482,
64366.4908, 64771.5876, 65176.7346, 65597.3916, 65995.915, 66394.0384,
66822.9396, 67203.6336, 67612.2032, 68019.0078, 68420.0388, 68821.22,
69235.8388, 69640.0724, 70055.155, 70466.357, 70863.4266, 71276.2482,
71677.0306, 72080.2006, 72493.0214, 72893.5952, 73314.5856,
73714.9852, 74125.3022, 74521.2122, 74933.6814, 75341.5904,
75743.0244, 76166.0278, 76572.1322, 76973.1028, 77381.6284,
77800.6092, 78189.328, 78607.0962, 79012.2508, 79407.8358, 79825.725,
80238.701, 80646.891, 81035.6436, 81460.0448, 81876.3884],
&[23635.0036, 24030.8034, 24431.4744, 24837.1524, 25246.7928, 25661.326,
26081.3532, 26505.2806, 26933.9892, 27367.7098, 27805.318, 28248.799,
28696.4382, 29148.8244, 29605.5138, 30066.8668, 30534.2344, 31006.32,
31480.778, 31962.2418, 32447.3324, 32938.0232, 33432.731, 33930.728,
34433.9896, 34944.1402, 35457.5588, 35974.5958, 36497.3296,
37021.9096, 37554.326, 38088.0826, 38628.8816, 39171.3192, 39723.2326,
40274.5554, 40832.3142, 41390.613, 41959.5908, 42532.5466, 43102.0344,
43683.5072, 44266.694, 44851.2822, 45440.7862, 46038.0586, 46640.3164,
47241.064, 47846.155, 48454.7396, 49076.9168, 49692.542, 50317.4778,
50939.65, 51572.5596, 52210.2906, 52843.7396, 53481.3996, 54127.236,
54770.406, 55422.6598, 56078.7958, 56736.7174, 57397.6784, 58064.5784,
58730.308, 59404.9784, 60077.0864, 60751.9158, 61444.1386, 62115.817,
62808.7742, 63501.4774, 64187.5454, 64883.6622, 65582.7468,
66274.5318, 66976.9276, 67688.7764, 68402.138, 69109.6274, 69822.9706,
70543.6108, 71265.5202, 71983.3848, 72708.4656, 73433.384, 74158.4664,
74896.4868, 75620.9564, 76362.1434, 77098.3204, 77835.7662,
78582.6114, 79323.9902, 80067.8658, 80814.9246, 81567.0136,
82310.8536, 83061.9952, 83821.4096, 84580.8608, 85335.547, 86092.5802,
86851.6506, 87612.311, 88381.2016, 89146.3296, 89907.8974, 90676.846,
91451.4152, 92224.5518, 92995.8686, 93763.5066, 94551.2796,
95315.1944, 96096.1806, 96881.0918, 97665.679, 98442.68, 99229.3002,
100011.0994, 100790.6386, 101580.1564, 102377.7484, 103152.1392,
103944.2712, 104730.216, 105528.6336, 106324.9398, 107117.6706,
107890.3988, 108695.2266, 109485.238, 110294.7876, 111075.0958,
111878.0496, 112695.2864, 113464.5486, 114270.0474, 115068.608,
115884.3626, 116673.2588, 117483.3716, 118275.097, 119085.4092,
119879.2808, 120687.5868, 121499.9944, 122284.916, 123095.9254,
123912.5038, 124709.0454, 125503.7182, 126323.259, 127138.9412,
127943.8294, 128755.646, 129556.5354, 130375.3298, 131161.4734,
131971.1962, 132787.5458, 133588.1056, 134431.351, 135220.2906,
136023.398, 136846.6558, 137667.0004, 138463.663, 139283.7154,
140074.6146, 140901.3072, 141721.8548, 142543.2322, 143356.1096,
144173.7412, 144973.0948, 145794.3162, 146609.5714, 147420.003,
148237.9784, 149050.5696, 149854.761, 150663.1966, 151494.0754,
152313.1416, 153112.6902, 153935.7206, 154746.9262, 155559.547,
156401.9746, 157228.7036, 158008.7254, 158820.75, 159646.9184,
160470.4458, 161279.5348, 162093.3114, 162918.542, 163729.2842],
&[47271.0, 48062.3584, 48862.7074, 49673.152, 50492.8416, 51322.9514,
52161.03, 53009.407, 53867.6348, 54734.206, 55610.5144, 56496.2096,
57390.795, 58297.268, 59210.6448, 60134.665, 61068.0248, 62010.4472,
62962.5204, 63923.5742, 64895.0194, 65876.4182, 66862.6136,
67862.6968, 68868.8908, 69882.8544, 70911.271, 71944.0924, 72990.0326,
74040.692, 75100.6336, 76174.7826, 77252.5998, 78340.2974, 79438.2572,
80545.4976, 81657.2796, 82784.6336, 83915.515, 85059.7362, 86205.9368,
87364.4424, 88530.3358, 89707.3744, 90885.9638, 92080.197, 93275.5738,
94479.391, 95695.918, 96919.2236, 98148.4602, 99382.3474, 100625.6974,
101878.0284, 103141.6278, 104409.4588, 105686.2882, 106967.5402,
108261.6032, 109548.1578, 110852.0728, 112162.231, 113479.0072,
114806.2626, 116137.9072, 117469.5048, 118813.5186, 120165.4876,
121516.2556, 122875.766, 124250.5444, 125621.2222, 127003.2352,
128387.848, 129775.2644, 131181.7776, 132577.3086, 133979.9458,
135394.1132, 136800.9078, 138233.217, 139668.5308, 141085.212,
142535.2122, 143969.0684, 145420.2872, 146878.1542, 148332.7572,
149800.3202, 151269.66, 152743.6104, 154213.0948, 155690.288,
157169.4246, 158672.1756, 160160.059, 161650.6854, 163145.7772,
164645.6726, 166159.1952, 167682.1578, 169177.3328, 170700.0118,
172228.8964, 173732.6664, 175265.5556, 176787.799, 178317.111,
179856.6914, 181400.865, 182943.4612, 184486.742, 186033.4698,
187583.7886, 189148.1868, 190688.4526, 192250.1926, 193810.9042,
195354.2972, 196938.7682, 198493.5898, 200079.2824, 201618.912,
203205.5492, 204765.5798, 206356.1124, 207929.3064, 209498.7196,
211086.229, 212675.1324, 214256.7892, 215826.2392, 217412.8474,
218995.6724, 220618.6038, 222207.1166, 223781.0364, 225387.4332,
227005.7928, 228590.4336, 230217.8738, 231805.1054, 233408.9,
234995.3432, 236601.4956, 238190.7904, 239817.2548, 241411.2832,
243002.4066, 244640.1884, 246255.3128, 247849.3508, 249479.9734,
251106.8822, 252705.027, 254332.9242, 255935.129, 257526.9014,
259154.772, 260777.625, 262390.253, 264004.4906, 265643.59,
267255.4076, 268873.426, 270470.7252, 272106.4804, 273722.4456,
275337.794, 276945.7038, 278592.9154, 280204.3726, 281841.1606,
283489.171, 285130.1716, 286735.3362, 288364.7164, 289961.1814,
291595.5524, 293285.683, 294899.6668, 296499.3434, 298128.0462,
299761.8946, 301394.2424, 302997.6748, 304615.1478, 306269.7724,
307886.114, 309543.1028, 311153.2862, 312782.8546, 314421.2008,
316033.2438, 317692.9636, 319305.2648, 320948.7406, 322566.3364,
324228.4224, 325847.1542],
&[94542.0, 96125.811, 97728.019, 99348.558, 100987.9705, 102646.7565,
104324.5125, 106021.7435, 107736.7865, 109469.272, 111223.9465,
112995.219, 114787.432, 116593.152, 118422.71, 120267.2345,
122134.6765, 124020.937, 125927.2705, 127851.255, 129788.9485,
131751.016, 133726.8225, 135722.592, 137736.789, 139770.568,
141821.518, 143891.343, 145982.1415, 148095.387, 150207.526,
152355.649, 154515.6415, 156696.05, 158887.7575, 161098.159,
163329.852, 165569.053, 167837.4005, 170121.6165, 172420.4595,
174732.6265, 177062.77, 179412.502, 181774.035, 184151.939,
186551.6895, 188965.691, 191402.8095, 193857.949, 196305.0775,
198774.6715, 201271.2585, 203764.78, 206299.3695, 208818.1365,
211373.115, 213946.7465, 216532.076, 219105.541, 221714.5375,
224337.5135, 226977.5125, 229613.0655, 232270.2685, 234952.2065,
237645.3555, 240331.1925, 243034.517, 245756.0725, 248517.6865,
251232.737, 254011.3955, 256785.995, 259556.44, 262368.335,
265156.911, 267965.266, 270785.583, 273616.0495, 276487.4835,
279346.639, 282202.509, 285074.3885, 287942.2855, 290856.018,
293774.0345, 296678.5145, 299603.6355, 302552.6575, 305492.9785,
308466.8605, 311392.581, 314347.538, 317319.4295, 320285.9785,
323301.7325, 326298.3235, 329301.3105, 332301.987, 335309.791,
338370.762, 341382.923, 344431.1265, 347464.1545, 350507.28,
353619.2345, 356631.2005, 359685.203, 362776.7845, 365886.488,
368958.2255, 372060.6825, 375165.4335, 378237.935, 381328.311,
384430.5225, 387576.425, 390683.242, 393839.648, 396977.8425,
400101.9805, 403271.296, 406409.8425, 409529.5485, 412678.7,
415847.423, 419020.8035, 422157.081, 425337.749, 428479.6165,
431700.902, 434893.1915, 438049.582, 441210.5415, 444379.2545,
447577.356, 450741.931, 453959.548, 457137.0935, 460329.846,
463537.4815, 466732.3345, 469960.5615, 473164.681, 476347.6345,
479496.173, 482813.1645, 486025.6995, 489249.4885, 492460.1945,
495675.8805, 498908.0075, 502131.802, 505374.3855, 508550.9915,
511806.7305, 515026.776, 518217.0005, 521523.9855, 524705.9855,
527950.997, 531210.0265, 534472.497, 537750.7315, 540926.922,
544207.094, 547429.4345, 550666.3745, 553975.3475, 557150.7185,
560399.6165, 563662.697, 566916.7395, 570146.1215, 573447.425,
576689.6245, 579874.5745, 583202.337, 586503.0255, 589715.635,
592910.161, 596214.3885, 599488.035, 602740.92, 605983.0685,
609248.67, 612491.3605, 615787.912, 619107.5245, 622307.9555,
625577.333, 628840.4385, 632085.2155, 635317.6135, 638691.7195,
641887.467, 645139.9405, 648441.546, 651666.252, 654941.845],
&[189084.0, 192250.913, 195456.774, 198696.946, 201977.762, 205294.444,
208651.754, 212042.099, 215472.269, 218941.91, 222443.912, 225996.845,
229568.199, 233193.568, 236844.457, 240543.233, 244279.475, 248044.27,
251854.588, 255693.2, 259583.619, 263494.621, 267445.385, 271454.061,
275468.769, 279549.456, 283646.446, 287788.198, 291966.099,
296181.164, 300431.469, 304718.618, 309024.004, 313393.508,
317760.803, 322209.731, 326675.061, 331160.627, 335654.47, 340241.442,
344841.833, 349467.132, 354130.629, 358819.432, 363574.626,
368296.587, 373118.482, 377914.93, 382782.301, 387680.669, 392601.981,
397544.323, 402529.115, 407546.018, 412593.658, 417638.657,
422762.865, 427886.169, 433017.167, 438213.273, 443441.254,
448692.421, 453937.533, 459239.049, 464529.569, 469910.083, 475274.03,
480684.473, 486070.26, 491515.237, 496995.651, 502476.617, 507973.609,
513497.19, 519083.233, 524726.509, 530305.505, 535945.728, 541584.404,
547274.055, 552967.236, 558667.862, 564360.216, 570128.148, 575965.08,
581701.952, 587532.523, 593361.144, 599246.128, 605033.418,
610958.779, 616837.117, 622772.818, 628672.04, 634675.369, 640574.831,
646585.739, 652574.547, 658611.217, 664642.684, 670713.914,
676737.681, 682797.313, 688837.897, 694917.874, 701009.882,
707173.648, 713257.254, 719415.392, 725636.761, 731710.697,
737906.209, 744103.074, 750313.39, 756504.185, 762712.579, 768876.985,
775167.859, 781359.0, 787615.959, 793863.597, 800245.477, 806464.582,
812785.294, 819005.925, 825403.057, 831676.197, 837936.284,
844266.968, 850642.711, 856959.756, 863322.774, 869699.931,
876102.478, 882355.787, 888694.463, 895159.952, 901536.143,
907872.631, 914293.672, 920615.14, 927130.974, 933409.404, 939922.178,
946331.47, 952745.93, 959209.264, 965590.224, 972077.284, 978501.961,
984953.19, 991413.271, 997817.479, 1004222.658, 1010725.676,
1017177.138, 1023612.529, 1030098.236, 1036493.719, 1043112.207,
1049537.036, 1056008.096, 1062476.184, 1068942.337, 1075524.95,
1081932.864, 1088426.025, 1094776.005, 1101327.448, 1107901.673,
1114423.639, 1120884.602, 1127324.923, 1133794.24, 1140328.886,
1146849.376, 1153346.682, 1159836.502, 1166478.703, 1172953.304,
1179391.502, 1185950.982, 1192544.052, 1198913.41, 1205430.994,
1212015.525, 1218674.042, 1225121.683, 1231551.101, 1238126.379,
1244673.795, 1251260.649, 1257697.86, 1264320.983, 1270736.319,
1277274.694, 1283804.95, 1290211.514, 1296858.568, 1303455.691]];
static BIAS_DATA: &'static [&'static [f64]] =
&[&[10.0, 9.717, 9.207, 8.7896, 8.2882, 7.8204, 7.3772, 6.9342, 6.5202,
6.161, 5.7722, 5.4636, 5.0396, 4.6766, 4.3566, 4.0454, 3.7936, 3.4856,
3.2666, 2.9946, 2.766, 2.4692, 2.3638, 2.0764, 1.7864, 1.7602, 1.4814,
1.433, 1.2926, 1.0664, 0.999600000000001, 0.7956, 0.5366,
0.589399999999998, 0.573799999999999, 0.269799999999996,
0.368200000000002, 0.0544000000000011, 0.234200000000001,
0.0108000000000033, -0.203400000000002, -0.0701999999999998,
-0.129600000000003, -0.364199999999997, -0.480600000000003,
-0.226999999999997, -0.322800000000001, -0.382599999999996,
-0.511200000000002, -0.669600000000003, -0.749400000000001,
-0.500399999999999, -0.617600000000003, -0.6922, -0.601599999999998,
-0.416200000000003, -0.338200000000001, -0.782600000000002,
-0.648600000000002, -0.919800000000002, -0.851799999999997,
-0.962400000000002, -0.6402, -1.1922, -1.0256, -1.086,
-1.21899999999999, -0.819400000000002, -0.940600000000003, -1.1554,
-1.2072, -1.1752, -1.16759999999999, -1.14019999999999, -1.3754,
-1.29859999999999, -1.607, -1.3292, -1.7606],
&[22.0, 21.1194, 20.8208, 20.2318, 19.77, 19.2436, 18.7774, 18.2848,
17.8224, 17.3742, 16.9336, 16.503, 16.0494, 15.6292, 15.2124, 14.798,
14.367, 13.9728, 13.5944, 13.217, 12.8438, 12.3696, 12.0956, 11.7044,
11.324, 11.0668, 10.6698, 10.3644, 10.049, 9.6918, 9.4146, 9.082,
8.687, 8.5398, 8.2462, 7.857, 7.6606, 7.4168, 7.1248, 6.9222, 6.6804,
6.447, 6.3454, 5.9594, 5.7636, 5.5776, 5.331, 5.19, 4.9676, 4.7564,
4.5314, 4.4442, 4.3708, 3.9774, 3.9624, 3.8796, 3.755, 3.472, 3.2076,
3.1024, 2.8908, 2.7338, 2.7728, 2.629, 2.413, 2.3266, 2.1524, 2.2642,
2.1806, 2.0566, 1.9192, 1.7598, 1.3516, 1.5802, 1.43859999999999,
1.49160000000001, 1.1524, 1.1892, 0.841399999999993,
0.879800000000003, 0.837599999999995, 0.469800000000006,
0.765600000000006, 0.331000000000003, 0.591399999999993,
0.601200000000006, 0.701599999999999, 0.558199999999999,
0.339399999999998, 0.354399999999998, 0.491200000000006,
0.308000000000007, 0.355199999999996, -0.0254000000000048,
0.205200000000005, -0.272999999999996, 0.132199999999997,
0.394400000000005, -0.241200000000006, 0.242000000000004,
0.191400000000002, 0.253799999999998, -0.122399999999999,
-0.370800000000003, 0.193200000000004, -0.0848000000000013,
0.0867999999999967, -0.327200000000005, -0.285600000000002,
0.311400000000006, -0.128399999999999, -0.754999999999995,
-0.209199999999996, -0.293599999999998, -0.364000000000004,
-0.253600000000006, -0.821200000000005, -0.253600000000006,
-0.510400000000004, -0.383399999999995, -0.491799999999998,
-0.220200000000006, -0.0972000000000008, -0.557400000000001,
-0.114599999999996, -0.295000000000002, -0.534800000000004,
0.346399999999988, -0.65379999999999, 0.0398000000000138,
0.0341999999999985, -0.995800000000003, -0.523400000000009,
-0.489000000000004, -0.274799999999999, -0.574999999999989,
-0.482799999999997, 0.0571999999999946, -0.330600000000004,
-0.628800000000012, -0.140199999999993, -0.540600000000012,
-0.445999999999998, -0.599400000000003, -0.262599999999992,
0.163399999999996, -0.100599999999986, -0.39500000000001,
-1.06960000000001, -0.836399999999998, -0.753199999999993,
-0.412399999999991, -0.790400000000005, -0.29679999999999,
-0.28540000000001, -0.193000000000012, -0.0772000000000048,
-0.962799999999987, -0.414800000000014],
&[45.0, 44.1902, 43.271, 42.8358, 41.8142, 41.2854, 40.317, 39.354,
38.8924, 37.9436, 37.4596, 36.5262, 35.6248, 35.1574, 34.2822, 33.837,
32.9636, 32.074, 31.7042, 30.7976, 30.4772, 29.6564, 28.7942, 28.5004,
27.686, 27.291, 26.5672, 25.8556, 25.4982, 24.8204, 24.4252, 23.7744,
23.0786, 22.8344, 22.0294, 21.8098, 21.0794, 20.5732, 20.1878,
19.5648, 19.2902, 18.6784, 18.3352, 17.8946, 17.3712, 17.0852, 16.499,
16.2686, 15.6844, 15.2234, 14.9732, 14.3356, 14.2286, 13.7262,
13.3284, 13.1048, 12.5962, 12.3562, 12.1272, 11.4184, 11.4974,
11.0822, 10.856, 10.48, 10.2834, 10.0208, 9.637, 9.51739999999999,
9.05759999999999, 8.74760000000001, 8.42700000000001, 8.1326, 8.2372,
8.2788, 7.6776, 7.79259999999999, 7.1952, 6.9564, 6.6454, 6.87,
6.5428, 6.19999999999999, 6.02940000000001, 5.62780000000001, 5.6782,
5.792, 5.35159999999999, 5.28319999999999, 5.0394, 5.07480000000001,
4.49119999999999, 4.84899999999999, 4.696, 4.54040000000001,
4.07300000000001, 4.37139999999999, 3.7216, 3.7328, 3.42080000000001,
3.41839999999999, 3.94239999999999, 3.27719999999999, 3.411,
3.13079999999999, 2.76900000000001, 2.92580000000001,
2.68279999999999, 2.75020000000001, 2.70599999999999, 2.3886,
3.01859999999999, 2.45179999999999, 2.92699999999999,
2.41720000000001, 2.41139999999999, 2.03299999999999,
2.51240000000001, 2.5564, 2.60079999999999, 2.41720000000001,
1.80439999999999, 1.99700000000001, 2.45480000000001, 1.8948, 2.2346,
2.30860000000001, 2.15479999999999, 1.88419999999999, 1.6508,
0.677199999999999, 1.72540000000001, 1.4752, 1.72280000000001,
1.66139999999999, 1.16759999999999, 1.79300000000001,
1.00059999999999, 0.905200000000008, 0.659999999999997,
1.55879999999999, 1.1636, 0.688199999999995, 0.712600000000009,
0.450199999999995, 1.1978, 0.975599999999986, 0.165400000000005,
1.727, 1.19739999999999, -0.252600000000001, 1.13460000000001, 1.3048,
1.19479999999999, 0.313400000000001, 0.878999999999991,
1.12039999999999, 0.853000000000009, 1.67920000000001,
0.856999999999999, 0.448599999999999, 1.2362, 0.953399999999988,
1.02859999999998, 0.563199999999995, 0.663000000000011,
0.723000000000013, 0.756599999999992, 0.256599999999992,
-0.837600000000009, 0.620000000000005, 0.821599999999989,
0.216600000000028, 0.205600000000004, 0.220199999999977,
0.372599999999977, 0.334400000000016, 0.928400000000011,
0.972800000000007, 0.192400000000021, 0.487199999999973,
-0.413000000000011, 0.807000000000016, 0.120600000000024,
0.769000000000005, 0.870799999999974, 0.66500000000002,
0.118200000000002, 0.401200000000017, 0.635199999999998,
0.135400000000004, 0.175599999999974, 1.16059999999999,
0.34620000000001, 0.521400000000028, -0.586599999999976,
-1.16480000000001, 0.968399999999974, 0.836999999999989,
0.779600000000016, 0.985799999999983],
&[91.0, 89.4934, 87.9758, 86.4574, 84.9718, 83.4954, 81.5302, 80.0756,
78.6374, 77.1782, 75.7888, 73.9522, 72.592, 71.2532, 69.9086, 68.5938,
66.9474, 65.6796, 64.4394, 63.2176, 61.9768, 60.4214, 59.2528,
58.0102, 56.8658, 55.7278, 54.3044, 53.1316, 52.093, 51.0032, 49.9092,
48.6306, 47.5294, 46.5756, 45.6508, 44.662, 43.552, 42.3724, 41.617,
40.5754, 39.7872, 38.8444, 37.7988, 36.8606, 36.2118, 35.3566,
34.4476, 33.5882, 32.6816, 32.0824, 31.0258, 30.6048, 29.4436,
28.7274, 27.957, 27.147, 26.4364, 25.7592, 25.3386, 24.781, 23.8028,
23.656, 22.6544, 21.996, 21.4718, 21.1544, 20.6098, 19.5956, 19.0616,
18.5758, 18.4878, 17.5244, 17.2146, 16.724, 15.8722, 15.5198, 15.0414,
14.941, 14.9048, 13.87, 13.4304, 13.028, 12.4708, 12.37, 12.0624,
11.4668, 11.5532, 11.4352, 11.2564, 10.2744, 10.2118,
9.74720000000002, 10.1456, 9.2928, 8.75040000000001, 8.55279999999999,
8.97899999999998, 8.21019999999999, 8.18340000000001, 7.3494,
7.32499999999999, 7.66140000000001, 6.90300000000002,
7.25439999999998, 6.9042, 7.21499999999997, 6.28640000000001,
6.08139999999997, 6.6764, 6.30099999999999, 5.13900000000001,
5.65800000000002, 5.17320000000001, 4.59019999999998, 4.9538,
5.08280000000002, 4.92200000000003, 4.99020000000002, 4.7328, 5.4538,
4.11360000000002, 4.22340000000003, 4.08780000000002,
3.70800000000003, 4.15559999999999, 4.18520000000001,
3.63720000000001, 3.68220000000002, 3.77960000000002, 3.6078,
2.49160000000001, 3.13099999999997, 2.5376, 3.19880000000001,
3.21100000000001, 2.4502, 3.52820000000003, 2.91199999999998,
3.04480000000001, 2.7432, 2.85239999999999, 2.79880000000003,
2.78579999999999, 1.88679999999999, 2.98860000000002,
2.50639999999999, 1.91239999999999, 2.66160000000002,
2.46820000000002, 1.58199999999999, 1.30399999999997,
2.27379999999999, 2.68939999999998, 1.32900000000001,
3.10599999999999, 1.69080000000002, 2.13740000000001,
2.53219999999999, 1.88479999999998, 1.33240000000001,
1.45119999999997, 1.17899999999997, 2.44119999999998,
1.60659999999996, 2.16700000000003, 0.77940000000001,
2.37900000000002, 2.06700000000001, 1.46000000000004,
2.91160000000002, 1.69200000000001, 0.954600000000028,
2.49300000000005, 2.2722, 1.33500000000004, 2.44899999999996,
1.20140000000004, 3.07380000000001, 2.09739999999999,
2.85640000000001, 2.29960000000005, 2.40899999999999,
1.97040000000004, 0.809799999999996, 1.65279999999996,
2.59979999999996, 0.95799999999997, 2.06799999999998,
2.32780000000002, 4.20159999999998, 1.96320000000003,
1.86400000000003, 1.42999999999995, 3.77940000000001,
1.27200000000005, 1.86440000000005, 2.20600000000002,
3.21900000000005, 1.5154, 2.61019999999996],
&[183.2152, 180.2454, 177.2096, 173.6652, 170.6312, 167.6822, 164.249,
161.3296, 158.0038, 155.2074, 152.4612, 149.27, 146.5178, 143.4412,
140.8032, 138.1634, 135.1688, 132.6074, 129.6946, 127.2664, 124.8228,
122.0432, 119.6824, 116.9464, 114.6268, 112.2626, 109.8376, 107.4034,
104.8956, 102.8522, 100.7638, 98.3552, 96.3556, 93.7526, 91.9292,
89.8954, 87.8198, 85.7668, 83.298, 81.6688, 79.9466, 77.9746, 76.1672,
74.3474, 72.3028, 70.8912, 69.114, 67.4646, 65.9744, 64.4092, 62.6022,
60.843, 59.5684, 58.1652, 56.5426, 55.4152, 53.5388, 52.3592, 51.1366,
49.486, 48.3918, 46.5076, 45.509, 44.3834, 43.3498, 42.0668, 40.7346,
40.1228, 38.4528, 37.7, 36.644, 36.0518, 34.5774, 33.9068, 32.432,
32.1666, 30.434, 29.6644, 28.4894, 27.6312, 26.3804, 26.292,
25.5496000000001, 25.0234, 24.8206, 22.6146, 22.4188, 22.117, 20.6762,
20.6576, 19.7864, 19.509, 18.5334, 17.9204, 17.772, 16.2924, 16.8654,
15.1836, 15.745, 15.1316, 15.0386, 14.0136, 13.6342, 12.6196, 12.1866,
12.4281999999999, 11.3324, 10.4794000000001, 11.5038, 10.129,
9.52800000000002, 10.3203999999999, 9.46299999999997,
9.79280000000006, 9.12300000000005, 8.74180000000001, 9.2192,
7.51020000000005, 7.60659999999996, 7.01840000000004,
7.22239999999999, 7.40139999999997, 6.76179999999999,
7.14359999999999, 5.65060000000005, 5.63779999999997,
5.76599999999996, 6.75139999999999, 5.57759999999996,
3.73220000000003, 5.8048, 5.63019999999995, 4.93359999999996,
3.47979999999995, 4.33879999999999, 3.98940000000005,
3.81960000000004, 3.31359999999995, 3.23080000000004, 3.4588,
3.08159999999998, 3.4076, 3.00639999999999, 2.38779999999997,
2.61900000000003, 1.99800000000005, 3.34820000000002,
2.95060000000001, 0.990999999999985, 2.11440000000005,
2.20299999999997, 2.82219999999995, 2.73239999999998, 2.7826,
3.76660000000004, 2.26480000000004, 2.31280000000004,
2.40819999999997, 2.75360000000001, 3.33759999999995,
2.71559999999999, 1.7478000000001, 1.42920000000004, 2.39300000000003,
2.22779999999989, 2.34339999999997, 0.87259999999992,
3.88400000000001, 1.80600000000004, 1.91759999999999,
1.16779999999994, 1.50320000000011, 2.52500000000009,
0.226400000000012, 2.31500000000005, 0.930000000000064,
1.25199999999995, 2.14959999999996, 0.0407999999999902,
2.5447999999999, 1.32960000000003, 0.197400000000016,
2.52620000000002, 3.33279999999991, -1.34300000000007,
0.422199999999975, 0.917200000000093, 1.12920000000008,
1.46060000000011, 1.45779999999991, 2.8728000000001, 3.33359999999993,
-1.34079999999994, 1.57680000000005, 0.363000000000056,
1.40740000000005, 0.656600000000026, 0.801400000000058,
-0.454600000000028, 1.51919999999996],
&[368.0, 361.8294, 355.2452, 348.6698, 342.1464, 336.2024, 329.8782,
323.6598, 317.462, 311.2826, 305.7102, 299.7416, 293.9366, 288.1046,
282.285, 277.0668, 271.306, 265.8448, 260.301, 254.9886, 250.2422,
244.8138, 239.7074, 234.7428, 229.8402, 225.1664, 220.3534, 215.594,
210.6886, 205.7876, 201.65, 197.228, 192.8036, 188.1666, 184.0818,
180.0824, 176.2574, 172.302, 168.1644, 164.0056, 160.3802, 156.7192,
152.5234, 149.2084, 145.831, 142.485, 139.1112, 135.4764, 131.76,
129.3368, 126.5538, 122.5058, 119.2646, 116.5902, 113.3818, 110.8998,
107.9532, 105.2062, 102.2798, 99.4728, 96.9582, 94.3292, 92.171,
89.7809999999999, 87.5716, 84.7048, 82.5322, 79.875, 78.3972, 75.3464,
73.7274, 71.2834, 70.1444, 68.4263999999999, 66.0166, 64.018,
62.0437999999999, 60.3399999999999, 58.6856, 57.9836,
55.0311999999999, 54.6769999999999, 52.3188, 51.4846,
49.4423999999999, 47.739, 46.1487999999999, 44.9202, 43.4059999999999,
42.5342000000001, 41.2834, 38.8954000000001, 38.3286000000001,
36.2146, 36.6684, 35.9946, 33.123, 33.4338, 31.7378000000001, 29.076,
28.9692, 27.4964, 27.0998, 25.9864, 26.7754, 24.3208, 23.4838,
22.7388000000001, 24.0758000000001, 21.9097999999999, 20.9728,
19.9228000000001, 19.9292, 16.617, 17.05, 18.2996000000001,
15.6128000000001, 15.7392, 14.5174, 13.6322, 12.2583999999999,
13.3766000000001, 11.423, 13.1232, 9.51639999999998, 10.5938000000001,
9.59719999999993, 8.12220000000002, 9.76739999999995,
7.50440000000003, 7.56999999999994, 6.70440000000008,
6.41419999999994, 6.71019999999999, 5.60940000000005,
4.65219999999999, 6.84099999999989, 3.4072000000001, 3.97859999999991,
3.32760000000007, 5.52160000000003, 3.31860000000006,
2.06940000000009, 4.35400000000004, 1.57500000000005,
0.280799999999999, 2.12879999999996, -0.214799999999968,
-0.0378000000000611, -0.658200000000079, 0.654800000000023,
-0.0697999999999865, 0.858400000000074, -2.52700000000004,
-2.1751999999999, -3.35539999999992, -1.04019999999991,
-0.651000000000067, -2.14439999999991, -1.96659999999997,
-3.97939999999994, -0.604400000000169, -3.08260000000018,
-3.39159999999993, -5.29640000000018, -5.38920000000007,
-5.08759999999984, -4.69900000000007, -5.23720000000003,
-3.15779999999995, -4.97879999999986, -4.89899999999989,
-7.48880000000008, -5.94799999999987, -5.68060000000014,
-6.67180000000008, -4.70499999999993, -7.27779999999984,
-4.6579999999999, -4.4362000000001, -4.32139999999981,
-5.18859999999995, -6.66879999999992, -6.48399999999992,
-5.1260000000002, -4.4032000000002, -6.13500000000022,
-5.80819999999994, -4.16719999999987, -4.15039999999999,
-7.45600000000013, -7.24080000000004, -9.83179999999993,
-5.80420000000004, -8.6561999999999, -6.99940000000015,
-10.5473999999999, -7.34139999999979, -6.80999999999995,
-6.29719999999998, -6.23199999999997],
&[737.1256, 724.4234, 711.1064, 698.4732, 685.4636, 673.0644, 660.488,
647.9654, 636.0832, 623.7864, 612.1992, 600.2176, 588.5228, 577.1716,
565.7752, 554.899, 543.6126, 532.6492, 521.9474, 511.5214, 501.1064,
490.6364, 480.2468, 470.4588, 460.3832, 451.0584, 440.8606, 431.3868,
422.5062, 413.1862, 404.463, 395.339, 386.1936, 378.1292, 369.1854,
361.2908, 353.3324, 344.8518, 337.5204, 329.4854, 321.9318, 314.552,
306.4658, 299.4256, 292.849, 286.152, 278.8956, 271.8792, 265.118,
258.62, 252.5132, 245.9322, 239.7726, 233.6086, 227.5332, 222.5918,
216.4294, 210.7662, 205.4106, 199.7338, 194.9012, 188.4486, 183.1556,
178.6338, 173.7312, 169.6264, 163.9526, 159.8742, 155.8326, 151.1966,
147.5594, 143.07, 140.037, 134.1804, 131.071, 127.4884, 124.0848,
120.2944, 117.333, 112.9626, 110.2902, 107.0814, 103.0334,
99.4832000000001, 96.3899999999999, 93.7202000000002,
90.1714000000002, 87.2357999999999, 85.9346, 82.8910000000001,
80.0264000000002, 78.3834000000002, 75.1543999999999,
73.8683999999998, 70.9895999999999, 69.4367999999999,
64.8701999999998, 65.0408000000002, 61.6738, 59.5207999999998,
57.0158000000001, 54.2302, 53.0962, 50.4985999999999,
52.2588000000001, 47.3914, 45.6244000000002, 42.8377999999998,
43.0072, 40.6516000000001, 40.2453999999998, 35.2136, 36.4546,
33.7849999999999, 33.2294000000002, 32.4679999999998,
30.8670000000002, 28.6507999999999, 28.9099999999999,
27.5983999999999, 26.1619999999998, 24.5563999999999,
23.2328000000002, 21.9484000000002, 21.5902000000001,
21.3346000000001, 17.7031999999999, 20.6111999999998,
19.5545999999999, 15.7375999999999, 17.0720000000001,
16.9517999999998, 15.326, 13.1817999999998, 14.6925999999999,
13.0859999999998, 13.2754, 10.8697999999999, 11.248, 7.3768,
4.72339999999986, 7.97899999999981, 8.7503999999999, 7.68119999999999,
9.7199999999998, 7.73919999999998, 5.6224000000002, 7.44560000000001,
6.6601999999998, 5.9058, 4.00199999999995, 4.51699999999983,
4.68240000000014, 3.86220000000003, 5.13639999999987,
5.98500000000013, 2.47719999999981, 2.61999999999989,
1.62800000000016, 4.65000000000009, 0.225599999999758,
0.831000000000131, -0.359400000000278, 1.27599999999984,
-2.92559999999958, -0.0303999999996449, 2.37079999999969,
-2.0033999999996, 0.804600000000391, 0.30199999999968,
1.1247999999996, -2.6880000000001, 0.0321999999996478,
-1.18099999999959, -3.9402, -1.47940000000017, -0.188400000000001,
-2.10720000000038, -2.04159999999956, -3.12880000000041,
-4.16160000000036, -0.612799999999879, -3.48719999999958,
-8.17900000000009, -5.37780000000021, -4.01379999999972,
-5.58259999999973, -5.73719999999958, -7.66799999999967,
-5.69520000000011, -1.1247999999996, -5.58520000000044,
-8.04560000000038, -4.64840000000004, -11.6468000000004,
-7.97519999999986, -5.78300000000036, -7.67420000000038,
-10.6328000000003, -9.81720000000041],
&[1476.0, 1449.6014, 1423.5802, 1397.7942, 1372.3042, 1347.2062,
1321.8402, 1297.2292, 1272.9462, 1248.9926, 1225.3026, 1201.4252,
1178.0578, 1155.6092, 1132.626, 1110.5568, 1088.527, 1066.5154,
1045.1874, 1024.3878, 1003.37, 982.1972, 962.5728, 942.1012, 922.9668,
903.292, 884.0772, 864.8578, 846.6562, 828.041, 809.714, 792.3112,
775.1806, 757.9854, 740.656, 724.346, 707.5154, 691.8378, 675.7448,
659.6722, 645.5722, 630.1462, 614.4124, 600.8728, 585.898, 572.408,
558.4926, 544.4938, 531.6776, 517.282, 505.7704, 493.1012, 480.7388,
467.6876, 456.1872, 445.5048, 433.0214, 420.806, 411.409, 400.4144,
389.4294, 379.2286, 369.651, 360.6156, 350.337, 342.083, 332.1538,
322.5094, 315.01, 305.6686, 298.1678, 287.8116, 280.9978, 271.9204,
265.3286, 257.5706, 249.6014, 242.544, 235.5976, 229.583, 220.9438,
214.672, 208.2786, 201.8628, 195.1834, 191.505, 186.1816, 178.5188,
172.2294, 167.8908, 161.0194, 158.052, 151.4588, 148.1596, 143.4344,
138.5238, 133.13, 127.6374, 124.8162, 118.7894, 117.3984, 114.6078,
109.0858, 105.1036, 103.6258, 98.6018000000004, 95.7618000000002,
93.5821999999998, 88.5900000000001, 86.9992000000002,
82.8800000000001, 80.4539999999997, 74.6981999999998,
74.3644000000004, 73.2914000000001, 65.5709999999999,
66.9232000000002, 65.1913999999997, 62.5882000000001,
61.5702000000001, 55.7035999999998, 56.1764000000003,
52.7596000000003, 53.0302000000001, 49.0609999999997, 48.4694, 44.933,
46.0474000000004, 44.7165999999997, 41.9416000000001,
39.9207999999999, 35.6328000000003, 35.5276000000003,
33.1934000000001, 33.2371999999996, 33.3864000000003,
33.9228000000003, 30.2371999999996, 29.1373999999996,
25.2272000000003, 24.2942000000003, 19.8338000000003,
18.9005999999999, 23.0907999999999, 21.8544000000002,
19.5176000000001, 15.4147999999996, 16.9314000000004,
18.6737999999996, 12.9877999999999, 14.3688000000002,
12.0447999999997, 15.5219999999999, 12.5299999999997,
14.5940000000001, 14.3131999999996, 9.45499999999993,
12.9441999999999, 3.91139999999996, 13.1373999999996,
5.44720000000052, 9.82779999999912, 7.87279999999919,
3.67760000000089, 5.46980000000076, 5.55099999999948,
5.65979999999945, 3.89439999999922, 3.1275999999998, 5.65140000000065,
6.3062000000009, 3.90799999999945, 1.87060000000019, 5.17020000000048,
2.46680000000015, 0.770000000000437, -3.72340000000077,
1.16400000000067, 8.05340000000069, 0.135399999999208,
2.15940000000046, 0.766999999999825, 1.0594000000001,
3.15500000000065, -0.287399999999252, 2.37219999999979,
-2.86620000000039, -1.63199999999961, -2.22979999999916,
-0.15519999999924, -1.46039999999994, -0.262199999999211,
-2.34460000000036, -2.8078000000005, -3.22179999999935,
-5.60159999999996, -8.42200000000048, -9.43740000000071,
0.161799999999857, -10.4755999999998, -10.0823999999993],
&[2953.0, 2900.4782, 2848.3568, 2796.3666, 2745.324, 2694.9598,
2644.648, 2595.539, 2546.1474, 2498.2576, 2450.8376, 2403.6076,
2357.451, 2311.38, 2266.4104, 2221.5638, 2176.9676, 2134.193,
2090.838, 2048.8548, 2007.018, 1966.1742, 1925.4482, 1885.1294,
1846.4776, 1807.4044, 1768.8724, 1731.3732, 1693.4304, 1657.5326,
1621.949, 1586.5532, 1551.7256, 1517.6182, 1483.5186, 1450.4528,
1417.865, 1385.7164, 1352.6828, 1322.6708, 1291.8312, 1260.9036,
1231.476, 1201.8652, 1173.6718, 1145.757, 1119.2072, 1092.2828,
1065.0434, 1038.6264, 1014.3192, 988.5746, 965.0816, 940.1176,
917.9796, 894.5576, 871.1858, 849.9144, 827.1142, 805.0818, 783.9664,
763.9096, 742.0816, 724.3962, 706.3454, 688.018, 667.4214, 650.3106,
633.0686, 613.8094, 597.818, 581.4248, 563.834, 547.363, 531.5066,
520.455400000001, 505.583199999999, 488.366, 476.480799999999,
459.7682, 450.0522, 434.328799999999, 423.952799999999,
408.727000000001, 399.079400000001, 387.252200000001,
373.987999999999, 360.852000000001, 351.6394, 339.642,
330.902400000001, 322.661599999999, 311.662200000001, 301.3254,
291.7484, 279.939200000001, 276.7508, 263.215200000001,
254.811400000001, 245.5494, 242.306399999999, 234.8734,
223.787200000001, 217.7156, 212.0196, 200.793, 195.9748, 189.0702,
182.449199999999, 177.2772, 170.2336, 164.741, 158.613600000001,
155.311, 147.5964, 142.837, 137.3724, 132.0162, 130.0424, 121.9804,
120.451800000001, 114.8968, 111.585999999999, 105.933199999999,
101.705, 98.5141999999996, 95.0488000000005, 89.7880000000005,
91.4750000000004, 83.7764000000006, 80.9698000000008,
72.8574000000008, 73.1615999999995, 67.5838000000003,
62.6263999999992, 63.2638000000006, 66.0977999999996,
52.0843999999997, 58.9956000000002, 47.0912000000008,
46.4956000000002, 48.4383999999991, 47.1082000000006, 43.2392,
37.2759999999998, 40.0283999999992, 35.1864000000005,
35.8595999999998, 32.0998, 28.027, 23.6694000000007, 33.8266000000003,
26.3736000000008, 27.2008000000005, 21.3245999999999,
26.4115999999995, 23.4521999999997, 19.5013999999992,
19.8513999999996, 10.7492000000002, 18.6424000000006,
13.1265999999996, 18.2436000000016, 6.71860000000015,
3.39459999999963, 6.33759999999893, 7.76719999999841,
0.813999999998487, 3.82819999999992, 0.826199999999517,
8.07440000000133, -1.59080000000176, 5.01780000000144,
0.455399999998917, -0.24199999999837, 0.174800000000687,
-9.07640000000174, -4.20160000000033, -3.77520000000004,
-4.75179999999818, -5.3724000000002, -8.90680000000066,
-6.10239999999976, -5.74120000000039, -9.95339999999851,
-3.86339999999836, -13.7304000000004, -16.2710000000006,
-7.51359999999841, -3.30679999999847, -13.1339999999982,
-10.0551999999989, -6.72019999999975, -8.59660000000076,
-10.9307999999983, -1.8775999999998, -4.82259999999951, -13.7788,
-21.6470000000008, -10.6735999999983, -15.7799999999988],
&[5907.5052, 5802.2672, 5697.347, 5593.5794, 5491.2622, 5390.5514,
5290.3376, 5191.6952, 5093.5988, 4997.3552, 4902.5972, 4808.3082,
4715.5646, 4624.109, 4533.8216, 4444.4344, 4356.3802, 4269.2962,
4183.3784, 4098.292, 4014.79, 3932.4574, 3850.6036, 3771.2712,
3691.7708, 3615.099, 3538.1858, 3463.4746, 3388.8496, 3315.6794,
3244.5448, 3173.7516, 3103.3106, 3033.6094, 2966.5642, 2900.794,
2833.7256, 2769.81, 2707.3196, 2644.0778, 2583.9916, 2523.4662,
2464.124, 2406.073, 2347.0362, 2292.1006, 2238.1716, 2182.7514,
2128.4884, 2077.1314, 2025.037, 1975.3756, 1928.933, 1879.311,
1831.0006, 1783.2144, 1738.3096, 1694.5144, 1649.024, 1606.847,
1564.7528, 1525.3168, 1482.5372, 1443.9668, 1406.5074, 1365.867,
1329.2186, 1295.4186, 1257.9716, 1225.339, 1193.2972, 1156.3578,
1125.8686, 1091.187, 1061.4094, 1029.4188, 1000.9126, 972.3272,
944.004199999999, 915.7592, 889.965, 862.834200000001, 840.4254,
812.598399999999, 785.924200000001, 763.050999999999,
741.793799999999, 721.466, 699.040799999999, 677.997200000002,
649.866999999998, 634.911800000002, 609.8694, 591.981599999999,
570.2922, 557.129199999999, 538.3858, 521.872599999999,
502.951400000002, 495.776399999999, 475.171399999999, 459.751,
439.995200000001, 426.708999999999, 413.7016, 402.3868,
387.262599999998, 372.0524, 357.050999999999, 342.5098,
334.849200000001, 322.529399999999, 311.613799999999,
295.848000000002, 289.273000000001, 274.093000000001,
263.329600000001, 251.389599999999, 245.7392, 231.9614, 229.7952,
217.155200000001, 208.9588, 199.016599999999, 190.839199999999,
180.6976, 176.272799999999, 166.976999999999, 162.5252,
151.196400000001, 149.386999999999, 133.981199999998, 130.0586,
130.164000000001, 122.053400000001, 110.7428, 108.1276,
106.232400000001, 100.381600000001, 98.7668000000012,
86.6440000000002, 79.9768000000004, 82.4722000000002,
68.7026000000005, 70.1186000000016, 71.9948000000004, 58.998599999999,
59.0492000000013, 56.9818000000014, 47.5338000000011, 42.9928,
51.1591999999982, 37.2740000000013, 42.7220000000016,
31.3734000000004, 26.8090000000011, 25.8934000000008,
26.5286000000015, 29.5442000000003, 19.3503999999994,
26.0760000000009, 17.9527999999991, 14.8419999999969,
10.4683999999979, 8.65899999999965, 9.86720000000059,
4.34139999999752, -0.907800000000861, -3.32080000000133,
-0.936199999996461, -11.9916000000012, -8.87000000000262,
-6.33099999999831, -11.3366000000024, -15.9207999999999,
-9.34659999999712, -15.5034000000014, -19.2097999999969,
-15.357799999998, -28.2235999999975, -30.6898000000001,
-19.3271999999997, -25.6083999999973, -24.409599999999,
-13.6385999999984, -33.4473999999973, -32.6949999999997,
-28.9063999999998, -31.7483999999968, -32.2935999999972,
-35.8329999999987, -47.620600000002, -39.0855999999985,
-33.1434000000008, -46.1371999999974, -37.5892000000022,
-46.8164000000033, -47.3142000000007, -60.2914000000019,
-37.7575999999972],
&[11816.475, 11605.0046, 11395.3792, 11188.7504, 10984.1814, 10782.0086,
10582.0072, 10384.503, 10189.178, 9996.2738, 9806.0344, 9617.9798,
9431.394, 9248.7784, 9067.6894, 8889.6824, 8712.9134, 8538.8624,
8368.4944, 8197.7956, 8031.8916, 7866.6316, 7703.733, 7544.5726,
7386.204, 7230.666, 7077.8516, 6926.7886, 6778.6902, 6631.9632,
6487.304, 6346.7486, 6206.4408, 6070.202, 5935.2576, 5799.924,
5671.0324, 5541.9788, 5414.6112, 5290.0274, 5166.723, 5047.6906,
4929.162, 4815.1406, 4699.127, 4588.5606, 4477.7394, 4369.4014,
4264.2728, 4155.9224, 4055.581, 3955.505, 3856.9618, 3761.3828,
3666.9702, 3575.7764, 3482.4132, 3395.0186, 3305.8852, 3221.415,
3138.6024, 3056.296, 2970.4494, 2896.1526, 2816.8008, 2740.2156,
2670.497, 2594.1458, 2527.111, 2460.8168, 2387.5114, 2322.9498,
2260.6752, 2194.2686, 2133.7792, 2074.767, 2015.204, 1959.4226,
1898.6502, 1850.006, 1792.849, 1741.4838, 1687.9778, 1638.1322,
1589.3266, 1543.1394, 1496.8266, 1447.8516, 1402.7354, 1361.9606,
1327.0692, 1285.4106, 1241.8112, 1201.6726, 1161.973, 1130.261,
1094.2036, 1048.2036, 1020.6436, 990.901400000002, 961.199800000002,
924.769800000002, 899.526400000002, 872.346400000002, 834.375,
810.432000000001, 780.659800000001, 756.013800000001,
733.479399999997, 707.923999999999, 673.858, 652.222399999999,
636.572399999997, 615.738599999997, 586.696400000001,
564.147199999999, 541.679600000003, 523.943599999999,
505.714599999999, 475.729599999999, 461.779600000002,
449.750800000002, 439.020799999998, 412.7886, 400.245600000002,
383.188199999997, 362.079599999997, 357.533799999997,
334.319000000003, 327.553399999997, 308.559399999998,
291.270199999999, 279.351999999999, 271.791400000002,
252.576999999997, 247.482400000001, 236.174800000001,
218.774599999997, 220.155200000001, 208.794399999999,
201.223599999998, 182.995600000002, 185.5268, 164.547400000003,
176.5962, 150.689599999998, 157.8004, 138.378799999999,
134.021200000003, 117.614399999999, 108.194000000003,
97.0696000000025, 89.6042000000016, 95.6030000000028,
84.7810000000027, 72.635000000002, 77.3482000000004, 59.4907999999996,
55.5875999999989, 50.7346000000034, 61.3916000000027,
50.9149999999936, 39.0384000000049, 58.9395999999979, 29.633600000001,
28.2032000000036, 26.0078000000067, 17.0387999999948,
9.22000000000116, 13.8387999999977, 8.07240000000456,
14.1549999999988, 15.3570000000036, 3.42660000000615,
6.24820000000182, -2.96940000000177, -8.79940000000352,
-5.97860000000219, -14.4048000000039, -3.4143999999942,
-13.0148000000045, -11.6977999999945, -25.7878000000055,
-22.3185999999987, -24.409599999999, -31.9756000000052,
-18.9722000000038, -22.8678000000073, -30.8972000000067,
-32.3715999999986, -22.3907999999938, -43.6720000000059, -35.9038,
-39.7492000000057, -54.1641999999993, -45.2749999999942,
-42.2989999999991, -44.1089999999967, -64.3564000000042,
-49.9551999999967, -42.6116000000038],
&[23634.0036, 23210.8034, 22792.4744, 22379.1524, 21969.7928, 21565.326,
21165.3532, 20770.2806, 20379.9892, 19994.7098, 19613.318, 19236.799,
18865.4382, 18498.8244, 18136.5138, 17778.8668, 17426.2344, 17079.32,
16734.778, 16397.2418, 16063.3324, 15734.0232, 15409.731, 15088.728,
14772.9896, 14464.1402, 14157.5588, 13855.5958, 13559.3296,
13264.9096, 12978.326, 12692.0826, 12413.8816, 12137.3192, 11870.2326,
11602.5554, 11340.3142, 11079.613, 10829.5908, 10583.5466, 10334.0344,
10095.5072, 9859.694, 9625.2822, 9395.7862, 9174.0586, 8957.3164,
8738.064, 8524.155, 8313.7396, 8116.9168, 7913.542, 7718.4778,
7521.65, 7335.5596, 7154.2906, 6968.7396, 6786.3996, 6613.236,
6437.406, 6270.6598, 6107.7958, 5945.7174, 5787.6784, 5635.5784,
5482.308, 5337.9784, 5190.0864, 5045.9158, 4919.1386, 4771.817,
4645.7742, 4518.4774, 4385.5454, 4262.6622, 4142.74679999999,
4015.5318, 3897.9276, 3790.7764, 3685.13800000001, 3573.6274,
3467.9706, 3368.61079999999, 3271.5202, 3170.3848, 3076.4656,
2982.38400000001, 2888.4664, 2806.4868, 2711.9564, 2634.1434,
2551.3204, 2469.7662, 2396.61139999999, 2318.9902, 2243.8658,
2171.9246, 2105.01360000001, 2028.8536, 1960.9952, 1901.4096,
1841.86079999999, 1777.54700000001, 1714.5802, 1654.65059999999,
1596.311, 1546.2016, 1492.3296, 1433.8974, 1383.84600000001,
1339.4152, 1293.5518, 1245.8686, 1193.50659999999, 1162.27959999999,
1107.19439999999, 1069.18060000001, 1035.09179999999,
999.679000000004, 957.679999999993, 925.300199999998,
888.099400000006, 848.638600000006, 818.156400000007,
796.748399999997, 752.139200000005, 725.271200000003, 692.216,
671.633600000001, 647.939799999993, 621.670599999998,
575.398799999995, 561.226599999995, 532.237999999998,
521.787599999996, 483.095799999996, 467.049599999998,
465.286399999997, 415.548599999995, 401.047399999996,
380.607999999993, 377.362599999993, 347.258799999996,
338.371599999999, 310.096999999994, 301.409199999995,
276.280799999993, 265.586800000005, 258.994399999996,
223.915999999997, 215.925399999993, 213.503800000006,
191.045400000003, 166.718200000003, 166.259000000005,
162.941200000001, 148.829400000002, 141.645999999993,
123.535399999993, 122.329800000007, 89.473399999988, 80.1962000000058,
77.5457999999926, 59.1056000000099, 83.3509999999951,
52.2906000000075, 36.3979999999865, 40.6558000000077,
42.0003999999899, 19.6630000000005, 19.7153999999864,
-8.38539999999921, -0.692799999989802, 0.854800000000978,
3.23219999999856, -3.89040000000386, -5.25880000001052,
-24.9052000000083, -22.6837999999989, -26.4286000000138,
-34.997000000003, -37.0216000000073, -43.430400000012,
-58.2390000000014, -68.8034000000043, -56.9245999999985,
-57.8583999999973, -77.3097999999882, -73.2793999999994,
-81.0738000000129, -87.4530000000086, -65.0254000000132,
-57.296399999992, -96.2746000000043, -103.25, -96.081600000005,
-91.5542000000132, -102.465200000006, -107.688599999994,
-101.458000000013, -109.715800000005],
&[47270.0, 46423.3584, 45585.7074, 44757.152, 43938.8416, 43130.9514,
42330.03, 41540.407, 40759.6348, 39988.206, 39226.5144, 38473.2096,
37729.795, 36997.268, 36272.6448, 35558.665, 34853.0248, 34157.4472,
33470.5204, 32793.5742, 32127.0194, 31469.4182, 30817.6136,
30178.6968, 29546.8908, 28922.8544, 28312.271, 27707.0924, 27114.0326,
26526.692, 25948.6336, 25383.7826, 24823.5998, 24272.2974, 23732.2572,
23201.4976, 22674.2796, 22163.6336, 21656.515, 21161.7362, 20669.9368,
20189.4424, 19717.3358, 19256.3744, 18795.9638, 18352.197, 17908.5738,
17474.391, 17052.918, 16637.2236, 16228.4602, 15823.3474, 15428.6974,
15043.0284, 14667.6278, 14297.4588, 13935.2882, 13578.5402,
13234.6032, 12882.1578, 12548.0728, 12219.231, 11898.0072, 11587.2626,
11279.9072, 10973.5048, 10678.5186, 10392.4876, 10105.2556, 9825.766,
9562.5444, 9294.2222, 9038.2352, 8784.848, 8533.2644, 8301.7776,
8058.30859999999, 7822.94579999999, 7599.11319999999,
7366.90779999999, 7161.217, 6957.53080000001, 6736.212,
6548.21220000001, 6343.06839999999, 6156.28719999999,
5975.15419999999, 5791.75719999999, 5621.32019999999, 5451.66,
5287.61040000001, 5118.09479999999, 4957.288, 4798.4246,
4662.17559999999, 4512.05900000001, 4364.68539999999,
4220.77720000001, 4082.67259999999, 3957.19519999999,
3842.15779999999, 3699.3328, 3583.01180000001, 3473.8964,
3338.66639999999, 3233.55559999999, 3117.799, 3008.111,
2909.69140000001, 2814.86499999999, 2719.46119999999, 2624.742,
2532.46979999999, 2444.7886, 2370.1868, 2272.45259999999,
2196.19260000001, 2117.90419999999, 2023.2972, 1969.76819999999,
1885.58979999999, 1833.2824, 1733.91200000001, 1682.54920000001,
1604.57980000001, 1556.11240000001, 1491.3064, 1421.71960000001,
1371.22899999999, 1322.1324, 1264.7892, 1196.23920000001, 1143.8474,
1088.67240000001, 1073.60380000001, 1023.11660000001,
959.036400000012, 927.433199999999, 906.792799999996,
853.433599999989, 841.873800000001, 791.1054, 756.899999999994,
704.343200000003, 672.495599999995, 622.790399999998,
611.254799999995, 567.283200000005, 519.406599999988,
519.188400000014, 495.312800000014, 451.350799999986,
443.973399999988, 431.882199999993, 392.027000000002,
380.924200000009, 345.128999999986, 298.901400000002,
287.771999999997, 272.625, 247.253000000026, 222.490600000019,
223.590000000026, 196.407599999977, 176.425999999978,
134.725199999986, 132.4804, 110.445599999977, 86.7939999999944,
56.7038000000175, 64.915399999998, 38.3726000000024, 37.1606000000029,
46.170999999973, 49.1716000000015, 15.3362000000197, 6.71639999997569,
-34.8185999999987, -39.4476000000141, 12.6830000000191,
-12.3331999999937, -50.6565999999875, -59.9538000000175,
-65.1054000000004, -70.7576000000117, -106.325200000021,
-126.852200000023, -110.227599999984, -132.885999999999,
-113.897200000007, -142.713800000027, -151.145399999979,
-150.799200000009, -177.756200000003, -156.036399999983,
-182.735199999996, -177.259399999981, -198.663600000029,
-174.577600000019, -193.84580000001],
&[94541.0, 92848.811, 91174.019, 89517.558, 87879.9705, 86262.7565,
84663.5125, 83083.7435, 81521.7865, 79977.272, 78455.9465, 76950.219,
75465.432, 73994.152, 72546.71, 71115.2345, 69705.6765, 68314.937,
66944.2705, 65591.255, 64252.9485, 62938.016, 61636.8225, 60355.592,
59092.789, 57850.568, 56624.518, 55417.343, 54231.1415, 53067.387,
51903.526, 50774.649, 49657.6415, 48561.05, 47475.7575, 46410.159,
45364.852, 44327.053, 43318.4005, 42325.6165, 41348.4595, 40383.6265,
39436.77, 38509.502, 37594.035, 36695.939, 35818.6895, 34955.691,
34115.8095, 33293.949, 32465.0775, 31657.6715, 30877.2585, 30093.78,
29351.3695, 28594.1365, 27872.115, 27168.7465, 26477.076, 25774.541,
25106.5375, 24452.5135, 23815.5125, 23174.0655, 22555.2685,
21960.2065, 21376.3555, 20785.1925, 20211.517, 19657.0725, 19141.6865,
18579.737, 18081.3955, 17578.995, 17073.44, 16608.335, 16119.911,
15651.266, 15194.583, 14749.0495, 14343.4835, 13925.639, 13504.509,
13099.3885, 12691.2855, 12328.018, 11969.0345, 11596.5145, 11245.6355,
10917.6575, 10580.9785, 10277.8605, 9926.58100000001, 9605.538,
9300.42950000003, 8989.97850000003, 8728.73249999998, 8448.3235,
8175.31050000002, 7898.98700000002, 7629.79100000003,
7413.76199999999, 7149.92300000001, 6921.12650000001, 6677.1545,
6443.28000000003, 6278.23450000002, 6014.20049999998,
5791.20299999998, 5605.78450000001, 5438.48800000001, 5234.2255,
5059.6825, 4887.43349999998, 4682.935, 4496.31099999999,
4322.52250000002, 4191.42499999999, 4021.24200000003,
3900.64799999999, 3762.84250000003, 3609.98050000001,
3502.29599999997, 3363.84250000003, 3206.54849999998,
3079.70000000001, 2971.42300000001, 2867.80349999998,
2727.08100000001, 2630.74900000001, 2496.6165, 2440.902,
2356.19150000002, 2235.58199999999, 2120.54149999999,
2012.25449999998, 1933.35600000003, 1820.93099999998,
1761.54800000001, 1663.09350000002, 1578.84600000002,
1509.48149999999, 1427.3345, 1379.56150000001, 1306.68099999998,
1212.63449999999, 1084.17300000001, 1124.16450000001,
1060.69949999999, 1007.48849999998, 941.194499999983,
879.880500000028, 836.007500000007, 782.802000000025,
748.385499999975, 647.991500000004, 626.730500000005,
570.776000000013, 484.000500000024, 513.98550000001, 418.985499999952,
386.996999999974, 370.026500000036, 355.496999999974,
356.731499999994, 255.92200000002, 259.094000000041, 205.434499999974,
165.374500000034, 197.347500000033, 95.718499999959, 67.6165000000037,
54.6970000000438, 31.7395000000251, -15.8784999999916,
8.42500000004657, -26.3754999999655, -118.425500000012,
-66.6629999999423, -42.9745000000112, -107.364999999991,
-189.839000000036, -162.611499999999, -164.964999999967,
-189.079999999958, -223.931499999948, -235.329999999958,
-269.639500000048, -249.087999999989, -206.475499999942,
-283.04449999996, -290.667000000016, -304.561499999953,
-336.784499999951, -380.386500000022, -283.280499999993,
-364.533000000054, -389.059499999974, -364.454000000027,
-415.748000000021, -417.155000000028],
&[189083.0, 185696.913, 182348.774, 179035.946, 175762.762, 172526.444,
169329.754, 166166.099, 163043.269, 159958.91, 156907.912, 153906.845,
150924.199, 147996.568, 145093.457, 142239.233, 139421.475, 136632.27,
133889.588, 131174.2, 128511.619, 125868.621, 123265.385, 120721.061,
118181.769, 115709.456, 113252.446, 110840.198, 108465.099,
106126.164, 103823.469, 101556.618, 99308.004, 97124.508, 94937.803,
92833.731, 90745.061, 88677.627, 86617.47, 84650.442, 82697.833,
80769.132, 78879.629, 77014.432, 75215.626, 73384.587, 71652.482,
69895.93, 68209.301, 66553.669, 64921.981, 63310.323, 61742.115,
60205.018, 58698.658, 57190.657, 55760.865, 54331.169, 52908.167,
51550.273, 50225.254, 48922.421, 47614.533, 46362.049, 45098.569,
43926.083, 42736.03, 41593.473, 40425.26, 39316.237, 38243.651,
37170.617, 36114.609, 35084.19, 34117.233, 33206.509, 32231.505,
31318.728, 30403.404, 29540.0550000001, 28679.236, 27825.862,
26965.216, 26179.148, 25462.08, 24645.952, 23922.523, 23198.144,
22529.128, 21762.4179999999, 21134.779, 20459.117, 19840.818,
19187.04, 18636.3689999999, 17982.831, 17439.7389999999, 16874.547,
16358.2169999999, 15835.684, 15352.914, 14823.681, 14329.313,
13816.897, 13342.874, 12880.882, 12491.648, 12021.254, 11625.392,
11293.7610000001, 10813.697, 10456.209, 10099.074, 9755.39000000001,
9393.18500000006, 9047.57900000003, 8657.98499999999,
8395.85900000005, 8033.0, 7736.95900000003, 7430.59699999995,
7258.47699999996, 6924.58200000005, 6691.29399999999,
6357.92500000005, 6202.05700000003, 5921.19700000004,
5628.28399999999, 5404.96799999999, 5226.71100000001,
4990.75600000005, 4799.77399999998, 4622.93099999998, 4472.478,
4171.78700000001, 3957.46299999999, 3868.95200000005,
3691.14300000004, 3474.63100000005, 3341.67200000002,
3109.14000000001, 3071.97400000005, 2796.40399999998,
2756.17799999996, 2611.46999999997, 2471.93000000005,
2382.26399999997, 2209.22400000005, 2142.28399999999,
2013.96100000001, 1911.18999999994, 1818.27099999995,
1668.47900000005, 1519.65800000005, 1469.67599999998,
1367.13800000004, 1248.52899999998, 1181.23600000003,
1022.71900000004, 1088.20700000005, 959.03600000008, 876.095999999903,
791.183999999892, 703.337000000058, 731.949999999953, 586.86400000006,
526.024999999907, 323.004999999888, 320.448000000091,
340.672999999952, 309.638999999966, 216.601999999955,
102.922999999952, 19.2399999999907, -0.114000000059605,
-32.6240000000689, -89.3179999999702, -153.497999999905,
-64.2970000000205, -143.695999999996, -259.497999999905,
-253.017999999924, -213.948000000091, -397.590000000084,
-434.006000000052, -403.475000000093, -297.958000000101,
-404.317000000039, -528.898999999976, -506.621000000043,
-513.205000000075, -479.351000000024, -596.139999999898,
-527.016999999993, -664.681000000099, -680.306000000099,
-704.050000000047, -850.486000000034, -757.43200000003,
-713.308999999892]];
/// Streaming distinct-count (cardinality) estimator — HyperLogLog sketch.
///
/// NOTE(review): written against a pre-1.0 Rust dialect (`priv`, `uint`,
/// `~[u8]`); field syntax left untouched.
pub struct HyperLogLog {
    // Bias-correction constant alpha_m, derived from the precision p.
    priv alpha: f64,
    // Precision: number of hash bits used to pick a register.
    priv p: u8,
    // Number of registers, m = 1 << p.
    priv m: uint,
    // Register array: one 8-bit register per bucket, holding max rho seen.
    priv M: ~[u8],
    // Keys for the keyed hash applied to inserted values; `merge` asserts
    // both sketches share the same keys, so only template-built sketches mix.
    priv hash_key_1: u64,
    priv hash_key_2: u64,
}
impl HyperLogLog {
    /// Builds a sketch whose standard error is approximately `error_rate`.
    ///
    /// The register count m is derived from the HyperLogLog error bound
    /// sigma ~= 1.04 / sqrt(m), i.e. m >= (1.04 / error_rate)^2.
    pub fn new(error_rate: f64) -> HyperLogLog {
        assert!(error_rate > 0.0 && error_rate < 1.0);
        let sr = 1.04 / error_rate;
        // BUGFIX: p must be ceil(log2(sr^2)), not ceil(ln(sr^2)); using the
        // natural log chose a p far too small to honour `error_rate`.
        let p = ((sr * sr).ln() / 2.0f64.ln()).ceil() as u8;
        let alpha = HyperLogLog::get_alpha(p);
        let m = 1u << p;
        HyperLogLog{alpha: alpha,
                    p: p,
                    m: m,
                    M: vec::from_elem(m, 0u8),
                    // Fresh random keys per sketch for the keyed hash.
                    hash_key_1: rand::random(),
                    hash_key_2: rand::random(),}
    }
    /// Creates an empty sketch sharing `hll`'s precision and hash keys,
    /// so the two can later be `merge`d.
    pub fn new_from_template(hll: &HyperLogLog) -> HyperLogLog {
        HyperLogLog{alpha: hll.alpha,
                    p: hll.p,
                    m: hll.m,
                    M: vec::from_elem(hll.m, 0u8),
                    hash_key_1: hll.hash_key_1,
                    hash_key_2: hll.hash_key_2}
    }
    /// Adds a value to the sketch (duplicates have no further effect).
    pub fn insert(&mut self, value: &str) {
        let x = value.hash_keyed(self.hash_key_1, self.hash_key_2) as u64;
        // Low p bits select the register; the remaining bits feed rho.
        let j = x & (self.m - 1) as u64;
        let w = x >> self.p;
        let rho = HyperLogLog::get_rho(w, 64 - self.p);
        // Each register keeps the maximum rho observed for its bucket.
        if rho > self.M[j] {
            self.M[j] = rho;
        }
    }
    /// Returns the estimated number of distinct values inserted so far.
    pub fn len(&self) -> f64 {
        let V = HyperLogLog::vec_count_zero(self.M);
        if V > 0 {
            // Some registers are still zero: linear counting is more
            // accurate for small cardinalities.
            let H = self.m as f64 * (self.m as f64 / V as f64).ln();
            if H <= HyperLogLog::get_treshold(self.p) {
                H
            } else {
                self.ep()
            }
        } else {
            self.ep()
        }
    }
    /// Merges `src` into `self`. Both sketches must have been built with the
    /// same parameters and hash keys (e.g. via `new_from_template`).
    pub fn merge(&mut self, src: &HyperLogLog) {
        assert!(src.alpha == self.alpha);
        assert!(src.p == self.p);
        assert!(src.m == self.m);
        assert!(src.hash_key_1 == self.hash_key_1);
        assert!(src.hash_key_2 == self.hash_key_2);
        // Register-wise max is the lossless union of two HLL sketches.
        for i in range(0, self.m) {
            if src.M[i] > self.M[i] {
                self.M[i] = src.M[i];
            }
        }
    }
    /// Resets every register; the sketch then estimates zero.
    pub fn clear(&mut self) {
        self.M.mut_iter().advance(|x| { *x = 0; true });
    }
    /// Cardinality threshold below which linear counting is preferred over
    /// the bias-corrected raw estimate (empirical HLL++ values).
    fn get_treshold(p: u8) -> f64 {
        // BUGFIX: the table starts at p = 4 (same convention as BIAS_DATA
        // and RAW_ESTIMATE_DATA); indexing with p directly read the wrong
        // entry and overran the 15-element table for p >= 15.
        TRESHOLD_DATA[p - 4]
    }
    /// Returns the alpha_m bias-correction constant for precision `p`.
    fn get_alpha(p: u8) -> f64 {
        assert!(p >= 4 && p <= 16);
        match p {
            4 => 0.673,
            5 => 0.697,
            6 => 0.709,
            _ => 0.7213 / (1.0 + 1.079 / (1 << p) as f64)
        }
    }
    /// Number of significant bits in `x` (0 when x == 0).
    fn bit_length(x: u64) -> u8 {
        let mut bits: u8 = 0;
        let mut xm = x;
        while xm != 0 {
            bits += 1;
            xm >>= 1;
        }
        bits
    }
    /// rho(w): 1-based position of the leftmost 1-bit of `w` inside a
    /// `max_width`-bit window; equals max_width + 1 when w is zero.
    fn get_rho(w: u64, max_width: u8) -> u8 {
        let rho = max_width - HyperLogLog::bit_length(w) + 1;
        assert!(rho > 0);
        rho
    }
    /// Counts registers that are still zero (the "V" of linear counting).
    fn vec_count_zero(v: &[u8]) -> uint {
        v.iter().count(|&x| x == 0)
    }
    /// Estimates the bias of raw estimate `E` for precision `p` by averaging
    /// the tabulated bias of the nearest tabulated raw estimates.
    fn estimate_bias(E: f64, p: u8) -> f64 {
        let bias_vector = BIAS_DATA[p - 4];
        let nearest_neighbors =
            HyperLogLog::get_nearest_neighbors(E, RAW_ESTIMATE_DATA[p - 4]);
        let sum = nearest_neighbors.iter().fold(0.0, |acc, &neighbor|
            acc + bias_vector[neighbor]);
        sum / nearest_neighbors.len() as f64
    }
    /// Indices of the 6 entries of `estimate_vector` closest to `E`
    /// (by squared distance; assumes the vector has at least 6 entries).
    fn get_nearest_neighbors(E: f64, estimate_vector: &[f64]) -> ~[uint] {
        let ev_len = estimate_vector.len();
        let mut r: ~[(f64, uint)] = vec::from_elem(ev_len, (0.0, 0u));
        for i in range(0u, ev_len) {
            let dr = E - estimate_vector[i];
            r[i] = (dr * dr, i);
        }
        // Sort by squared distance; ties broken by index via tuple ordering.
        r.sort_by(|a, b|
            if a < b { Less } else if a > b { Greater } else { Equal });
        let top = r.slice(0, 6);
        top.map(|&ez| *ez.second_ref())
    }
    /// Bias-corrected raw HyperLogLog estimate.
    fn ep(&self) -> f64 {
        let sum = self.M.iter().fold(0.0, |acc, &x|
            acc + num::powf(2.0, -(x as f64)));
        // BUGFIX: m * m was computed in uint, which overflows 32-bit targets
        // at p = 16 (2^32); multiply in f64 instead.
        let E = self.alpha * (self.m as f64) * (self.m as f64) / sum;
        if E <= (5 * self.m) as f64 {
            // Small-range correction via the empirical bias tables.
            E - HyperLogLog::estimate_bias(E, self.p)
        } else {
            E
        }
    }
}
#[test]
fn hyperloglog_test_simple() {
    // Six inserts containing duplicates collapse to three distinct keys.
    let samples = ["test1", "test2", "test3", "test2", "test2", "test2"];
    let mut sketch = HyperLogLog::new(0.00408);
    for &sample in samples.iter() {
        sketch.insert(sample);
    }
    assert!(sketch.len().round() == 3.0);
    // Clearing resets every register, so the estimate drops back to zero.
    sketch.clear();
    assert!(sketch.len().round() == 0.0);
}
#[test]
fn hyperloglog_test_merge() {
    let first_batch = ["test1", "test2", "test3", "test2", "test2", "test2"];
    let second_batch = ["test3", "test4", "test4", "test4", "test4", "test1"];

    let mut left = HyperLogLog::new(0.00408);
    for &key in first_batch.iter() {
        left.insert(key);
    }
    assert!(left.len().round() == 3.0);

    // A template-built sketch shares p/alpha/hash keys, so merging is legal.
    let mut right = HyperLogLog::new_from_template(&left);
    for &key in second_batch.iter() {
        right.insert(key);
    }
    assert!(right.len().round() == 3.0);

    // Union of {test1,test2,test3} and {test1,test3,test4}: 4 distinct keys.
    left.merge(&right);
    assert!(left.len().round() == 4.0);
}
Note: the `second_ref()` tuple method is gone (apparently removed upstream; the code above still calls it).
// (C)opyleft 2013 Frank Denis
/*!
* HyperLogLog implementation for Rust
*/
#[desc = "A hyperloglog implementation."];
#[license = "BSD"];
#[crate_id = "hyperloglog#0.1"];
#[crate_type = "rlib"];
#[warn(non_camel_case_types,
non_uppercase_statics,
unnecessary_qualification,
managed_heap_memory)];
extern crate extra;
use std::num;
use std::rand;
use std::vec;
// Linear-counting / raw-estimate crossover thresholds, one per precision.
// NOTE(review): these values match the empirical threshold table of the
// HyperLogLog++ paper (Heule et al., 2013), whose entries run from p = 4
// through p = 18 — i.e. the table appears to use the same `p - 4` indexing
// convention as BIAS_DATA and RAW_ESTIMATE_DATA; verify callers index it
// that way. ("TRESHOLD" is a typo of "THRESHOLD".)
static TRESHOLD_DATA: [f64, ..15] =
    [10.0, 20.0, 40.0, 80.0, 220.0, 400.0, 900.0, 1800.0, 3100.0, 6500.0,
     11500.0, 20000.0, 50000.0, 120000.0, 350000.0];
static RAW_ESTIMATE_DATA: &'static [&'static [f64]] =
&[&[11.0, 11.717, 12.207, 12.7896, 13.2882, 13.8204, 14.3772, 14.9342,
15.5202, 16.161, 16.7722, 17.4636, 18.0396, 18.6766, 19.3566, 20.0454,
20.7936, 21.4856, 22.2666, 22.9946, 23.766, 24.4692, 25.3638, 26.0764,
26.7864, 27.7602, 28.4814, 29.433, 30.2926, 31.0664, 31.9996, 32.7956,
33.5366, 34.5894, 35.5738, 36.2698, 37.3682, 38.0544, 39.2342,
40.0108, 40.7966, 41.9298, 42.8704, 43.6358, 44.5194, 45.773, 46.6772,
47.6174, 48.4888, 49.3304, 50.2506, 51.4996, 52.3824, 53.3078,
54.3984, 55.5838, 56.6618, 57.2174, 58.3514, 59.0802, 60.1482,
61.0376, 62.3598, 62.8078, 63.9744, 64.914, 65.781, 67.1806, 68.0594,
68.8446, 69.7928, 70.8248, 71.8324, 72.8598, 73.6246, 74.7014, 75.393,
76.6708, 77.2394],
&[23.0, 23.1194, 23.8208, 24.2318, 24.77, 25.2436, 25.7774, 26.2848,
26.8224, 27.3742, 27.9336, 28.503, 29.0494, 29.6292, 30.2124, 30.798,
31.367, 31.9728, 32.5944, 33.217, 33.8438, 34.3696, 35.0956, 35.7044,
36.324, 37.0668, 37.6698, 38.3644, 39.049, 39.6918, 40.4146, 41.082,
41.687, 42.5398, 43.2462, 43.857, 44.6606, 45.4168, 46.1248, 46.9222,
47.6804, 48.447, 49.3454, 49.9594, 50.7636, 51.5776, 52.331, 53.19,
53.9676, 54.7564, 55.5314, 56.4442, 57.3708, 57.9774, 58.9624,
59.8796, 60.755, 61.472, 62.2076, 63.1024, 63.8908, 64.7338, 65.7728,
66.629, 67.413, 68.3266, 69.1524, 70.2642, 71.1806, 72.0566, 72.9192,
73.7598, 74.3516, 75.5802, 76.4386, 77.4916, 78.1524, 79.1892,
79.8414, 80.8798, 81.8376, 82.4698, 83.7656, 84.331, 85.5914, 86.6012,
87.7016, 88.5582, 89.3394, 90.3544, 91.4912, 92.308, 93.3552, 93.9746,
95.2052, 95.727, 97.1322, 98.3944, 98.7588, 100.242, 101.1914,
102.2538, 102.8776, 103.6292, 105.1932, 105.9152, 107.0868, 107.6728,
108.7144, 110.3114, 110.8716, 111.245, 112.7908, 113.7064, 114.636,
115.7464, 116.1788, 117.7464, 118.4896, 119.6166, 120.5082, 121.7798,
122.9028, 123.4426, 124.8854, 125.705, 126.4652, 128.3464, 128.3462,
130.0398, 131.0342, 131.0042, 132.4766, 133.511, 134.7252, 135.425,
136.5172, 138.0572, 138.6694, 139.3712, 140.8598, 141.4594, 142.554,
143.4006, 144.7374, 146.1634, 146.8994, 147.605, 147.9304, 149.1636,
150.2468, 151.5876, 152.2096, 153.7032, 154.7146, 155.807, 156.9228,
157.0372, 158.5852],
&[46.0, 46.1902, 47.271, 47.8358, 48.8142, 49.2854, 50.317, 51.354,
51.8924, 52.9436, 53.4596, 54.5262, 55.6248, 56.1574, 57.2822, 57.837,
58.9636, 60.074, 60.7042, 61.7976, 62.4772, 63.6564, 64.7942, 65.5004,
66.686, 67.291, 68.5672, 69.8556, 70.4982, 71.8204, 72.4252, 73.7744,
75.0786, 75.8344, 77.0294, 77.8098, 79.0794, 80.5732, 81.1878,
82.5648, 83.2902, 84.6784, 85.3352, 86.8946, 88.3712, 89.0852, 90.499,
91.2686, 92.6844, 94.2234, 94.9732, 96.3356, 97.2286, 98.7262,
100.3284, 101.1048, 102.5962, 103.3562, 105.1272, 106.4184, 107.4974,
109.0822, 109.856, 111.48, 113.2834, 114.0208, 115.637, 116.5174,
118.0576, 119.7476, 120.427, 122.1326, 123.2372, 125.2788, 126.6776,
127.7926, 129.1952, 129.9564, 131.6454, 133.87, 134.5428, 136.2,
137.0294, 138.6278, 139.6782, 141.792, 143.3516, 144.2832, 146.0394,
147.0748, 148.4912, 150.849, 151.696, 153.5404, 154.073, 156.3714,
157.7216, 158.7328, 160.4208, 161.4184, 163.9424, 165.2772, 166.411,
168.1308, 168.769, 170.9258, 172.6828, 173.7502, 175.706, 176.3886,
179.0186, 180.4518, 181.927, 183.4172, 184.4114, 186.033, 188.5124,
189.5564, 191.6008, 192.4172, 193.8044, 194.997, 197.4548, 198.8948,
200.2346, 202.3086, 203.1548, 204.8842, 206.6508, 206.6772, 209.7254,
210.4752, 212.7228, 214.6614, 215.1676, 217.793, 218.0006, 219.9052,
221.66, 223.5588, 225.1636, 225.6882, 227.7126, 229.4502, 231.1978,
232.9756, 233.1654, 236.727, 238.1974, 237.7474, 241.1346, 242.3048,
244.1948, 245.3134, 246.879, 249.1204, 249.853, 252.6792, 253.857,
254.4486, 257.2362, 257.9534, 260.0286, 260.5632, 262.663, 264.723,
265.7566, 267.2566, 267.1624, 270.62, 272.8216, 273.2166, 275.2056,
276.2202, 278.3726, 280.3344, 281.9284, 283.9728, 284.1924, 286.4872,
287.587, 289.807, 291.1206, 292.769, 294.8708, 296.665, 297.1182,
299.4012, 300.6352, 302.1354, 304.1756, 306.1606, 307.3462, 308.5214,
309.4134, 310.8352, 313.9684, 315.837, 316.7796, 318.9858],
&[92.0, 93.4934, 94.9758, 96.4574, 97.9718, 99.4954, 101.5302, 103.0756,
104.6374, 106.1782, 107.7888, 109.9522, 111.592, 113.2532, 114.9086,
116.5938, 118.9474, 120.6796, 122.4394, 124.2176, 125.9768, 128.4214,
130.2528, 132.0102, 133.8658, 135.7278, 138.3044, 140.1316, 142.093,
144.0032, 145.9092, 148.6306, 150.5294, 152.5756, 154.6508, 156.662,
159.552, 161.3724, 163.617, 165.5754, 167.7872, 169.8444, 172.7988,
174.8606, 177.2118, 179.3566, 181.4476, 184.5882, 186.6816, 189.0824,
191.0258, 193.6048, 196.4436, 198.7274, 200.957, 203.147, 205.4364,
208.7592, 211.3386, 213.781, 215.8028, 218.656, 221.6544, 223.996,
226.4718, 229.1544, 231.6098, 234.5956, 237.0616, 239.5758, 242.4878,
244.5244, 248.2146, 250.724, 252.8722, 255.5198, 258.0414, 261.941,
264.9048, 266.87, 269.4304, 272.028, 274.4708, 278.37, 281.0624,
283.4668, 286.5532, 289.4352, 293.2564, 295.2744, 298.2118, 300.7472,
304.1456, 307.2928, 309.7504, 312.5528, 315.979, 318.2102, 322.1834,
324.3494, 327.325, 330.6614, 332.903, 337.2544, 339.9042, 343.215,
345.2864, 348.0814, 352.6764, 355.301, 357.139, 360.658, 363.1732,
366.5902, 369.9538, 373.0828, 375.922, 378.9902, 382.7328, 386.4538,
388.1136, 391.2234, 394.0878, 396.708, 401.1556, 404.1852, 406.6372,
409.6822, 412.7796, 416.6078, 418.4916, 422.131, 424.5376, 428.1988,
432.211, 434.4502, 438.5282, 440.912, 444.0448, 447.7432, 450.8524,
453.7988, 456.7858, 458.8868, 463.9886, 466.5064, 468.9124, 472.6616,
475.4682, 478.582, 481.304, 485.2738, 488.6894, 490.329, 496.106,
497.6908, 501.1374, 504.5322, 506.8848, 510.3324, 513.4512, 516.179,
520.4412, 522.6066, 526.167, 528.7794, 533.379, 536.067, 538.46,
542.9116, 545.692, 547.9546, 552.493, 555.2722, 557.335, 562.449,
564.2014, 569.0738, 571.0974, 574.8564, 578.2996, 581.409, 583.9704,
585.8098, 589.6528, 594.5998, 595.958, 600.068, 603.3278, 608.2016,
609.9632, 612.864, 615.43, 620.7794, 621.272, 625.8644, 629.206,
633.219, 634.5154, 638.6102],
&[184.2152, 187.2454, 190.2096, 193.6652, 196.6312, 199.6822, 203.249,
206.3296, 210.0038, 213.2074, 216.4612, 220.27, 223.5178, 227.4412,
230.8032, 234.1634, 238.1688, 241.6074, 245.6946, 249.2664, 252.8228,
257.0432, 260.6824, 264.9464, 268.6268, 272.2626, 276.8376, 280.4034,
284.8956, 288.8522, 292.7638, 297.3552, 301.3556, 305.7526, 309.9292,
313.8954, 318.8198, 322.7668, 327.298, 331.6688, 335.9466, 340.9746,
345.1672, 349.3474, 354.3028, 358.8912, 364.114, 368.4646, 372.9744,
378.4092, 382.6022, 387.843, 392.5684, 397.1652, 402.5426, 407.4152,
412.5388, 417.3592, 422.1366, 427.486, 432.3918, 437.5076, 442.509,
447.3834, 453.3498, 458.0668, 463.7346, 469.1228, 473.4528, 479.7,
484.644, 491.0518, 495.5774, 500.9068, 506.432, 512.1666, 517.434,
522.6644, 527.4894, 533.6312, 538.3804, 544.292, 550.5496, 556.0234,
562.8206, 566.6146, 572.4188, 579.117, 583.6762, 590.6576, 595.7864,
601.509, 607.5334, 612.9204, 619.772, 624.2924, 630.8654, 636.1836,
642.745, 649.1316, 655.0386, 660.0136, 666.6342, 671.6196, 678.1866,
684.4282, 689.3324, 695.4794, 702.5038, 708.129, 713.528, 720.3204,
726.463, 732.7928, 739.123, 744.7418, 751.2192, 756.5102, 762.6066,
769.0184, 775.2224, 781.4014, 787.7618, 794.1436, 798.6506, 805.6378,
811.766, 819.7514, 824.5776, 828.7322, 837.8048, 843.6302, 849.9336,
854.4798, 861.3388, 867.9894, 873.8196, 880.3136, 886.2308, 892.4588,
899.0816, 905.4076, 912.0064, 917.3878, 923.619, 929.998, 937.3482,
943.9506, 947.991, 955.1144, 962.203, 968.8222, 975.7324, 981.7826,
988.7666, 994.2648, 1000.3128, 1007.4082, 1013.7536, 1020.3376,
1026.7156, 1031.7478, 1037.4292, 1045.393, 1051.2278, 1058.3434,
1062.8726, 1071.884, 1076.806, 1082.9176, 1089.1678, 1095.5032,
1102.525, 1107.2264, 1115.315, 1120.93, 1127.252, 1134.1496,
1139.0408, 1147.5448, 1153.3296, 1158.1974, 1166.5262, 1174.3328,
1175.657, 1184.4222, 1190.9172, 1197.1292, 1204.4606, 1210.4578,
1218.8728, 1225.3336, 1226.6592, 1236.5768, 1241.363, 1249.4074,
1254.6566, 1260.8014, 1266.5454, 1274.5192],
&[369.0, 374.8294, 381.2452, 387.6698, 394.1464, 400.2024, 406.8782,
413.6598, 420.462, 427.2826, 433.7102, 440.7416, 447.9366, 455.1046,
462.285, 469.0668, 476.306, 483.8448, 491.301, 498.9886, 506.2422,
513.8138, 521.7074, 529.7428, 537.8402, 545.1664, 553.3534, 561.594,
569.6886, 577.7876, 585.65, 594.228, 602.8036, 611.1666, 620.0818,
628.0824, 637.2574, 646.302, 655.1644, 664.0056, 672.3802, 681.7192,
690.5234, 700.2084, 708.831, 718.485, 728.1112, 737.4764, 746.76,
756.3368, 766.5538, 775.5058, 785.2646, 795.5902, 804.3818, 814.8998,
824.9532, 835.2062, 845.2798, 854.4728, 864.9582, 875.3292, 886.171,
896.781, 906.5716, 916.7048, 927.5322, 937.875, 949.3972, 958.3464,
969.7274, 980.2834, 992.1444, 1003.4264, 1013.0166, 1024.018,
1035.0438, 1046.34, 1057.6856, 1068.9836, 1079.0312, 1091.677,
1102.3188, 1113.4846, 1124.4424, 1135.739, 1147.1488, 1158.9202,
1169.406, 1181.5342, 1193.2834, 1203.8954, 1216.3286, 1226.2146,
1239.6684, 1251.9946, 1262.123, 1275.4338, 1285.7378, 1296.076,
1308.9692, 1320.4964, 1333.0998, 1343.9864, 1357.7754, 1368.3208,
1380.4838, 1392.7388, 1406.0758, 1416.9098, 1428.9728, 1440.9228,
1453.9292, 1462.617, 1476.05, 1490.2996, 1500.6128, 1513.7392,
1524.5174, 1536.6322, 1548.2584, 1562.3766, 1572.423, 1587.1232,
1596.5164, 1610.5938, 1622.5972, 1633.1222, 1647.7674, 1658.5044,
1671.57, 1683.7044, 1695.4142, 1708.7102, 1720.6094, 1732.6522,
1747.841, 1756.4072, 1769.9786, 1782.3276, 1797.5216, 1808.3186,
1819.0694, 1834.354, 1844.575, 1856.2808, 1871.1288, 1880.7852,
1893.9622, 1906.3418, 1920.6548, 1932.9302, 1945.8584, 1955.473,
1968.8248, 1980.6446, 1995.9598, 2008.349, 2019.8556, 2033.0334,
2044.0206, 2059.3956, 2069.9174, 2082.6084, 2093.7036, 2106.6108,
2118.9124, 2132.301, 2144.7628, 2159.8422, 2171.0212, 2183.101,
2193.5112, 2208.052, 2221.3194, 2233.3282, 2247.295, 2257.7222,
2273.342, 2286.5638, 2299.6786, 2310.8114, 2322.3312, 2335.516,
2349.874, 2363.5968, 2373.865, 2387.1918, 2401.8328, 2414.8496,
2424.544, 2436.7592, 2447.1682, 2464.1958, 2474.3438, 2489.0006,
2497.4526, 2513.6586, 2527.19, 2540.7028, 2553.768],
&[738.1256, 750.4234, 763.1064, 775.4732, 788.4636, 801.0644, 814.488,
827.9654, 841.0832, 854.7864, 868.1992, 882.2176, 896.5228, 910.1716,
924.7752, 938.899, 953.6126, 968.6492, 982.9474, 998.5214, 1013.1064,
1028.6364, 1044.2468, 1059.4588, 1075.3832, 1091.0584, 1106.8606,
1123.3868, 1139.5062, 1156.1862, 1172.463, 1189.339, 1206.1936,
1223.1292, 1240.1854, 1257.2908, 1275.3324, 1292.8518, 1310.5204,
1328.4854, 1345.9318, 1364.552, 1381.4658, 1400.4256, 1419.849,
1438.152, 1456.8956, 1474.8792, 1494.118, 1513.62, 1532.5132,
1551.9322, 1570.7726, 1590.6086, 1610.5332, 1630.5918, 1650.4294,
1669.7662, 1690.4106, 1710.7338, 1730.9012, 1750.4486, 1770.1556,
1791.6338, 1812.7312, 1833.6264, 1853.9526, 1874.8742, 1896.8326,
1918.1966, 1939.5594, 1961.07, 1983.037, 2003.1804, 2026.071,
2047.4884, 2070.0848, 2091.2944, 2114.333, 2135.9626, 2158.2902,
2181.0814, 2202.0334, 2224.4832, 2246.39, 2269.7202, 2292.1714,
2314.2358, 2338.9346, 2360.891, 2384.0264, 2408.3834, 2430.1544,
2454.8684, 2476.9896, 2501.4368, 2522.8702, 2548.0408, 2570.6738,
2593.5208, 2617.0158, 2640.2302, 2664.0962, 2687.4986, 2714.2588,
2735.3914, 2759.6244, 2781.8378, 2808.0072, 2830.6516, 2856.2454,
2877.2136, 2903.4546, 2926.785, 2951.2294, 2976.468, 3000.867,
3023.6508, 3049.91, 3073.5984, 3098.162, 3121.5564, 3146.2328,
3170.9484, 3195.5902, 3221.3346, 3242.7032, 3271.6112, 3296.5546,
3317.7376, 3345.072, 3369.9518, 3394.326, 3418.1818, 3444.6926,
3469.086, 3494.2754, 3517.8698, 3544.248, 3565.3768, 3588.7234,
3616.979, 3643.7504, 3668.6812, 3695.72, 3719.7392, 3742.6224,
3770.4456, 3795.6602, 3819.9058, 3844.002, 3869.517, 3895.6824,
3920.8622, 3947.1364, 3973.985, 3995.4772, 4021.62, 4046.628, 4074.65,
4096.2256, 4121.831, 4146.6406, 4173.276, 4195.0744, 4223.9696,
4251.3708, 4272.9966, 4300.8046, 4326.302, 4353.1248, 4374.312,
4403.0322, 4426.819, 4450.0598, 4478.5206, 4504.8116, 4528.8928,
4553.9584, 4578.8712, 4603.8384, 4632.3872, 4655.5128, 4675.821,
4704.6222, 4731.9862, 4755.4174, 4781.2628, 4804.332, 4832.3048,
4862.8752, 4883.4148, 4906.9544, 4935.3516, 4954.3532, 4984.0248,
5011.217, 5035.3258, 5057.3672, 5084.1828],
&[1477.0, 1501.6014, 1526.5802, 1551.7942, 1577.3042, 1603.2062,
1629.8402, 1656.2292, 1682.9462, 1709.9926, 1737.3026, 1765.4252,
1793.0578, 1821.6092, 1849.626, 1878.5568, 1908.527, 1937.5154,
1967.1874, 1997.3878, 2027.37, 2058.1972, 2089.5728, 2120.1012,
2151.9668, 2183.292, 2216.0772, 2247.8578, 2280.6562, 2313.041,
2345.714, 2380.3112, 2414.1806, 2447.9854, 2481.656, 2516.346,
2551.5154, 2586.8378, 2621.7448, 2656.6722, 2693.5722, 2729.1462,
2765.4124, 2802.8728, 2838.898, 2876.408, 2913.4926, 2951.4938,
2989.6776, 3026.282, 3065.7704, 3104.1012, 3143.7388, 3181.6876,
3221.1872, 3261.5048, 3300.0214, 3339.806, 3381.409, 3421.4144,
3461.4294, 3502.2286, 3544.651, 3586.6156, 3627.337, 3670.083,
3711.1538, 3753.5094, 3797.01, 3838.6686, 3882.1678, 3922.8116,
3967.9978, 4009.9204, 4054.3286, 4097.5706, 4140.6014, 4185.544,
4229.5976, 4274.583, 4316.9438, 4361.672, 4406.2786, 4451.8628,
4496.1834, 4543.505, 4589.1816, 4632.5188, 4678.2294, 4724.8908,
4769.0194, 4817.052, 4861.4588, 4910.1596, 4956.4344, 5002.5238,
5048.13, 5093.6374, 5142.8162, 5187.7894, 5237.3984, 5285.6078,
5331.0858, 5379.1036, 5428.6258, 5474.6018, 5522.7618, 5571.5822,
5618.59, 5667.9992, 5714.88, 5763.454, 5808.6982, 5860.3644,
5910.2914, 5953.571, 6005.9232, 6055.1914, 6104.5882, 6154.5702,
6199.7036, 6251.1764, 6298.7596, 6350.0302, 6398.061, 6448.4694,
6495.933, 6548.0474, 6597.7166, 6646.9416, 6695.9208, 6742.6328,
6793.5276, 6842.1934, 6894.2372, 6945.3864, 6996.9228, 7044.2372,
7094.1374, 7142.2272, 7192.2942, 7238.8338, 7288.9006, 7344.0908,
7394.8544, 7443.5176, 7490.4148, 7542.9314, 7595.6738, 7641.9878,
7694.3688, 7743.0448, 7797.522, 7845.53, 7899.594, 7950.3132,
7996.455, 8050.9442, 8092.9114, 8153.1374, 8197.4472, 8252.8278,
8301.8728, 8348.6776, 8401.4698, 8453.551, 8504.6598, 8553.8944,
8604.1276, 8657.6514, 8710.3062, 8758.908, 8807.8706, 8862.1702,
8910.4668, 8960.77, 9007.2766, 9063.164, 9121.0534, 9164.1354,
9218.1594, 9267.767, 9319.0594, 9372.155, 9419.7126, 9474.3722,
9520.1338, 9572.368, 9622.7702, 9675.8448, 9726.5396, 9778.7378,
9827.6554, 9878.1922, 9928.7782, 9978.3984, 10026.578, 10076.5626,
10137.1618, 10177.5244, 10229.9176],
&[2954.0, 3003.4782, 3053.3568, 3104.3666, 3155.324, 3206.9598,
3259.648, 3312.539, 3366.1474, 3420.2576, 3474.8376, 3530.6076,
3586.451, 3643.38, 3700.4104, 3757.5638, 3815.9676, 3875.193,
3934.838, 3994.8548, 4055.018, 4117.1742, 4178.4482, 4241.1294,
4304.4776, 4367.4044, 4431.8724, 4496.3732, 4561.4304, 4627.5326,
4693.949, 4761.5532, 4828.7256, 4897.6182, 4965.5186, 5034.4528,
5104.865, 5174.7164, 5244.6828, 5316.6708, 5387.8312, 5459.9036,
5532.476, 5604.8652, 5679.6718, 5753.757, 5830.2072, 5905.2828,
5980.0434, 6056.6264, 6134.3192, 6211.5746, 6290.0816, 6367.1176,
6447.9796, 6526.5576, 6606.1858, 6686.9144, 6766.1142, 6847.0818,
6927.9664, 7010.9096, 7091.0816, 7175.3962, 7260.3454, 7344.018,
7426.4214, 7511.3106, 7596.0686, 7679.8094, 7765.818, 7852.4248,
7936.834, 8022.363, 8109.5066, 8200.4554, 8288.5832, 8373.366,
8463.4808, 8549.7682, 8642.0522, 8728.3288, 8820.9528, 8907.727,
9001.0794, 9091.2522, 9179.988, 9269.852, 9362.6394, 9453.642,
9546.9024, 9640.6616, 9732.6622, 9824.3254, 9917.7484, 10007.9392,
10106.7508, 10196.2152, 10289.8114, 10383.5494, 10482.3064,
10576.8734, 10668.7872, 10764.7156, 10862.0196, 10952.793, 11049.9748,
11146.0702, 11241.4492, 11339.2772, 11434.2336, 11530.741, 11627.6136,
11726.311, 11821.5964, 11918.837, 12015.3724, 12113.0162, 12213.0424,
12306.9804, 12408.4518, 12504.8968, 12604.586, 12700.9332, 12798.705,
12898.5142, 12997.0488, 13094.788, 13198.475, 13292.7764, 13392.9698,
13486.8574, 13590.1616, 13686.5838, 13783.6264, 13887.2638,
13992.0978, 14081.0844, 14189.9956, 14280.0912, 14382.4956,
14486.4384, 14588.1082, 14686.2392, 14782.276, 14888.0284, 14985.1864,
15088.8596, 15187.0998, 15285.027, 15383.6694, 15495.8266, 15591.3736,
15694.2008, 15790.3246, 15898.4116, 15997.4522, 16095.5014,
16198.8514, 16291.7492, 16402.6424, 16499.1266, 16606.2436,
16697.7186, 16796.3946, 16902.3376, 17005.7672, 17100.814, 17206.8282,
17305.8262, 17416.0744, 17508.4092, 17617.0178, 17715.4554, 17816.758,
17920.1748, 18012.9236, 18119.7984, 18223.2248, 18324.2482,
18426.6276, 18525.0932, 18629.8976, 18733.2588, 18831.0466,
18940.1366, 19032.2696, 19131.729, 19243.4864, 19349.6932, 19442.866,
19547.9448, 19653.2798, 19754.4034, 19854.0692, 19965.1224,
20065.1774, 20158.2212, 20253.353, 20366.3264, 20463.22],
&[5908.5052, 6007.2672, 6107.347, 6208.5794, 6311.2622, 6414.5514,
6519.3376, 6625.6952, 6732.5988, 6841.3552, 6950.5972, 7061.3082,
7173.5646, 7287.109, 7401.8216, 7516.4344, 7633.3802, 7751.2962,
7870.3784, 7990.292, 8110.79, 8233.4574, 8356.6036, 8482.2712,
8607.7708, 8735.099, 8863.1858, 8993.4746, 9123.8496, 9255.6794,
9388.5448, 9522.7516, 9657.3106, 9792.6094, 9930.5642, 10068.794,
10206.7256, 10347.81, 10490.3196, 10632.0778, 10775.9916, 10920.4662,
11066.124, 11213.073, 11358.0362, 11508.1006, 11659.1716, 11808.7514,
11959.4884, 12112.1314, 12265.037, 12420.3756, 12578.933, 12734.311,
12890.0006, 13047.2144, 13207.3096, 13368.5144, 13528.024, 13689.847,
13852.7528, 14018.3168, 14180.5372, 14346.9668, 14513.5074, 14677.867,
14846.2186, 15017.4186, 15184.9716, 15356.339, 15529.2972, 15697.3578,
15871.8686, 16042.187, 16216.4094, 16389.4188, 16565.9126, 16742.3272,
16919.0042, 17094.7592, 17273.965, 17451.8342, 17634.4254, 17810.5984,
17988.9242, 18171.051, 18354.7938, 18539.466, 18721.0408, 18904.9972,
19081.867, 19271.9118, 19451.8694, 19637.9816, 19821.2922, 20013.1292,
20199.3858, 20387.8726, 20572.9514, 20770.7764, 20955.1714, 21144.751,
21329.9952, 21520.709, 21712.7016, 21906.3868, 22096.2626, 22286.0524,
22475.051, 22665.5098, 22862.8492, 23055.5294, 23249.6138, 23437.848,
23636.273, 23826.093, 24020.3296, 24213.3896, 24411.7392, 24602.9614,
24805.7952, 24998.1552, 25193.9588, 25389.0166, 25585.8392,
25780.6976, 25981.2728, 26175.977, 26376.5252, 26570.1964, 26773.387,
26962.9812, 27163.0586, 27368.164, 27565.0534, 27758.7428, 27961.1276,
28163.2324, 28362.3816, 28565.7668, 28758.644, 28956.9768, 29163.4722,
29354.7026, 29561.1186, 29767.9948, 29959.9986, 30164.0492,
30366.9818, 30562.5338, 30762.9928, 30976.1592, 31166.274, 31376.722,
31570.3734, 31770.809, 31974.8934, 32179.5286, 32387.5442, 32582.3504,
32794.076, 32989.9528, 33191.842, 33392.4684, 33595.659, 33801.8672,
34000.3414, 34200.0922, 34402.6792, 34610.0638, 34804.0084, 35011.13,
35218.669, 35418.6634, 35619.0792, 35830.6534, 36028.4966, 36229.7902,
36438.6422, 36630.7764, 36833.3102, 37048.6728, 37247.3916,
37453.5904, 37669.3614, 37854.5526, 38059.305, 38268.0936, 38470.2516,
38674.7064, 38876.167, 39068.3794, 39281.9144, 39492.8566, 39684.8628,
39898.4108, 40093.1836, 40297.6858, 40489.7086, 40717.2424],
&[11817.475, 12015.0046, 12215.3792, 12417.7504, 12623.1814, 12830.0086,
13040.0072, 13252.503, 13466.178, 13683.2738, 13902.0344, 14123.9798,
14347.394, 14573.7784, 14802.6894, 15033.6824, 15266.9134, 15502.8624,
15741.4944, 15980.7956, 16223.8916, 16468.6316, 16715.733, 16965.5726,
17217.204, 17470.666, 17727.8516, 17986.7886, 18247.6902, 18510.9632,
18775.304, 19044.7486, 19314.4408, 19587.202, 19862.2576, 20135.924,
20417.0324, 20697.9788, 20979.6112, 21265.0274, 21550.723, 21841.6906,
22132.162, 22428.1406, 22722.127, 23020.5606, 23319.7394, 23620.4014,
23925.2728, 24226.9224, 24535.581, 24845.505, 25155.9618, 25470.3828,
25785.9702, 26103.7764, 26420.4132, 26742.0186, 27062.8852, 27388.415,
27714.6024, 28042.296, 28365.4494, 28701.1526, 29031.8008, 29364.2156,
29704.497, 30037.1458, 30380.111, 30723.8168, 31059.5114, 31404.9498,
31751.6752, 32095.2686, 32444.7792, 32794.767, 33145.204, 33498.4226,
33847.6502, 34209.006, 34560.849, 34919.4838, 35274.9778, 35635.1322,
35996.3266, 36359.1394, 36722.8266, 37082.8516, 37447.7354,
37815.9606, 38191.0692, 38559.4106, 38924.8112, 39294.6726, 39663.973,
40042.261, 40416.2036, 40779.2036, 41161.6436, 41540.9014, 41921.1998,
42294.7698, 42678.5264, 43061.3464, 43432.375, 43818.432, 44198.6598,
44583.0138, 44970.4794, 45353.924, 45729.858, 46118.2224, 46511.5724,
46900.7386, 47280.6964, 47668.1472, 48055.6796, 48446.9436,
48838.7146, 49217.7296, 49613.7796, 50010.7508, 50410.0208,
50793.7886, 51190.2456, 51583.1882, 51971.0796, 52376.5338, 52763.319,
53165.5534, 53556.5594, 53948.2702, 54346.352, 54748.7914, 55138.577,
55543.4824, 55941.1748, 56333.7746, 56745.1552, 57142.7944,
57545.2236, 57935.9956, 58348.5268, 58737.5474, 59158.5962,
59542.6896, 59958.8004, 60349.3788, 60755.0212, 61147.6144, 61548.194,
61946.0696, 62348.6042, 62763.603, 63162.781, 63560.635, 63974.3482,
64366.4908, 64771.5876, 65176.7346, 65597.3916, 65995.915, 66394.0384,
66822.9396, 67203.6336, 67612.2032, 68019.0078, 68420.0388, 68821.22,
69235.8388, 69640.0724, 70055.155, 70466.357, 70863.4266, 71276.2482,
71677.0306, 72080.2006, 72493.0214, 72893.5952, 73314.5856,
73714.9852, 74125.3022, 74521.2122, 74933.6814, 75341.5904,
75743.0244, 76166.0278, 76572.1322, 76973.1028, 77381.6284,
77800.6092, 78189.328, 78607.0962, 79012.2508, 79407.8358, 79825.725,
80238.701, 80646.891, 81035.6436, 81460.0448, 81876.3884],
&[23635.0036, 24030.8034, 24431.4744, 24837.1524, 25246.7928, 25661.326,
26081.3532, 26505.2806, 26933.9892, 27367.7098, 27805.318, 28248.799,
28696.4382, 29148.8244, 29605.5138, 30066.8668, 30534.2344, 31006.32,
31480.778, 31962.2418, 32447.3324, 32938.0232, 33432.731, 33930.728,
34433.9896, 34944.1402, 35457.5588, 35974.5958, 36497.3296,
37021.9096, 37554.326, 38088.0826, 38628.8816, 39171.3192, 39723.2326,
40274.5554, 40832.3142, 41390.613, 41959.5908, 42532.5466, 43102.0344,
43683.5072, 44266.694, 44851.2822, 45440.7862, 46038.0586, 46640.3164,
47241.064, 47846.155, 48454.7396, 49076.9168, 49692.542, 50317.4778,
50939.65, 51572.5596, 52210.2906, 52843.7396, 53481.3996, 54127.236,
54770.406, 55422.6598, 56078.7958, 56736.7174, 57397.6784, 58064.5784,
58730.308, 59404.9784, 60077.0864, 60751.9158, 61444.1386, 62115.817,
62808.7742, 63501.4774, 64187.5454, 64883.6622, 65582.7468,
66274.5318, 66976.9276, 67688.7764, 68402.138, 69109.6274, 69822.9706,
70543.6108, 71265.5202, 71983.3848, 72708.4656, 73433.384, 74158.4664,
74896.4868, 75620.9564, 76362.1434, 77098.3204, 77835.7662,
78582.6114, 79323.9902, 80067.8658, 80814.9246, 81567.0136,
82310.8536, 83061.9952, 83821.4096, 84580.8608, 85335.547, 86092.5802,
86851.6506, 87612.311, 88381.2016, 89146.3296, 89907.8974, 90676.846,
91451.4152, 92224.5518, 92995.8686, 93763.5066, 94551.2796,
95315.1944, 96096.1806, 96881.0918, 97665.679, 98442.68, 99229.3002,
100011.0994, 100790.6386, 101580.1564, 102377.7484, 103152.1392,
103944.2712, 104730.216, 105528.6336, 106324.9398, 107117.6706,
107890.3988, 108695.2266, 109485.238, 110294.7876, 111075.0958,
111878.0496, 112695.2864, 113464.5486, 114270.0474, 115068.608,
115884.3626, 116673.2588, 117483.3716, 118275.097, 119085.4092,
119879.2808, 120687.5868, 121499.9944, 122284.916, 123095.9254,
123912.5038, 124709.0454, 125503.7182, 126323.259, 127138.9412,
127943.8294, 128755.646, 129556.5354, 130375.3298, 131161.4734,
131971.1962, 132787.5458, 133588.1056, 134431.351, 135220.2906,
136023.398, 136846.6558, 137667.0004, 138463.663, 139283.7154,
140074.6146, 140901.3072, 141721.8548, 142543.2322, 143356.1096,
144173.7412, 144973.0948, 145794.3162, 146609.5714, 147420.003,
148237.9784, 149050.5696, 149854.761, 150663.1966, 151494.0754,
152313.1416, 153112.6902, 153935.7206, 154746.9262, 155559.547,
156401.9746, 157228.7036, 158008.7254, 158820.75, 159646.9184,
160470.4458, 161279.5348, 162093.3114, 162918.542, 163729.2842],
&[47271.0, 48062.3584, 48862.7074, 49673.152, 50492.8416, 51322.9514,
52161.03, 53009.407, 53867.6348, 54734.206, 55610.5144, 56496.2096,
57390.795, 58297.268, 59210.6448, 60134.665, 61068.0248, 62010.4472,
62962.5204, 63923.5742, 64895.0194, 65876.4182, 66862.6136,
67862.6968, 68868.8908, 69882.8544, 70911.271, 71944.0924, 72990.0326,
74040.692, 75100.6336, 76174.7826, 77252.5998, 78340.2974, 79438.2572,
80545.4976, 81657.2796, 82784.6336, 83915.515, 85059.7362, 86205.9368,
87364.4424, 88530.3358, 89707.3744, 90885.9638, 92080.197, 93275.5738,
94479.391, 95695.918, 96919.2236, 98148.4602, 99382.3474, 100625.6974,
101878.0284, 103141.6278, 104409.4588, 105686.2882, 106967.5402,
108261.6032, 109548.1578, 110852.0728, 112162.231, 113479.0072,
114806.2626, 116137.9072, 117469.5048, 118813.5186, 120165.4876,
121516.2556, 122875.766, 124250.5444, 125621.2222, 127003.2352,
128387.848, 129775.2644, 131181.7776, 132577.3086, 133979.9458,
135394.1132, 136800.9078, 138233.217, 139668.5308, 141085.212,
142535.2122, 143969.0684, 145420.2872, 146878.1542, 148332.7572,
149800.3202, 151269.66, 152743.6104, 154213.0948, 155690.288,
157169.4246, 158672.1756, 160160.059, 161650.6854, 163145.7772,
164645.6726, 166159.1952, 167682.1578, 169177.3328, 170700.0118,
172228.8964, 173732.6664, 175265.5556, 176787.799, 178317.111,
179856.6914, 181400.865, 182943.4612, 184486.742, 186033.4698,
187583.7886, 189148.1868, 190688.4526, 192250.1926, 193810.9042,
195354.2972, 196938.7682, 198493.5898, 200079.2824, 201618.912,
203205.5492, 204765.5798, 206356.1124, 207929.3064, 209498.7196,
211086.229, 212675.1324, 214256.7892, 215826.2392, 217412.8474,
218995.6724, 220618.6038, 222207.1166, 223781.0364, 225387.4332,
227005.7928, 228590.4336, 230217.8738, 231805.1054, 233408.9,
234995.3432, 236601.4956, 238190.7904, 239817.2548, 241411.2832,
243002.4066, 244640.1884, 246255.3128, 247849.3508, 249479.9734,
251106.8822, 252705.027, 254332.9242, 255935.129, 257526.9014,
259154.772, 260777.625, 262390.253, 264004.4906, 265643.59,
267255.4076, 268873.426, 270470.7252, 272106.4804, 273722.4456,
275337.794, 276945.7038, 278592.9154, 280204.3726, 281841.1606,
283489.171, 285130.1716, 286735.3362, 288364.7164, 289961.1814,
291595.5524, 293285.683, 294899.6668, 296499.3434, 298128.0462,
299761.8946, 301394.2424, 302997.6748, 304615.1478, 306269.7724,
307886.114, 309543.1028, 311153.2862, 312782.8546, 314421.2008,
316033.2438, 317692.9636, 319305.2648, 320948.7406, 322566.3364,
324228.4224, 325847.1542],
&[94542.0, 96125.811, 97728.019, 99348.558, 100987.9705, 102646.7565,
104324.5125, 106021.7435, 107736.7865, 109469.272, 111223.9465,
112995.219, 114787.432, 116593.152, 118422.71, 120267.2345,
122134.6765, 124020.937, 125927.2705, 127851.255, 129788.9485,
131751.016, 133726.8225, 135722.592, 137736.789, 139770.568,
141821.518, 143891.343, 145982.1415, 148095.387, 150207.526,
152355.649, 154515.6415, 156696.05, 158887.7575, 161098.159,
163329.852, 165569.053, 167837.4005, 170121.6165, 172420.4595,
174732.6265, 177062.77, 179412.502, 181774.035, 184151.939,
186551.6895, 188965.691, 191402.8095, 193857.949, 196305.0775,
198774.6715, 201271.2585, 203764.78, 206299.3695, 208818.1365,
211373.115, 213946.7465, 216532.076, 219105.541, 221714.5375,
224337.5135, 226977.5125, 229613.0655, 232270.2685, 234952.2065,
237645.3555, 240331.1925, 243034.517, 245756.0725, 248517.6865,
251232.737, 254011.3955, 256785.995, 259556.44, 262368.335,
265156.911, 267965.266, 270785.583, 273616.0495, 276487.4835,
279346.639, 282202.509, 285074.3885, 287942.2855, 290856.018,
293774.0345, 296678.5145, 299603.6355, 302552.6575, 305492.9785,
308466.8605, 311392.581, 314347.538, 317319.4295, 320285.9785,
323301.7325, 326298.3235, 329301.3105, 332301.987, 335309.791,
338370.762, 341382.923, 344431.1265, 347464.1545, 350507.28,
353619.2345, 356631.2005, 359685.203, 362776.7845, 365886.488,
368958.2255, 372060.6825, 375165.4335, 378237.935, 381328.311,
384430.5225, 387576.425, 390683.242, 393839.648, 396977.8425,
400101.9805, 403271.296, 406409.8425, 409529.5485, 412678.7,
415847.423, 419020.8035, 422157.081, 425337.749, 428479.6165,
431700.902, 434893.1915, 438049.582, 441210.5415, 444379.2545,
447577.356, 450741.931, 453959.548, 457137.0935, 460329.846,
463537.4815, 466732.3345, 469960.5615, 473164.681, 476347.6345,
479496.173, 482813.1645, 486025.6995, 489249.4885, 492460.1945,
495675.8805, 498908.0075, 502131.802, 505374.3855, 508550.9915,
511806.7305, 515026.776, 518217.0005, 521523.9855, 524705.9855,
527950.997, 531210.0265, 534472.497, 537750.7315, 540926.922,
544207.094, 547429.4345, 550666.3745, 553975.3475, 557150.7185,
560399.6165, 563662.697, 566916.7395, 570146.1215, 573447.425,
576689.6245, 579874.5745, 583202.337, 586503.0255, 589715.635,
592910.161, 596214.3885, 599488.035, 602740.92, 605983.0685,
609248.67, 612491.3605, 615787.912, 619107.5245, 622307.9555,
625577.333, 628840.4385, 632085.2155, 635317.6135, 638691.7195,
641887.467, 645139.9405, 648441.546, 651666.252, 654941.845],
&[189084.0, 192250.913, 195456.774, 198696.946, 201977.762, 205294.444,
208651.754, 212042.099, 215472.269, 218941.91, 222443.912, 225996.845,
229568.199, 233193.568, 236844.457, 240543.233, 244279.475, 248044.27,
251854.588, 255693.2, 259583.619, 263494.621, 267445.385, 271454.061,
275468.769, 279549.456, 283646.446, 287788.198, 291966.099,
296181.164, 300431.469, 304718.618, 309024.004, 313393.508,
317760.803, 322209.731, 326675.061, 331160.627, 335654.47, 340241.442,
344841.833, 349467.132, 354130.629, 358819.432, 363574.626,
368296.587, 373118.482, 377914.93, 382782.301, 387680.669, 392601.981,
397544.323, 402529.115, 407546.018, 412593.658, 417638.657,
422762.865, 427886.169, 433017.167, 438213.273, 443441.254,
448692.421, 453937.533, 459239.049, 464529.569, 469910.083, 475274.03,
480684.473, 486070.26, 491515.237, 496995.651, 502476.617, 507973.609,
513497.19, 519083.233, 524726.509, 530305.505, 535945.728, 541584.404,
547274.055, 552967.236, 558667.862, 564360.216, 570128.148, 575965.08,
581701.952, 587532.523, 593361.144, 599246.128, 605033.418,
610958.779, 616837.117, 622772.818, 628672.04, 634675.369, 640574.831,
646585.739, 652574.547, 658611.217, 664642.684, 670713.914,
676737.681, 682797.313, 688837.897, 694917.874, 701009.882,
707173.648, 713257.254, 719415.392, 725636.761, 731710.697,
737906.209, 744103.074, 750313.39, 756504.185, 762712.579, 768876.985,
775167.859, 781359.0, 787615.959, 793863.597, 800245.477, 806464.582,
812785.294, 819005.925, 825403.057, 831676.197, 837936.284,
844266.968, 850642.711, 856959.756, 863322.774, 869699.931,
876102.478, 882355.787, 888694.463, 895159.952, 901536.143,
907872.631, 914293.672, 920615.14, 927130.974, 933409.404, 939922.178,
946331.47, 952745.93, 959209.264, 965590.224, 972077.284, 978501.961,
984953.19, 991413.271, 997817.479, 1004222.658, 1010725.676,
1017177.138, 1023612.529, 1030098.236, 1036493.719, 1043112.207,
1049537.036, 1056008.096, 1062476.184, 1068942.337, 1075524.95,
1081932.864, 1088426.025, 1094776.005, 1101327.448, 1107901.673,
1114423.639, 1120884.602, 1127324.923, 1133794.24, 1140328.886,
1146849.376, 1153346.682, 1159836.502, 1166478.703, 1172953.304,
1179391.502, 1185950.982, 1192544.052, 1198913.41, 1205430.994,
1212015.525, 1218674.042, 1225121.683, 1231551.101, 1238126.379,
1244673.795, 1251260.649, 1257697.86, 1264320.983, 1270736.319,
1277274.694, 1283804.95, 1290211.514, 1296858.568, 1303455.691]];
static BIAS_DATA: &'static [&'static [f64]] =
&[&[10.0, 9.717, 9.207, 8.7896, 8.2882, 7.8204, 7.3772, 6.9342, 6.5202,
6.161, 5.7722, 5.4636, 5.0396, 4.6766, 4.3566, 4.0454, 3.7936, 3.4856,
3.2666, 2.9946, 2.766, 2.4692, 2.3638, 2.0764, 1.7864, 1.7602, 1.4814,
1.433, 1.2926, 1.0664, 0.999600000000001, 0.7956, 0.5366,
0.589399999999998, 0.573799999999999, 0.269799999999996,
0.368200000000002, 0.0544000000000011, 0.234200000000001,
0.0108000000000033, -0.203400000000002, -0.0701999999999998,
-0.129600000000003, -0.364199999999997, -0.480600000000003,
-0.226999999999997, -0.322800000000001, -0.382599999999996,
-0.511200000000002, -0.669600000000003, -0.749400000000001,
-0.500399999999999, -0.617600000000003, -0.6922, -0.601599999999998,
-0.416200000000003, -0.338200000000001, -0.782600000000002,
-0.648600000000002, -0.919800000000002, -0.851799999999997,
-0.962400000000002, -0.6402, -1.1922, -1.0256, -1.086,
-1.21899999999999, -0.819400000000002, -0.940600000000003, -1.1554,
-1.2072, -1.1752, -1.16759999999999, -1.14019999999999, -1.3754,
-1.29859999999999, -1.607, -1.3292, -1.7606],
&[22.0, 21.1194, 20.8208, 20.2318, 19.77, 19.2436, 18.7774, 18.2848,
17.8224, 17.3742, 16.9336, 16.503, 16.0494, 15.6292, 15.2124, 14.798,
14.367, 13.9728, 13.5944, 13.217, 12.8438, 12.3696, 12.0956, 11.7044,
11.324, 11.0668, 10.6698, 10.3644, 10.049, 9.6918, 9.4146, 9.082,
8.687, 8.5398, 8.2462, 7.857, 7.6606, 7.4168, 7.1248, 6.9222, 6.6804,
6.447, 6.3454, 5.9594, 5.7636, 5.5776, 5.331, 5.19, 4.9676, 4.7564,
4.5314, 4.4442, 4.3708, 3.9774, 3.9624, 3.8796, 3.755, 3.472, 3.2076,
3.1024, 2.8908, 2.7338, 2.7728, 2.629, 2.413, 2.3266, 2.1524, 2.2642,
2.1806, 2.0566, 1.9192, 1.7598, 1.3516, 1.5802, 1.43859999999999,
1.49160000000001, 1.1524, 1.1892, 0.841399999999993,
0.879800000000003, 0.837599999999995, 0.469800000000006,
0.765600000000006, 0.331000000000003, 0.591399999999993,
0.601200000000006, 0.701599999999999, 0.558199999999999,
0.339399999999998, 0.354399999999998, 0.491200000000006,
0.308000000000007, 0.355199999999996, -0.0254000000000048,
0.205200000000005, -0.272999999999996, 0.132199999999997,
0.394400000000005, -0.241200000000006, 0.242000000000004,
0.191400000000002, 0.253799999999998, -0.122399999999999,
-0.370800000000003, 0.193200000000004, -0.0848000000000013,
0.0867999999999967, -0.327200000000005, -0.285600000000002,
0.311400000000006, -0.128399999999999, -0.754999999999995,
-0.209199999999996, -0.293599999999998, -0.364000000000004,
-0.253600000000006, -0.821200000000005, -0.253600000000006,
-0.510400000000004, -0.383399999999995, -0.491799999999998,
-0.220200000000006, -0.0972000000000008, -0.557400000000001,
-0.114599999999996, -0.295000000000002, -0.534800000000004,
0.346399999999988, -0.65379999999999, 0.0398000000000138,
0.0341999999999985, -0.995800000000003, -0.523400000000009,
-0.489000000000004, -0.274799999999999, -0.574999999999989,
-0.482799999999997, 0.0571999999999946, -0.330600000000004,
-0.628800000000012, -0.140199999999993, -0.540600000000012,
-0.445999999999998, -0.599400000000003, -0.262599999999992,
0.163399999999996, -0.100599999999986, -0.39500000000001,
-1.06960000000001, -0.836399999999998, -0.753199999999993,
-0.412399999999991, -0.790400000000005, -0.29679999999999,
-0.28540000000001, -0.193000000000012, -0.0772000000000048,
-0.962799999999987, -0.414800000000014],
&[45.0, 44.1902, 43.271, 42.8358, 41.8142, 41.2854, 40.317, 39.354,
38.8924, 37.9436, 37.4596, 36.5262, 35.6248, 35.1574, 34.2822, 33.837,
32.9636, 32.074, 31.7042, 30.7976, 30.4772, 29.6564, 28.7942, 28.5004,
27.686, 27.291, 26.5672, 25.8556, 25.4982, 24.8204, 24.4252, 23.7744,
23.0786, 22.8344, 22.0294, 21.8098, 21.0794, 20.5732, 20.1878,
19.5648, 19.2902, 18.6784, 18.3352, 17.8946, 17.3712, 17.0852, 16.499,
16.2686, 15.6844, 15.2234, 14.9732, 14.3356, 14.2286, 13.7262,
13.3284, 13.1048, 12.5962, 12.3562, 12.1272, 11.4184, 11.4974,
11.0822, 10.856, 10.48, 10.2834, 10.0208, 9.637, 9.51739999999999,
9.05759999999999, 8.74760000000001, 8.42700000000001, 8.1326, 8.2372,
8.2788, 7.6776, 7.79259999999999, 7.1952, 6.9564, 6.6454, 6.87,
6.5428, 6.19999999999999, 6.02940000000001, 5.62780000000001, 5.6782,
5.792, 5.35159999999999, 5.28319999999999, 5.0394, 5.07480000000001,
4.49119999999999, 4.84899999999999, 4.696, 4.54040000000001,
4.07300000000001, 4.37139999999999, 3.7216, 3.7328, 3.42080000000001,
3.41839999999999, 3.94239999999999, 3.27719999999999, 3.411,
3.13079999999999, 2.76900000000001, 2.92580000000001,
2.68279999999999, 2.75020000000001, 2.70599999999999, 2.3886,
3.01859999999999, 2.45179999999999, 2.92699999999999,
2.41720000000001, 2.41139999999999, 2.03299999999999,
2.51240000000001, 2.5564, 2.60079999999999, 2.41720000000001,
1.80439999999999, 1.99700000000001, 2.45480000000001, 1.8948, 2.2346,
2.30860000000001, 2.15479999999999, 1.88419999999999, 1.6508,
0.677199999999999, 1.72540000000001, 1.4752, 1.72280000000001,
1.66139999999999, 1.16759999999999, 1.79300000000001,
1.00059999999999, 0.905200000000008, 0.659999999999997,
1.55879999999999, 1.1636, 0.688199999999995, 0.712600000000009,
0.450199999999995, 1.1978, 0.975599999999986, 0.165400000000005,
1.727, 1.19739999999999, -0.252600000000001, 1.13460000000001, 1.3048,
1.19479999999999, 0.313400000000001, 0.878999999999991,
1.12039999999999, 0.853000000000009, 1.67920000000001,
0.856999999999999, 0.448599999999999, 1.2362, 0.953399999999988,
1.02859999999998, 0.563199999999995, 0.663000000000011,
0.723000000000013, 0.756599999999992, 0.256599999999992,
-0.837600000000009, 0.620000000000005, 0.821599999999989,
0.216600000000028, 0.205600000000004, 0.220199999999977,
0.372599999999977, 0.334400000000016, 0.928400000000011,
0.972800000000007, 0.192400000000021, 0.487199999999973,
-0.413000000000011, 0.807000000000016, 0.120600000000024,
0.769000000000005, 0.870799999999974, 0.66500000000002,
0.118200000000002, 0.401200000000017, 0.635199999999998,
0.135400000000004, 0.175599999999974, 1.16059999999999,
0.34620000000001, 0.521400000000028, -0.586599999999976,
-1.16480000000001, 0.968399999999974, 0.836999999999989,
0.779600000000016, 0.985799999999983],
&[91.0, 89.4934, 87.9758, 86.4574, 84.9718, 83.4954, 81.5302, 80.0756,
78.6374, 77.1782, 75.7888, 73.9522, 72.592, 71.2532, 69.9086, 68.5938,
66.9474, 65.6796, 64.4394, 63.2176, 61.9768, 60.4214, 59.2528,
58.0102, 56.8658, 55.7278, 54.3044, 53.1316, 52.093, 51.0032, 49.9092,
48.6306, 47.5294, 46.5756, 45.6508, 44.662, 43.552, 42.3724, 41.617,
40.5754, 39.7872, 38.8444, 37.7988, 36.8606, 36.2118, 35.3566,
34.4476, 33.5882, 32.6816, 32.0824, 31.0258, 30.6048, 29.4436,
28.7274, 27.957, 27.147, 26.4364, 25.7592, 25.3386, 24.781, 23.8028,
23.656, 22.6544, 21.996, 21.4718, 21.1544, 20.6098, 19.5956, 19.0616,
18.5758, 18.4878, 17.5244, 17.2146, 16.724, 15.8722, 15.5198, 15.0414,
14.941, 14.9048, 13.87, 13.4304, 13.028, 12.4708, 12.37, 12.0624,
11.4668, 11.5532, 11.4352, 11.2564, 10.2744, 10.2118,
9.74720000000002, 10.1456, 9.2928, 8.75040000000001, 8.55279999999999,
8.97899999999998, 8.21019999999999, 8.18340000000001, 7.3494,
7.32499999999999, 7.66140000000001, 6.90300000000002,
7.25439999999998, 6.9042, 7.21499999999997, 6.28640000000001,
6.08139999999997, 6.6764, 6.30099999999999, 5.13900000000001,
5.65800000000002, 5.17320000000001, 4.59019999999998, 4.9538,
5.08280000000002, 4.92200000000003, 4.99020000000002, 4.7328, 5.4538,
4.11360000000002, 4.22340000000003, 4.08780000000002,
3.70800000000003, 4.15559999999999, 4.18520000000001,
3.63720000000001, 3.68220000000002, 3.77960000000002, 3.6078,
2.49160000000001, 3.13099999999997, 2.5376, 3.19880000000001,
3.21100000000001, 2.4502, 3.52820000000003, 2.91199999999998,
3.04480000000001, 2.7432, 2.85239999999999, 2.79880000000003,
2.78579999999999, 1.88679999999999, 2.98860000000002,
2.50639999999999, 1.91239999999999, 2.66160000000002,
2.46820000000002, 1.58199999999999, 1.30399999999997,
2.27379999999999, 2.68939999999998, 1.32900000000001,
3.10599999999999, 1.69080000000002, 2.13740000000001,
2.53219999999999, 1.88479999999998, 1.33240000000001,
1.45119999999997, 1.17899999999997, 2.44119999999998,
1.60659999999996, 2.16700000000003, 0.77940000000001,
2.37900000000002, 2.06700000000001, 1.46000000000004,
2.91160000000002, 1.69200000000001, 0.954600000000028,
2.49300000000005, 2.2722, 1.33500000000004, 2.44899999999996,
1.20140000000004, 3.07380000000001, 2.09739999999999,
2.85640000000001, 2.29960000000005, 2.40899999999999,
1.97040000000004, 0.809799999999996, 1.65279999999996,
2.59979999999996, 0.95799999999997, 2.06799999999998,
2.32780000000002, 4.20159999999998, 1.96320000000003,
1.86400000000003, 1.42999999999995, 3.77940000000001,
1.27200000000005, 1.86440000000005, 2.20600000000002,
3.21900000000005, 1.5154, 2.61019999999996],
&[183.2152, 180.2454, 177.2096, 173.6652, 170.6312, 167.6822, 164.249,
161.3296, 158.0038, 155.2074, 152.4612, 149.27, 146.5178, 143.4412,
140.8032, 138.1634, 135.1688, 132.6074, 129.6946, 127.2664, 124.8228,
122.0432, 119.6824, 116.9464, 114.6268, 112.2626, 109.8376, 107.4034,
104.8956, 102.8522, 100.7638, 98.3552, 96.3556, 93.7526, 91.9292,
89.8954, 87.8198, 85.7668, 83.298, 81.6688, 79.9466, 77.9746, 76.1672,
74.3474, 72.3028, 70.8912, 69.114, 67.4646, 65.9744, 64.4092, 62.6022,
60.843, 59.5684, 58.1652, 56.5426, 55.4152, 53.5388, 52.3592, 51.1366,
49.486, 48.3918, 46.5076, 45.509, 44.3834, 43.3498, 42.0668, 40.7346,
40.1228, 38.4528, 37.7, 36.644, 36.0518, 34.5774, 33.9068, 32.432,
32.1666, 30.434, 29.6644, 28.4894, 27.6312, 26.3804, 26.292,
25.5496000000001, 25.0234, 24.8206, 22.6146, 22.4188, 22.117, 20.6762,
20.6576, 19.7864, 19.509, 18.5334, 17.9204, 17.772, 16.2924, 16.8654,
15.1836, 15.745, 15.1316, 15.0386, 14.0136, 13.6342, 12.6196, 12.1866,
12.4281999999999, 11.3324, 10.4794000000001, 11.5038, 10.129,
9.52800000000002, 10.3203999999999, 9.46299999999997,
9.79280000000006, 9.12300000000005, 8.74180000000001, 9.2192,
7.51020000000005, 7.60659999999996, 7.01840000000004,
7.22239999999999, 7.40139999999997, 6.76179999999999,
7.14359999999999, 5.65060000000005, 5.63779999999997,
5.76599999999996, 6.75139999999999, 5.57759999999996,
3.73220000000003, 5.8048, 5.63019999999995, 4.93359999999996,
3.47979999999995, 4.33879999999999, 3.98940000000005,
3.81960000000004, 3.31359999999995, 3.23080000000004, 3.4588,
3.08159999999998, 3.4076, 3.00639999999999, 2.38779999999997,
2.61900000000003, 1.99800000000005, 3.34820000000002,
2.95060000000001, 0.990999999999985, 2.11440000000005,
2.20299999999997, 2.82219999999995, 2.73239999999998, 2.7826,
3.76660000000004, 2.26480000000004, 2.31280000000004,
2.40819999999997, 2.75360000000001, 3.33759999999995,
2.71559999999999, 1.7478000000001, 1.42920000000004, 2.39300000000003,
2.22779999999989, 2.34339999999997, 0.87259999999992,
3.88400000000001, 1.80600000000004, 1.91759999999999,
1.16779999999994, 1.50320000000011, 2.52500000000009,
0.226400000000012, 2.31500000000005, 0.930000000000064,
1.25199999999995, 2.14959999999996, 0.0407999999999902,
2.5447999999999, 1.32960000000003, 0.197400000000016,
2.52620000000002, 3.33279999999991, -1.34300000000007,
0.422199999999975, 0.917200000000093, 1.12920000000008,
1.46060000000011, 1.45779999999991, 2.8728000000001, 3.33359999999993,
-1.34079999999994, 1.57680000000005, 0.363000000000056,
1.40740000000005, 0.656600000000026, 0.801400000000058,
-0.454600000000028, 1.51919999999996],
&[368.0, 361.8294, 355.2452, 348.6698, 342.1464, 336.2024, 329.8782,
323.6598, 317.462, 311.2826, 305.7102, 299.7416, 293.9366, 288.1046,
282.285, 277.0668, 271.306, 265.8448, 260.301, 254.9886, 250.2422,
244.8138, 239.7074, 234.7428, 229.8402, 225.1664, 220.3534, 215.594,
210.6886, 205.7876, 201.65, 197.228, 192.8036, 188.1666, 184.0818,
180.0824, 176.2574, 172.302, 168.1644, 164.0056, 160.3802, 156.7192,
152.5234, 149.2084, 145.831, 142.485, 139.1112, 135.4764, 131.76,
129.3368, 126.5538, 122.5058, 119.2646, 116.5902, 113.3818, 110.8998,
107.9532, 105.2062, 102.2798, 99.4728, 96.9582, 94.3292, 92.171,
89.7809999999999, 87.5716, 84.7048, 82.5322, 79.875, 78.3972, 75.3464,
73.7274, 71.2834, 70.1444, 68.4263999999999, 66.0166, 64.018,
62.0437999999999, 60.3399999999999, 58.6856, 57.9836,
55.0311999999999, 54.6769999999999, 52.3188, 51.4846,
49.4423999999999, 47.739, 46.1487999999999, 44.9202, 43.4059999999999,
42.5342000000001, 41.2834, 38.8954000000001, 38.3286000000001,
36.2146, 36.6684, 35.9946, 33.123, 33.4338, 31.7378000000001, 29.076,
28.9692, 27.4964, 27.0998, 25.9864, 26.7754, 24.3208, 23.4838,
22.7388000000001, 24.0758000000001, 21.9097999999999, 20.9728,
19.9228000000001, 19.9292, 16.617, 17.05, 18.2996000000001,
15.6128000000001, 15.7392, 14.5174, 13.6322, 12.2583999999999,
13.3766000000001, 11.423, 13.1232, 9.51639999999998, 10.5938000000001,
9.59719999999993, 8.12220000000002, 9.76739999999995,
7.50440000000003, 7.56999999999994, 6.70440000000008,
6.41419999999994, 6.71019999999999, 5.60940000000005,
4.65219999999999, 6.84099999999989, 3.4072000000001, 3.97859999999991,
3.32760000000007, 5.52160000000003, 3.31860000000006,
2.06940000000009, 4.35400000000004, 1.57500000000005,
0.280799999999999, 2.12879999999996, -0.214799999999968,
-0.0378000000000611, -0.658200000000079, 0.654800000000023,
-0.0697999999999865, 0.858400000000074, -2.52700000000004,
-2.1751999999999, -3.35539999999992, -1.04019999999991,
-0.651000000000067, -2.14439999999991, -1.96659999999997,
-3.97939999999994, -0.604400000000169, -3.08260000000018,
-3.39159999999993, -5.29640000000018, -5.38920000000007,
-5.08759999999984, -4.69900000000007, -5.23720000000003,
-3.15779999999995, -4.97879999999986, -4.89899999999989,
-7.48880000000008, -5.94799999999987, -5.68060000000014,
-6.67180000000008, -4.70499999999993, -7.27779999999984,
-4.6579999999999, -4.4362000000001, -4.32139999999981,
-5.18859999999995, -6.66879999999992, -6.48399999999992,
-5.1260000000002, -4.4032000000002, -6.13500000000022,
-5.80819999999994, -4.16719999999987, -4.15039999999999,
-7.45600000000013, -7.24080000000004, -9.83179999999993,
-5.80420000000004, -8.6561999999999, -6.99940000000015,
-10.5473999999999, -7.34139999999979, -6.80999999999995,
-6.29719999999998, -6.23199999999997],
&[737.1256, 724.4234, 711.1064, 698.4732, 685.4636, 673.0644, 660.488,
647.9654, 636.0832, 623.7864, 612.1992, 600.2176, 588.5228, 577.1716,
565.7752, 554.899, 543.6126, 532.6492, 521.9474, 511.5214, 501.1064,
490.6364, 480.2468, 470.4588, 460.3832, 451.0584, 440.8606, 431.3868,
422.5062, 413.1862, 404.463, 395.339, 386.1936, 378.1292, 369.1854,
361.2908, 353.3324, 344.8518, 337.5204, 329.4854, 321.9318, 314.552,
306.4658, 299.4256, 292.849, 286.152, 278.8956, 271.8792, 265.118,
258.62, 252.5132, 245.9322, 239.7726, 233.6086, 227.5332, 222.5918,
216.4294, 210.7662, 205.4106, 199.7338, 194.9012, 188.4486, 183.1556,
178.6338, 173.7312, 169.6264, 163.9526, 159.8742, 155.8326, 151.1966,
147.5594, 143.07, 140.037, 134.1804, 131.071, 127.4884, 124.0848,
120.2944, 117.333, 112.9626, 110.2902, 107.0814, 103.0334,
99.4832000000001, 96.3899999999999, 93.7202000000002,
90.1714000000002, 87.2357999999999, 85.9346, 82.8910000000001,
80.0264000000002, 78.3834000000002, 75.1543999999999,
73.8683999999998, 70.9895999999999, 69.4367999999999,
64.8701999999998, 65.0408000000002, 61.6738, 59.5207999999998,
57.0158000000001, 54.2302, 53.0962, 50.4985999999999,
52.2588000000001, 47.3914, 45.6244000000002, 42.8377999999998,
43.0072, 40.6516000000001, 40.2453999999998, 35.2136, 36.4546,
33.7849999999999, 33.2294000000002, 32.4679999999998,
30.8670000000002, 28.6507999999999, 28.9099999999999,
27.5983999999999, 26.1619999999998, 24.5563999999999,
23.2328000000002, 21.9484000000002, 21.5902000000001,
21.3346000000001, 17.7031999999999, 20.6111999999998,
19.5545999999999, 15.7375999999999, 17.0720000000001,
16.9517999999998, 15.326, 13.1817999999998, 14.6925999999999,
13.0859999999998, 13.2754, 10.8697999999999, 11.248, 7.3768,
4.72339999999986, 7.97899999999981, 8.7503999999999, 7.68119999999999,
9.7199999999998, 7.73919999999998, 5.6224000000002, 7.44560000000001,
6.6601999999998, 5.9058, 4.00199999999995, 4.51699999999983,
4.68240000000014, 3.86220000000003, 5.13639999999987,
5.98500000000013, 2.47719999999981, 2.61999999999989,
1.62800000000016, 4.65000000000009, 0.225599999999758,
0.831000000000131, -0.359400000000278, 1.27599999999984,
-2.92559999999958, -0.0303999999996449, 2.37079999999969,
-2.0033999999996, 0.804600000000391, 0.30199999999968,
1.1247999999996, -2.6880000000001, 0.0321999999996478,
-1.18099999999959, -3.9402, -1.47940000000017, -0.188400000000001,
-2.10720000000038, -2.04159999999956, -3.12880000000041,
-4.16160000000036, -0.612799999999879, -3.48719999999958,
-8.17900000000009, -5.37780000000021, -4.01379999999972,
-5.58259999999973, -5.73719999999958, -7.66799999999967,
-5.69520000000011, -1.1247999999996, -5.58520000000044,
-8.04560000000038, -4.64840000000004, -11.6468000000004,
-7.97519999999986, -5.78300000000036, -7.67420000000038,
-10.6328000000003, -9.81720000000041],
&[1476.0, 1449.6014, 1423.5802, 1397.7942, 1372.3042, 1347.2062,
1321.8402, 1297.2292, 1272.9462, 1248.9926, 1225.3026, 1201.4252,
1178.0578, 1155.6092, 1132.626, 1110.5568, 1088.527, 1066.5154,
1045.1874, 1024.3878, 1003.37, 982.1972, 962.5728, 942.1012, 922.9668,
903.292, 884.0772, 864.8578, 846.6562, 828.041, 809.714, 792.3112,
775.1806, 757.9854, 740.656, 724.346, 707.5154, 691.8378, 675.7448,
659.6722, 645.5722, 630.1462, 614.4124, 600.8728, 585.898, 572.408,
558.4926, 544.4938, 531.6776, 517.282, 505.7704, 493.1012, 480.7388,
467.6876, 456.1872, 445.5048, 433.0214, 420.806, 411.409, 400.4144,
389.4294, 379.2286, 369.651, 360.6156, 350.337, 342.083, 332.1538,
322.5094, 315.01, 305.6686, 298.1678, 287.8116, 280.9978, 271.9204,
265.3286, 257.5706, 249.6014, 242.544, 235.5976, 229.583, 220.9438,
214.672, 208.2786, 201.8628, 195.1834, 191.505, 186.1816, 178.5188,
172.2294, 167.8908, 161.0194, 158.052, 151.4588, 148.1596, 143.4344,
138.5238, 133.13, 127.6374, 124.8162, 118.7894, 117.3984, 114.6078,
109.0858, 105.1036, 103.6258, 98.6018000000004, 95.7618000000002,
93.5821999999998, 88.5900000000001, 86.9992000000002,
82.8800000000001, 80.4539999999997, 74.6981999999998,
74.3644000000004, 73.2914000000001, 65.5709999999999,
66.9232000000002, 65.1913999999997, 62.5882000000001,
61.5702000000001, 55.7035999999998, 56.1764000000003,
52.7596000000003, 53.0302000000001, 49.0609999999997, 48.4694, 44.933,
46.0474000000004, 44.7165999999997, 41.9416000000001,
39.9207999999999, 35.6328000000003, 35.5276000000003,
33.1934000000001, 33.2371999999996, 33.3864000000003,
33.9228000000003, 30.2371999999996, 29.1373999999996,
25.2272000000003, 24.2942000000003, 19.8338000000003,
18.9005999999999, 23.0907999999999, 21.8544000000002,
19.5176000000001, 15.4147999999996, 16.9314000000004,
18.6737999999996, 12.9877999999999, 14.3688000000002,
12.0447999999997, 15.5219999999999, 12.5299999999997,
14.5940000000001, 14.3131999999996, 9.45499999999993,
12.9441999999999, 3.91139999999996, 13.1373999999996,
5.44720000000052, 9.82779999999912, 7.87279999999919,
3.67760000000089, 5.46980000000076, 5.55099999999948,
5.65979999999945, 3.89439999999922, 3.1275999999998, 5.65140000000065,
6.3062000000009, 3.90799999999945, 1.87060000000019, 5.17020000000048,
2.46680000000015, 0.770000000000437, -3.72340000000077,
1.16400000000067, 8.05340000000069, 0.135399999999208,
2.15940000000046, 0.766999999999825, 1.0594000000001,
3.15500000000065, -0.287399999999252, 2.37219999999979,
-2.86620000000039, -1.63199999999961, -2.22979999999916,
-0.15519999999924, -1.46039999999994, -0.262199999999211,
-2.34460000000036, -2.8078000000005, -3.22179999999935,
-5.60159999999996, -8.42200000000048, -9.43740000000071,
0.161799999999857, -10.4755999999998, -10.0823999999993],
&[2953.0, 2900.4782, 2848.3568, 2796.3666, 2745.324, 2694.9598,
2644.648, 2595.539, 2546.1474, 2498.2576, 2450.8376, 2403.6076,
2357.451, 2311.38, 2266.4104, 2221.5638, 2176.9676, 2134.193,
2090.838, 2048.8548, 2007.018, 1966.1742, 1925.4482, 1885.1294,
1846.4776, 1807.4044, 1768.8724, 1731.3732, 1693.4304, 1657.5326,
1621.949, 1586.5532, 1551.7256, 1517.6182, 1483.5186, 1450.4528,
1417.865, 1385.7164, 1352.6828, 1322.6708, 1291.8312, 1260.9036,
1231.476, 1201.8652, 1173.6718, 1145.757, 1119.2072, 1092.2828,
1065.0434, 1038.6264, 1014.3192, 988.5746, 965.0816, 940.1176,
917.9796, 894.5576, 871.1858, 849.9144, 827.1142, 805.0818, 783.9664,
763.9096, 742.0816, 724.3962, 706.3454, 688.018, 667.4214, 650.3106,
633.0686, 613.8094, 597.818, 581.4248, 563.834, 547.363, 531.5066,
520.455400000001, 505.583199999999, 488.366, 476.480799999999,
459.7682, 450.0522, 434.328799999999, 423.952799999999,
408.727000000001, 399.079400000001, 387.252200000001,
373.987999999999, 360.852000000001, 351.6394, 339.642,
330.902400000001, 322.661599999999, 311.662200000001, 301.3254,
291.7484, 279.939200000001, 276.7508, 263.215200000001,
254.811400000001, 245.5494, 242.306399999999, 234.8734,
223.787200000001, 217.7156, 212.0196, 200.793, 195.9748, 189.0702,
182.449199999999, 177.2772, 170.2336, 164.741, 158.613600000001,
155.311, 147.5964, 142.837, 137.3724, 132.0162, 130.0424, 121.9804,
120.451800000001, 114.8968, 111.585999999999, 105.933199999999,
101.705, 98.5141999999996, 95.0488000000005, 89.7880000000005,
91.4750000000004, 83.7764000000006, 80.9698000000008,
72.8574000000008, 73.1615999999995, 67.5838000000003,
62.6263999999992, 63.2638000000006, 66.0977999999996,
52.0843999999997, 58.9956000000002, 47.0912000000008,
46.4956000000002, 48.4383999999991, 47.1082000000006, 43.2392,
37.2759999999998, 40.0283999999992, 35.1864000000005,
35.8595999999998, 32.0998, 28.027, 23.6694000000007, 33.8266000000003,
26.3736000000008, 27.2008000000005, 21.3245999999999,
26.4115999999995, 23.4521999999997, 19.5013999999992,
19.8513999999996, 10.7492000000002, 18.6424000000006,
13.1265999999996, 18.2436000000016, 6.71860000000015,
3.39459999999963, 6.33759999999893, 7.76719999999841,
0.813999999998487, 3.82819999999992, 0.826199999999517,
8.07440000000133, -1.59080000000176, 5.01780000000144,
0.455399999998917, -0.24199999999837, 0.174800000000687,
-9.07640000000174, -4.20160000000033, -3.77520000000004,
-4.75179999999818, -5.3724000000002, -8.90680000000066,
-6.10239999999976, -5.74120000000039, -9.95339999999851,
-3.86339999999836, -13.7304000000004, -16.2710000000006,
-7.51359999999841, -3.30679999999847, -13.1339999999982,
-10.0551999999989, -6.72019999999975, -8.59660000000076,
-10.9307999999983, -1.8775999999998, -4.82259999999951, -13.7788,
-21.6470000000008, -10.6735999999983, -15.7799999999988],
&[5907.5052, 5802.2672, 5697.347, 5593.5794, 5491.2622, 5390.5514,
5290.3376, 5191.6952, 5093.5988, 4997.3552, 4902.5972, 4808.3082,
4715.5646, 4624.109, 4533.8216, 4444.4344, 4356.3802, 4269.2962,
4183.3784, 4098.292, 4014.79, 3932.4574, 3850.6036, 3771.2712,
3691.7708, 3615.099, 3538.1858, 3463.4746, 3388.8496, 3315.6794,
3244.5448, 3173.7516, 3103.3106, 3033.6094, 2966.5642, 2900.794,
2833.7256, 2769.81, 2707.3196, 2644.0778, 2583.9916, 2523.4662,
2464.124, 2406.073, 2347.0362, 2292.1006, 2238.1716, 2182.7514,
2128.4884, 2077.1314, 2025.037, 1975.3756, 1928.933, 1879.311,
1831.0006, 1783.2144, 1738.3096, 1694.5144, 1649.024, 1606.847,
1564.7528, 1525.3168, 1482.5372, 1443.9668, 1406.5074, 1365.867,
1329.2186, 1295.4186, 1257.9716, 1225.339, 1193.2972, 1156.3578,
1125.8686, 1091.187, 1061.4094, 1029.4188, 1000.9126, 972.3272,
944.004199999999, 915.7592, 889.965, 862.834200000001, 840.4254,
812.598399999999, 785.924200000001, 763.050999999999,
741.793799999999, 721.466, 699.040799999999, 677.997200000002,
649.866999999998, 634.911800000002, 609.8694, 591.981599999999,
570.2922, 557.129199999999, 538.3858, 521.872599999999,
502.951400000002, 495.776399999999, 475.171399999999, 459.751,
439.995200000001, 426.708999999999, 413.7016, 402.3868,
387.262599999998, 372.0524, 357.050999999999, 342.5098,
334.849200000001, 322.529399999999, 311.613799999999,
295.848000000002, 289.273000000001, 274.093000000001,
263.329600000001, 251.389599999999, 245.7392, 231.9614, 229.7952,
217.155200000001, 208.9588, 199.016599999999, 190.839199999999,
180.6976, 176.272799999999, 166.976999999999, 162.5252,
151.196400000001, 149.386999999999, 133.981199999998, 130.0586,
130.164000000001, 122.053400000001, 110.7428, 108.1276,
106.232400000001, 100.381600000001, 98.7668000000012,
86.6440000000002, 79.9768000000004, 82.4722000000002,
68.7026000000005, 70.1186000000016, 71.9948000000004, 58.998599999999,
59.0492000000013, 56.9818000000014, 47.5338000000011, 42.9928,
51.1591999999982, 37.2740000000013, 42.7220000000016,
31.3734000000004, 26.8090000000011, 25.8934000000008,
26.5286000000015, 29.5442000000003, 19.3503999999994,
26.0760000000009, 17.9527999999991, 14.8419999999969,
10.4683999999979, 8.65899999999965, 9.86720000000059,
4.34139999999752, -0.907800000000861, -3.32080000000133,
-0.936199999996461, -11.9916000000012, -8.87000000000262,
-6.33099999999831, -11.3366000000024, -15.9207999999999,
-9.34659999999712, -15.5034000000014, -19.2097999999969,
-15.357799999998, -28.2235999999975, -30.6898000000001,
-19.3271999999997, -25.6083999999973, -24.409599999999,
-13.6385999999984, -33.4473999999973, -32.6949999999997,
-28.9063999999998, -31.7483999999968, -32.2935999999972,
-35.8329999999987, -47.620600000002, -39.0855999999985,
-33.1434000000008, -46.1371999999974, -37.5892000000022,
-46.8164000000033, -47.3142000000007, -60.2914000000019,
-37.7575999999972],
&[11816.475, 11605.0046, 11395.3792, 11188.7504, 10984.1814, 10782.0086,
10582.0072, 10384.503, 10189.178, 9996.2738, 9806.0344, 9617.9798,
9431.394, 9248.7784, 9067.6894, 8889.6824, 8712.9134, 8538.8624,
8368.4944, 8197.7956, 8031.8916, 7866.6316, 7703.733, 7544.5726,
7386.204, 7230.666, 7077.8516, 6926.7886, 6778.6902, 6631.9632,
6487.304, 6346.7486, 6206.4408, 6070.202, 5935.2576, 5799.924,
5671.0324, 5541.9788, 5414.6112, 5290.0274, 5166.723, 5047.6906,
4929.162, 4815.1406, 4699.127, 4588.5606, 4477.7394, 4369.4014,
4264.2728, 4155.9224, 4055.581, 3955.505, 3856.9618, 3761.3828,
3666.9702, 3575.7764, 3482.4132, 3395.0186, 3305.8852, 3221.415,
3138.6024, 3056.296, 2970.4494, 2896.1526, 2816.8008, 2740.2156,
2670.497, 2594.1458, 2527.111, 2460.8168, 2387.5114, 2322.9498,
2260.6752, 2194.2686, 2133.7792, 2074.767, 2015.204, 1959.4226,
1898.6502, 1850.006, 1792.849, 1741.4838, 1687.9778, 1638.1322,
1589.3266, 1543.1394, 1496.8266, 1447.8516, 1402.7354, 1361.9606,
1327.0692, 1285.4106, 1241.8112, 1201.6726, 1161.973, 1130.261,
1094.2036, 1048.2036, 1020.6436, 990.901400000002, 961.199800000002,
924.769800000002, 899.526400000002, 872.346400000002, 834.375,
810.432000000001, 780.659800000001, 756.013800000001,
733.479399999997, 707.923999999999, 673.858, 652.222399999999,
636.572399999997, 615.738599999997, 586.696400000001,
564.147199999999, 541.679600000003, 523.943599999999,
505.714599999999, 475.729599999999, 461.779600000002,
449.750800000002, 439.020799999998, 412.7886, 400.245600000002,
383.188199999997, 362.079599999997, 357.533799999997,
334.319000000003, 327.553399999997, 308.559399999998,
291.270199999999, 279.351999999999, 271.791400000002,
252.576999999997, 247.482400000001, 236.174800000001,
218.774599999997, 220.155200000001, 208.794399999999,
201.223599999998, 182.995600000002, 185.5268, 164.547400000003,
176.5962, 150.689599999998, 157.8004, 138.378799999999,
134.021200000003, 117.614399999999, 108.194000000003,
97.0696000000025, 89.6042000000016, 95.6030000000028,
84.7810000000027, 72.635000000002, 77.3482000000004, 59.4907999999996,
55.5875999999989, 50.7346000000034, 61.3916000000027,
50.9149999999936, 39.0384000000049, 58.9395999999979, 29.633600000001,
28.2032000000036, 26.0078000000067, 17.0387999999948,
9.22000000000116, 13.8387999999977, 8.07240000000456,
14.1549999999988, 15.3570000000036, 3.42660000000615,
6.24820000000182, -2.96940000000177, -8.79940000000352,
-5.97860000000219, -14.4048000000039, -3.4143999999942,
-13.0148000000045, -11.6977999999945, -25.7878000000055,
-22.3185999999987, -24.409599999999, -31.9756000000052,
-18.9722000000038, -22.8678000000073, -30.8972000000067,
-32.3715999999986, -22.3907999999938, -43.6720000000059, -35.9038,
-39.7492000000057, -54.1641999999993, -45.2749999999942,
-42.2989999999991, -44.1089999999967, -64.3564000000042,
-49.9551999999967, -42.6116000000038],
&[23634.0036, 23210.8034, 22792.4744, 22379.1524, 21969.7928, 21565.326,
21165.3532, 20770.2806, 20379.9892, 19994.7098, 19613.318, 19236.799,
18865.4382, 18498.8244, 18136.5138, 17778.8668, 17426.2344, 17079.32,
16734.778, 16397.2418, 16063.3324, 15734.0232, 15409.731, 15088.728,
14772.9896, 14464.1402, 14157.5588, 13855.5958, 13559.3296,
13264.9096, 12978.326, 12692.0826, 12413.8816, 12137.3192, 11870.2326,
11602.5554, 11340.3142, 11079.613, 10829.5908, 10583.5466, 10334.0344,
10095.5072, 9859.694, 9625.2822, 9395.7862, 9174.0586, 8957.3164,
8738.064, 8524.155, 8313.7396, 8116.9168, 7913.542, 7718.4778,
7521.65, 7335.5596, 7154.2906, 6968.7396, 6786.3996, 6613.236,
6437.406, 6270.6598, 6107.7958, 5945.7174, 5787.6784, 5635.5784,
5482.308, 5337.9784, 5190.0864, 5045.9158, 4919.1386, 4771.817,
4645.7742, 4518.4774, 4385.5454, 4262.6622, 4142.74679999999,
4015.5318, 3897.9276, 3790.7764, 3685.13800000001, 3573.6274,
3467.9706, 3368.61079999999, 3271.5202, 3170.3848, 3076.4656,
2982.38400000001, 2888.4664, 2806.4868, 2711.9564, 2634.1434,
2551.3204, 2469.7662, 2396.61139999999, 2318.9902, 2243.8658,
2171.9246, 2105.01360000001, 2028.8536, 1960.9952, 1901.4096,
1841.86079999999, 1777.54700000001, 1714.5802, 1654.65059999999,
1596.311, 1546.2016, 1492.3296, 1433.8974, 1383.84600000001,
1339.4152, 1293.5518, 1245.8686, 1193.50659999999, 1162.27959999999,
1107.19439999999, 1069.18060000001, 1035.09179999999,
999.679000000004, 957.679999999993, 925.300199999998,
888.099400000006, 848.638600000006, 818.156400000007,
796.748399999997, 752.139200000005, 725.271200000003, 692.216,
671.633600000001, 647.939799999993, 621.670599999998,
575.398799999995, 561.226599999995, 532.237999999998,
521.787599999996, 483.095799999996, 467.049599999998,
465.286399999997, 415.548599999995, 401.047399999996,
380.607999999993, 377.362599999993, 347.258799999996,
338.371599999999, 310.096999999994, 301.409199999995,
276.280799999993, 265.586800000005, 258.994399999996,
223.915999999997, 215.925399999993, 213.503800000006,
191.045400000003, 166.718200000003, 166.259000000005,
162.941200000001, 148.829400000002, 141.645999999993,
123.535399999993, 122.329800000007, 89.473399999988, 80.1962000000058,
77.5457999999926, 59.1056000000099, 83.3509999999951,
52.2906000000075, 36.3979999999865, 40.6558000000077,
42.0003999999899, 19.6630000000005, 19.7153999999864,
-8.38539999999921, -0.692799999989802, 0.854800000000978,
3.23219999999856, -3.89040000000386, -5.25880000001052,
-24.9052000000083, -22.6837999999989, -26.4286000000138,
-34.997000000003, -37.0216000000073, -43.430400000012,
-58.2390000000014, -68.8034000000043, -56.9245999999985,
-57.8583999999973, -77.3097999999882, -73.2793999999994,
-81.0738000000129, -87.4530000000086, -65.0254000000132,
-57.296399999992, -96.2746000000043, -103.25, -96.081600000005,
-91.5542000000132, -102.465200000006, -107.688599999994,
-101.458000000013, -109.715800000005],
&[47270.0, 46423.3584, 45585.7074, 44757.152, 43938.8416, 43130.9514,
42330.03, 41540.407, 40759.6348, 39988.206, 39226.5144, 38473.2096,
37729.795, 36997.268, 36272.6448, 35558.665, 34853.0248, 34157.4472,
33470.5204, 32793.5742, 32127.0194, 31469.4182, 30817.6136,
30178.6968, 29546.8908, 28922.8544, 28312.271, 27707.0924, 27114.0326,
26526.692, 25948.6336, 25383.7826, 24823.5998, 24272.2974, 23732.2572,
23201.4976, 22674.2796, 22163.6336, 21656.515, 21161.7362, 20669.9368,
20189.4424, 19717.3358, 19256.3744, 18795.9638, 18352.197, 17908.5738,
17474.391, 17052.918, 16637.2236, 16228.4602, 15823.3474, 15428.6974,
15043.0284, 14667.6278, 14297.4588, 13935.2882, 13578.5402,
13234.6032, 12882.1578, 12548.0728, 12219.231, 11898.0072, 11587.2626,
11279.9072, 10973.5048, 10678.5186, 10392.4876, 10105.2556, 9825.766,
9562.5444, 9294.2222, 9038.2352, 8784.848, 8533.2644, 8301.7776,
8058.30859999999, 7822.94579999999, 7599.11319999999,
7366.90779999999, 7161.217, 6957.53080000001, 6736.212,
6548.21220000001, 6343.06839999999, 6156.28719999999,
5975.15419999999, 5791.75719999999, 5621.32019999999, 5451.66,
5287.61040000001, 5118.09479999999, 4957.288, 4798.4246,
4662.17559999999, 4512.05900000001, 4364.68539999999,
4220.77720000001, 4082.67259999999, 3957.19519999999,
3842.15779999999, 3699.3328, 3583.01180000001, 3473.8964,
3338.66639999999, 3233.55559999999, 3117.799, 3008.111,
2909.69140000001, 2814.86499999999, 2719.46119999999, 2624.742,
2532.46979999999, 2444.7886, 2370.1868, 2272.45259999999,
2196.19260000001, 2117.90419999999, 2023.2972, 1969.76819999999,
1885.58979999999, 1833.2824, 1733.91200000001, 1682.54920000001,
1604.57980000001, 1556.11240000001, 1491.3064, 1421.71960000001,
1371.22899999999, 1322.1324, 1264.7892, 1196.23920000001, 1143.8474,
1088.67240000001, 1073.60380000001, 1023.11660000001,
959.036400000012, 927.433199999999, 906.792799999996,
853.433599999989, 841.873800000001, 791.1054, 756.899999999994,
704.343200000003, 672.495599999995, 622.790399999998,
611.254799999995, 567.283200000005, 519.406599999988,
519.188400000014, 495.312800000014, 451.350799999986,
443.973399999988, 431.882199999993, 392.027000000002,
380.924200000009, 345.128999999986, 298.901400000002,
287.771999999997, 272.625, 247.253000000026, 222.490600000019,
223.590000000026, 196.407599999977, 176.425999999978,
134.725199999986, 132.4804, 110.445599999977, 86.7939999999944,
56.7038000000175, 64.915399999998, 38.3726000000024, 37.1606000000029,
46.170999999973, 49.1716000000015, 15.3362000000197, 6.71639999997569,
-34.8185999999987, -39.4476000000141, 12.6830000000191,
-12.3331999999937, -50.6565999999875, -59.9538000000175,
-65.1054000000004, -70.7576000000117, -106.325200000021,
-126.852200000023, -110.227599999984, -132.885999999999,
-113.897200000007, -142.713800000027, -151.145399999979,
-150.799200000009, -177.756200000003, -156.036399999983,
-182.735199999996, -177.259399999981, -198.663600000029,
-174.577600000019, -193.84580000001],
&[94541.0, 92848.811, 91174.019, 89517.558, 87879.9705, 86262.7565,
84663.5125, 83083.7435, 81521.7865, 79977.272, 78455.9465, 76950.219,
75465.432, 73994.152, 72546.71, 71115.2345, 69705.6765, 68314.937,
66944.2705, 65591.255, 64252.9485, 62938.016, 61636.8225, 60355.592,
59092.789, 57850.568, 56624.518, 55417.343, 54231.1415, 53067.387,
51903.526, 50774.649, 49657.6415, 48561.05, 47475.7575, 46410.159,
45364.852, 44327.053, 43318.4005, 42325.6165, 41348.4595, 40383.6265,
39436.77, 38509.502, 37594.035, 36695.939, 35818.6895, 34955.691,
34115.8095, 33293.949, 32465.0775, 31657.6715, 30877.2585, 30093.78,
29351.3695, 28594.1365, 27872.115, 27168.7465, 26477.076, 25774.541,
25106.5375, 24452.5135, 23815.5125, 23174.0655, 22555.2685,
21960.2065, 21376.3555, 20785.1925, 20211.517, 19657.0725, 19141.6865,
18579.737, 18081.3955, 17578.995, 17073.44, 16608.335, 16119.911,
15651.266, 15194.583, 14749.0495, 14343.4835, 13925.639, 13504.509,
13099.3885, 12691.2855, 12328.018, 11969.0345, 11596.5145, 11245.6355,
10917.6575, 10580.9785, 10277.8605, 9926.58100000001, 9605.538,
9300.42950000003, 8989.97850000003, 8728.73249999998, 8448.3235,
8175.31050000002, 7898.98700000002, 7629.79100000003,
7413.76199999999, 7149.92300000001, 6921.12650000001, 6677.1545,
6443.28000000003, 6278.23450000002, 6014.20049999998,
5791.20299999998, 5605.78450000001, 5438.48800000001, 5234.2255,
5059.6825, 4887.43349999998, 4682.935, 4496.31099999999,
4322.52250000002, 4191.42499999999, 4021.24200000003,
3900.64799999999, 3762.84250000003, 3609.98050000001,
3502.29599999997, 3363.84250000003, 3206.54849999998,
3079.70000000001, 2971.42300000001, 2867.80349999998,
2727.08100000001, 2630.74900000001, 2496.6165, 2440.902,
2356.19150000002, 2235.58199999999, 2120.54149999999,
2012.25449999998, 1933.35600000003, 1820.93099999998,
1761.54800000001, 1663.09350000002, 1578.84600000002,
1509.48149999999, 1427.3345, 1379.56150000001, 1306.68099999998,
1212.63449999999, 1084.17300000001, 1124.16450000001,
1060.69949999999, 1007.48849999998, 941.194499999983,
879.880500000028, 836.007500000007, 782.802000000025,
748.385499999975, 647.991500000004, 626.730500000005,
570.776000000013, 484.000500000024, 513.98550000001, 418.985499999952,
386.996999999974, 370.026500000036, 355.496999999974,
356.731499999994, 255.92200000002, 259.094000000041, 205.434499999974,
165.374500000034, 197.347500000033, 95.718499999959, 67.6165000000037,
54.6970000000438, 31.7395000000251, -15.8784999999916,
8.42500000004657, -26.3754999999655, -118.425500000012,
-66.6629999999423, -42.9745000000112, -107.364999999991,
-189.839000000036, -162.611499999999, -164.964999999967,
-189.079999999958, -223.931499999948, -235.329999999958,
-269.639500000048, -249.087999999989, -206.475499999942,
-283.04449999996, -290.667000000016, -304.561499999953,
-336.784499999951, -380.386500000022, -283.280499999993,
-364.533000000054, -389.059499999974, -364.454000000027,
-415.748000000021, -417.155000000028],
&[189083.0, 185696.913, 182348.774, 179035.946, 175762.762, 172526.444,
169329.754, 166166.099, 163043.269, 159958.91, 156907.912, 153906.845,
150924.199, 147996.568, 145093.457, 142239.233, 139421.475, 136632.27,
133889.588, 131174.2, 128511.619, 125868.621, 123265.385, 120721.061,
118181.769, 115709.456, 113252.446, 110840.198, 108465.099,
106126.164, 103823.469, 101556.618, 99308.004, 97124.508, 94937.803,
92833.731, 90745.061, 88677.627, 86617.47, 84650.442, 82697.833,
80769.132, 78879.629, 77014.432, 75215.626, 73384.587, 71652.482,
69895.93, 68209.301, 66553.669, 64921.981, 63310.323, 61742.115,
60205.018, 58698.658, 57190.657, 55760.865, 54331.169, 52908.167,
51550.273, 50225.254, 48922.421, 47614.533, 46362.049, 45098.569,
43926.083, 42736.03, 41593.473, 40425.26, 39316.237, 38243.651,
37170.617, 36114.609, 35084.19, 34117.233, 33206.509, 32231.505,
31318.728, 30403.404, 29540.0550000001, 28679.236, 27825.862,
26965.216, 26179.148, 25462.08, 24645.952, 23922.523, 23198.144,
22529.128, 21762.4179999999, 21134.779, 20459.117, 19840.818,
19187.04, 18636.3689999999, 17982.831, 17439.7389999999, 16874.547,
16358.2169999999, 15835.684, 15352.914, 14823.681, 14329.313,
13816.897, 13342.874, 12880.882, 12491.648, 12021.254, 11625.392,
11293.7610000001, 10813.697, 10456.209, 10099.074, 9755.39000000001,
9393.18500000006, 9047.57900000003, 8657.98499999999,
8395.85900000005, 8033.0, 7736.95900000003, 7430.59699999995,
7258.47699999996, 6924.58200000005, 6691.29399999999,
6357.92500000005, 6202.05700000003, 5921.19700000004,
5628.28399999999, 5404.96799999999, 5226.71100000001,
4990.75600000005, 4799.77399999998, 4622.93099999998, 4472.478,
4171.78700000001, 3957.46299999999, 3868.95200000005,
3691.14300000004, 3474.63100000005, 3341.67200000002,
3109.14000000001, 3071.97400000005, 2796.40399999998,
2756.17799999996, 2611.46999999997, 2471.93000000005,
2382.26399999997, 2209.22400000005, 2142.28399999999,
2013.96100000001, 1911.18999999994, 1818.27099999995,
1668.47900000005, 1519.65800000005, 1469.67599999998,
1367.13800000004, 1248.52899999998, 1181.23600000003,
1022.71900000004, 1088.20700000005, 959.03600000008, 876.095999999903,
791.183999999892, 703.337000000058, 731.949999999953, 586.86400000006,
526.024999999907, 323.004999999888, 320.448000000091,
340.672999999952, 309.638999999966, 216.601999999955,
102.922999999952, 19.2399999999907, -0.114000000059605,
-32.6240000000689, -89.3179999999702, -153.497999999905,
-64.2970000000205, -143.695999999996, -259.497999999905,
-253.017999999924, -213.948000000091, -397.590000000084,
-434.006000000052, -403.475000000093, -297.958000000101,
-404.317000000039, -528.898999999976, -506.621000000043,
-513.205000000075, -479.351000000024, -596.139999999898,
-527.016999999993, -664.681000000099, -680.306000000099,
-704.050000000047, -850.486000000034, -757.43200000003,
-713.308999999892]];
/// HyperLogLog cardinality estimator ("HyperLogLog in Practice" variant:
/// bias-corrected raw estimate plus linear counting for small cardinalities).
pub struct HyperLogLog {
    priv alpha: f64,      // bias-correction constant derived from p
    priv p: u8,           // index bits; the estimator uses m = 2^p registers
    priv m: uint,         // register count, always 2^p
    priv M: ~[u8],        // the registers (max leading-zero ranks seen)
    priv hash_key_1: u64, // first seed for the keyed hash of inserted values
    priv hash_key_2: u64, // second seed for the keyed hash
}
impl HyperLogLog {
    /// Creates an estimator sized for the requested relative `error_rate`
    /// (exclusive range (0, 1)). Hash seeds are randomised per instance.
    ///
    /// NOTE(review): the HLL paper derives p = ceil(log2((1.04/eps)^2)),
    /// i.e. a base-2 log; `ln()` here yields a smaller p (fewer registers,
    /// larger error than requested) — confirm whether this is intentional.
    pub fn new(error_rate: f64) -> HyperLogLog {
        assert!(error_rate > 0.0 && error_rate < 1.0);
        let sr = 1.04 / error_rate;
        let p = (sr * sr).ln().ceil() as u8;
        let alpha = HyperLogLog::get_alpha(p);
        let m = 1u << p; // m = 2^p registers
        HyperLogLog{alpha: alpha,
            p: p,
            m: m,
            M: vec::from_elem(m, 0u8), // all registers start at rank 0
            hash_key_1: rand::random(),
            hash_key_2: rand::random(),}
    }
    /// Creates an empty estimator with the same parameters *and hash seeds*
    /// as `hll`, so the two can later be merged.
    pub fn new_from_template(hll: &HyperLogLog) -> HyperLogLog {
        HyperLogLog{alpha: hll.alpha,
            p: hll.p,
            m: hll.m,
            M: vec::from_elem(hll.m, 0u8),
            hash_key_1: hll.hash_key_1,
            hash_key_2: hll.hash_key_2}
    }
    /// Observes `value`: hashes it, uses the low p bits as the register
    /// index and the rank of the remaining bits as the register update.
    pub fn insert(&mut self, value: &str) {
        let x = value.hash_keyed(self.hash_key_1, self.hash_key_2) as u64;
        let j = x & (self.m - 1) as u64; // register index: low p bits
        let w = x >> self.p;             // remaining 64 - p hash bits
        let rho = HyperLogLog::get_rho(w, 64 - self.p);
        if rho > self.M[j] {
            // registers only ever grow: keep the maximum rank observed
            self.M[j] = rho;
        }
    }
    /// Returns the estimated cardinality. Uses linear counting while any
    /// register is still zero and the estimate is below the per-p threshold;
    /// otherwise falls back to the bias-corrected raw estimate.
    pub fn len(&self) -> f64 {
        let V = HyperLogLog::vec_count_zero(self.M);
        if V > 0 {
            // linear counting: H = m * ln(m / V)
            let H = self.m as f64 * (self.m as f64 / V as f64).ln();
            if H <= HyperLogLog::get_treshold(self.p) {
                H
            } else {
                self.ep()
            }
        } else {
            self.ep()
        }
    }
    /// Folds `src` into `self` by taking the register-wise maximum.
    /// Both estimators must share identical parameters and hash seeds.
    pub fn merge(&mut self, src: &HyperLogLog) {
        assert!(src.alpha == self.alpha);
        assert!(src.p == self.p);
        assert!(src.m == self.m);
        assert!(src.hash_key_1 == self.hash_key_1);
        assert!(src.hash_key_2 == self.hash_key_2);
        for i in range(0, self.m) {
            if src.M[i] > self.M[i] {
                self.M[i] = src.M[i];
            }
        }
    }
    /// Resets every register to zero (estimated cardinality becomes 0).
    pub fn clear(&mut self) {
        self.M.mut_iter().advance(|x| { *x = 0; true });
    }
    /// Linear-counting cutoff for a given p, from the precomputed table.
    /// NOTE(review): indexed by `p` directly, unlike BIAS_DATA /
    /// RAW_ESTIMATE_DATA below which use `p - 4` — verify table layout.
    fn get_treshold(p: u8) -> f64 {
        TRESHOLD_DATA[p]
    }
    /// Bias-correction constant alpha_m; closed forms for small p, the
    /// standard approximation otherwise.
    fn get_alpha(p: u8) -> f64 {
        assert!(p >= 4 && p <= 16);
        match p {
            4 => 0.673,
            5 => 0.697,
            6 => 0.709,
            _ => 0.7213 / (1.0 + 1.079 / (1 << p) as f64)
        }
    }
    /// Number of significant bits in `x` (0 for x == 0).
    fn bit_length(x: u64) -> u8 {
        let mut bits: u8 = 0;
        let mut xm = x;
        while xm != 0 {
            bits += 1;
            xm >>= 1;
        }
        bits
    }
    /// Rank rho(w): position of the leftmost set bit within a `max_width`-bit
    /// word, counting from 1; equals max_width + 1 when w == 0.
    fn get_rho(w: u64, max_width: u8) -> u8 {
        let rho = max_width - HyperLogLog::bit_length(w) + 1;
        assert!(rho > 0);
        rho
    }
    /// Counts registers that are still zero (the V of linear counting).
    fn vec_count_zero(v: &[u8]) -> uint {
        v.iter().count(|&x| x == 0)
    }
    /// Estimated bias of the raw estimate `E`: mean bias of the 6 nearest
    /// entries in the empirical raw-estimate table for this p.
    fn estimate_bias(E: f64, p: u8) -> f64 {
        let bias_vector = BIAS_DATA[p - 4];
        let nearest_neighbors =
            HyperLogLog::get_nearest_neighbors(E, RAW_ESTIMATE_DATA[p - 4]);
        let sum = nearest_neighbors.iter().fold(0.0, |acc, &neighbor|
            acc + bias_vector[neighbor]);
        sum / nearest_neighbors.len() as f64
    }
    /// Indices of the 6 table entries closest to `E` (squared distance).
    fn get_nearest_neighbors(E: f64, estimate_vector: &[f64]) -> ~[uint] {
        let ev_len = estimate_vector.len();
        let mut r: ~[(f64, uint)] = vec::from_elem(ev_len, (0.0, 0u));
        for i in range(0u, ev_len) {
            let dr = E - estimate_vector[i];
            r[i] = (dr * dr, i); // (squared distance, original index)
        }
        r.sort_by(|a, b|
            if a < b { Less } else if a > b { Greater } else { Equal });
        let top = r.slice(0, 6); // k = 6 nearest, per the paper
        top.map(|&ez| match ez { (_, b) => b })
    }
    /// Bias-corrected raw estimate: E = alpha * m^2 / sum(2^-M[j]),
    /// with the empirical bias subtracted while E <= 5m.
    fn ep(&self) -> f64 {
        let sum = self.M.iter().fold(0.0, |acc, &x|
            acc + num::powf(2.0, -(x as f64)));
        let E = self.alpha * (self.m * self.m) as f64 / sum;
        if E <= (5 * self.m) as f64 {
            E - HyperLogLog::estimate_bias(E, self.p)
        } else {
            E
        }
    }
}
#[test]
fn hyperloglog_test_simple() {
    // 4 inserts of 3 distinct values must estimate ~3; clearing resets to 0.
    let words = ["test1", "test2", "test3", "test2", "test2", "test2"];
    let mut counter = HyperLogLog::new(0.00408);
    for &word in words.iter() {
        counter.insert(word);
    }
    assert!(counter.len().round() == 3.0);
    counter.clear();
    assert!(counter.len().round() == 0.0);
}
#[test]
fn hyperloglog_test_merge() {
    // Two estimators built from the same template share seeds, so merging
    // them must behave like a set union: {1,2,3} U {3,4,1} -> 4 distinct.
    let first_batch = ["test1", "test2", "test3", "test2", "test2", "test2"];
    let second_batch = ["test3", "test4", "test4", "test4", "test4", "test1"];
    let mut primary = HyperLogLog::new(0.00408);
    for &word in first_batch.iter() {
        primary.insert(word);
    }
    assert!(primary.len().round() == 3.0);
    let mut secondary = HyperLogLog::new_from_template(&primary);
    for &word in second_batch.iter() {
        secondary.insert(word);
    }
    assert!(secondary.len().round() == 3.0);
    primary.merge(&secondary);
    assert!(primary.len().round() == 4.0);
}
|
use smallvec::SmallVec;
use dovahkiin::types::custom_types::id::Id;
use utils::lru_cache::LRUCache;
use std::io::Cursor;
use std::rc::Rc;
use ram::types::RandValue;
use client::AsyncClient;
use std::cmp::{max, min};
use std::ptr;
use std::mem;
use std::sync::Arc;
use itertools::{Itertools, chain};
use ram::cell::Cell;
use futures::Future;
use dovahkiin::types;
use dovahkiin::types::{Value, Map, PrimitiveArray, ToValue, key_hash};
use index::btree::external::*;
use index::btree::internal::*;
use bifrost::utils::async_locks::RwLock;
use hermes::stm::{TxnManager, TxnValRef, Txn, TxnErr};
use bifrost::utils::async_locks::Mutex;
mod internal;
mod external;
const ID_SIZE: usize = 16;            // byte length of an Id suffixed onto every entry key
const NUM_KEYS: usize = 2048;         // maximum keys held by one tree node
const NUM_PTRS: usize = NUM_KEYS + 1; // child pointers per internal node (keys + 1)
const CACHE_SIZE: usize = 2048;       // LRU capacity for cached external (leaf) pages
// Entry keys are byte strings, inline up to 32 bytes before spilling to heap.
type EntryKey = SmallVec<[u8; 32]>;
type EntryKeySlice = [EntryKey; NUM_KEYS];
type NodePointerSlice = [TxnValRef; NUM_PTRS];
/// A tree node: external (leaf) pages live outside the STM, addressed by
/// cell `Id` and resolved through the node cache; internal nodes are inline.
#[derive(Clone)]
enum Node {
    External(Id),          // leaf page, fetched via the ext node cache
    Internal(Box<InNode>), // in-memory internal node
    None                   // placeholder; observing it at runtime is a bug
}
/// Read cursor returned by `BPlusTree::seek`: identifies a slot inside an
/// external page, pinned to the page version seen during the search.
pub struct RTCursor {
    index: usize, // slot position within the page's key array
    version: u64, // page version at read time
    id: Id        // cell id of the external page
}
/// STM-coordinated B+ tree whose leaves are persisted as neb cells and
/// cached in an LRU write-back cache.
pub struct BPlusTree {
    root: TxnValRef,               // STM handle of the root node
    num_nodes: usize,              // bookkeeping; not visibly updated in this chunk
    height: usize,                 // bookkeeping; not visibly updated in this chunk
    ext_node_cache: ExtNodeCacheMap, // LRU of external pages, write-back on evict
    stm: TxnManager,               // transaction manager guarding tree structure
}
/// Builds a fixed-size array of `$n` elements, evaluating `$constructor`
/// once per slot (so non-`Copy` values work, unlike `[x; N]`).
/// NOTE(review): relies on `mem::uninitialized`, which is UB if
/// `$constructor` panics mid-loop (uninitialised slots get dropped) —
/// `MaybeUninit` is the sound replacement if the toolchain permits.
macro_rules! make_array {
    ($n: expr, $constructor:expr) => {
        unsafe {
            // SAFETY(review): every slot is ptr::write-initialised below
            // before the array is returned; see the panic caveat above.
            let mut items: [_; $n] = mem::uninitialized();
            for place in items.iter_mut() {
                ptr::write(place, $constructor);
            }
            items
        }
    };
}
impl BPlusTree {
    /// Builds an empty tree backed by `neb_client` for page I/O. The page
    /// cache fetches missing external nodes by reading their cell, and
    /// writes evicted nodes back through an upsert transaction.
    pub fn new(neb_client: &Arc<AsyncClient>) -> BPlusTree {
        let neb_client_1 = neb_client.clone();
        let neb_client_2 = neb_client.clone();
        let mut tree = BPlusTree {
            root: Default::default(),
            num_nodes: 0,
            height: 0,
            stm: TxnManager::new(),
            ext_node_cache:
            Mutex::new(
                LRUCache::new(
                    CACHE_SIZE,
                    // cache miss: load the external page's cell from neb
                    move |id|{
                        neb_client_1.read_cell(*id).wait().unwrap().map(|cell| {
                            RwLock::new(ExtNode::from_cell(cell))
                        }).ok()
                    },
                    // eviction: persist the page back as a cell (write-back)
                    move |_, value| {
                        let cache = value.read();
                        let cell = cache.to_cell();
                        neb_client_2.transaction(move |txn| {
                            let cell_owned = (&cell).to_owned();
                            txn.upsert(cell_owned)
                        }).wait().unwrap()
                    }))
        };
        {
            // the tree starts with a single fresh external (leaf) root
            let actual_tree_root = tree.new_ext_cached_node();
            let root_ref = tree.stm.with_value(actual_tree_root);
            tree.root = root_ref;
        }
        return tree;
    }
    /// Locates `key`'s slot, returning a cursor (page id, page version,
    /// slot index). Runs in its own STM transaction with a fresh buffer zone.
    pub fn seek(&self, key: &EntryKey) -> RTCursor {
        return self.stm.transaction(|txn| {
            let mut bz = CacheBufferZone::new(self);
            self.search(self.root, key, txn, &mut bz)
        }).unwrap();
    }
    /// Recursive descent for `seek`: follows child pointers through internal
    /// nodes until an external page is reached.
    fn search(
        &self,
        node: TxnValRef,
        key: &EntryKey,
        txn: &mut Txn,
        bz: &mut CacheBufferZone) -> Result<RTCursor, TxnErr>
    {
        let node = txn.read::<Node>(node)?.unwrap();
        let pos = node.search(key, bz);
        if node.is_ext() {
            let extnode = node.extnode(bz);
            Ok(RTCursor {
                index: pos,
                version: extnode.version,
                id: extnode.id
            })
        } else if let Node::Internal(n) = *node {
            // internal node: recurse into the child covering `pos`
            let next_node_ref = n.pointers[pos];
            self.search(next_node_ref, key, txn, bz)
        } else {
            unreachable!()
        }
    }
    /// Inserts `key` (with `id` appended as a uniqueness suffix). If the
    /// recursion reports a split of the old root, a new internal root with
    /// two children is installed.
    pub fn insert(&self, mut key: EntryKey, id: &Id) {
        key_with_id(&mut key, id);
        self.stm.transaction(|txn| {
            let mut bz = CacheBufferZone::new(self);
            if let Some((new_node, pivotKey)) = self.insert_to_node(self.root, key, txn, &mut bz)? {
                // split root
                // NOTE(review): `new_node` is passed to `txn.new_value` and
                // then used again on the next line — confirm `new_value`
                // borrows (or copies) rather than consuming its argument.
                let new_node_ref = txn.new_value(new_node);
                let pivot = pivotKey.unwrap_or_else(|| new_node.first_key(&mut bz));
                let mut new_in_root = InNode {
                    keys: make_array!(NUM_KEYS, Default::default()),
                    pointers: make_array!(NUM_PTRS, Default::default()),
                    len: 1
                };
                let old_root = txn.read_owned::<Node>(self.root)?.unwrap();
                let old_root_ref = txn.new_value(old_root);
                // new root: [old_root | pivot | new_node]
                new_in_root.keys[0] = pivot;
                new_in_root.pointers[0] = old_root_ref;
                new_in_root.pointers[1] = new_node_ref;
                let new_root = Node::Internal(box new_in_root);
                txn.update(self.root, new_root);
            }
            return Ok(bz);
        }).unwrap().flush(); // flush buffered page mutations after commit
    }
    /// Recursive insert. Returns `Some((sibling, pivot))` when the child
    /// split and the caller must register the new sibling, `None` otherwise.
    fn insert_to_node(
        &self,
        node: TxnValRef,
        key: EntryKey,
        txn: &mut Txn,
        bz: &mut CacheBufferZone
    ) -> Result<Option<(Node, Option<EntryKey>)>, TxnErr> {
        let mut acq_node = txn.read::<Node>(node)?.unwrap();
        let pos = acq_node.search(&key, bz);
        let split_node = match &*acq_node {
            &Node::External(ref id) => {
                // leaf level: mutate the cached page directly
                let node = bz.get_for_mut(id);
                return Ok(node.insert(key, pos, self));
            },
            &Node::Internal(ref n) => {
                let next_node_ref = n.pointers[pos];
                self.insert_to_node(next_node_ref, key, txn, bz)?
            },
            &Node::None => unreachable!()
        };
        match split_node {
            Some((new_node, pivot_key)) => {
                // an internal split must always carry an explicit pivot
                assert!(!(!new_node.is_ext() && pivot_key.is_none()));
                // NOTE(review): same `new_node` use-after-`new_value`
                // pattern as in `insert` above — confirm ownership semantics.
                let new_node_ref = txn.new_value(new_node);
                let pivot = pivot_key.unwrap_or_else(|| new_node.first_key(bz));
                let mut acq_node = txn.read_owned::<Node>(node)?.unwrap();
                let result = acq_node.insert(
                    pivot,
                    Some(new_node_ref),
                    pos + 1,
                    self, bz);
                txn.update(node, acq_node);
                return Ok(result);
            },
            None => return Ok(None)
        }
    }
    /// Removes `key`+`id`; returns whether anything was deleted. If the root
    /// becomes an empty internal node, it is replaced with a fresh leaf.
    fn remove(&self, key: &EntryKey, id: &Id) -> bool {
        let mut key = key.clone();
        key_with_id(&mut key, id);
        let (removed, bz) = self.stm.transaction(|txn| {
            let mut bz = CacheBufferZone::new(self);
            let removed = self.remove_from_node(self.root, &key, txn, &mut bz)?.is_some();
            let root_node = txn.read::<Node>(self.root)?.unwrap();
            if removed && !root_node.is_ext() && root_node.len(&mut bz) == 0 {
                txn.update(self.root, self.new_ext_cached_node());
            }
            Ok((removed, bz))
        }).unwrap();
        bz.flush();
        return removed;
    }
    /// Recursive delete with rebalancing: after removing from a child, an
    /// emptied child is unlinked and an under-half-full child is either
    /// relocated into or merged with a sibling.
    fn remove_from_node(
        &self,
        node: TxnValRef,
        key: &EntryKey,
        txn: &mut Txn,
        bz: &mut CacheBufferZone
    ) -> Result<Option<()>, TxnErr> {
        let node_ref = txn.read::<Node>(node)?.unwrap();
        let key_pos = node_ref.search(key, bz);
        if let Node::Internal(n) = &*node_ref {
            // NOTE(review): descends via key_pos + 1, whereas `search` uses
            // the binary-search position directly — confirm the intended
            // key/pointer offset convention is consistent between the two.
            let pointer_pos = key_pos + 1;
            let result = self.remove_from_node(n.pointers[pointer_pos], key, txn, bz)?;
            if result.is_none() { return Ok(result) }
            let sub_node = n.pointers[pointer_pos];
            let sub_node_ref = txn.read::<Node>(sub_node)?.unwrap();
            let mut node_owned = txn.read_owned::<Node>(node)?.unwrap();
            {
                let mut n = node_owned.innode_mut();
                if sub_node_ref.len(bz) == 0 {
                    // need to remove empty child node
                    if sub_node_ref.is_ext() {
                        // empty external node should be removed and rearrange 'next' and 'prev' pointer for neighbourhoods
                        let extnode = sub_node_ref.extnode(bz);
                        let nid = rearrange_empty_extnode(extnode, bz);
                        n.remove(pointer_pos);
                        bz.delete(&nid)
                    } else {
                        // empty internal nodes should be replaced with it's only remaining child pointer
                        // there must be at least one child pointer exists
                        n.pointers[pointer_pos] = sub_node_ref.innode().pointers[0];
                    }
                    txn.delete(sub_node);
                } else if !sub_node_ref.is_half_full(bz) {
                    // need to rebalance
                    let cand_key_pos = n.rebalance_candidate(key_pos, txn, bz)?;
                    let cand_ptr_pos = cand_key_pos + 1;
                    let left_ptr_pos = min(pointer_pos, cand_ptr_pos);
                    let right_ptr_pos = max(pointer_pos, cand_ptr_pos);
                    if sub_node_ref.cannot_merge(bz) {
                        // relocate
                        n.relocate_children(left_ptr_pos, right_ptr_pos, txn, bz)?;
                    } else {
                        // merge
                        n.merge_children(left_ptr_pos, right_ptr_pos, txn, bz);
                        n.remove(right_ptr_pos - 1);
                    }
                }
            }
            txn.update(node, node_owned);
            return Ok(result);
        } else if let &Node::External(ref id) = &*node_ref {
            // leaf level: delete only on an exact key match at the found slot
            let mut cached_node = bz.get_for_mut(id);
            if &cached_node.keys[key_pos] == key {
                cached_node.remove(key_pos);
                return Ok(Some(()));
            } else {
                return Ok(None);
            }
        } else { unreachable!() }
    }
    /// Fetches (or loads) the external page `id` and locks it for writing.
    fn get_mut_ext_node_cached(&self, id: &Id) -> ExtNodeCachedMut {
        let mut map = self.ext_node_cache.lock();
        return map.get_or_fetch(id).unwrap().write();
    }
    /// Fetches (or loads) the external page `id` and locks it for reading.
    fn get_ext_node_cached(&self, id: &Id) -> ExtNodeCachedImmute {
        let mut map = self.ext_node_cache.lock();
        return map.get_or_fetch(id).unwrap().read();
    }
    /// Allocates an id for a new external page.
    fn new_page_id(&self) -> Id {
        // TODO: achieve locality
        Id::rand()
    }
    /// Creates an empty external page, registers it in the cache, and
    /// returns the `Node::External` handle for it.
    fn new_ext_cached_node(&self) -> Node {
        let id = self.new_page_id();
        let node = ExtNode::new(&id);
        self.ext_node_cache.lock().insert(id, RwLock::new(node));
        return Node::External(id);
    }
}
impl Node {
    /// Binary-searches this node's key array for `key`, returning the slot
    /// index (the insertion point when the key is absent).
    fn search(&self, key: &EntryKey, bz: &mut CacheBufferZone) -> usize {
        let lookup = if self.is_ext() {
            self.extnode(bz).keys.binary_search(key)
        } else {
            self.innode().keys.binary_search(key)
        };
        lookup.unwrap_or_else(|insert_pos| insert_pos)
    }
    /// Inserts `key` (and, for internal nodes, the child `ptr`) at `pos`,
    /// returning the split-off sibling and optional pivot on overflow.
    fn insert(
        &mut self,
        key: EntryKey,
        ptr: Option<TxnValRef>,
        pos: usize,
        tree: &BPlusTree,
        bz: &mut CacheBufferZone) -> Option<(Node, Option<EntryKey>)>
    {
        if self.is_ext() {
            self.extnode_mut(bz).insert(key, pos, tree)
        } else {
            self.innode_mut().insert(key, ptr, pos)
        }
    }
    /// Removes the entry at `pos`, delegating to the proper node kind.
    fn remove(&mut self, pos: usize, bz: &mut CacheBufferZone) {
        if self.is_ext() {
            self.extnode_mut(bz).remove(pos)
        } else {
            self.innode_mut().remove(pos)
        }
    }
    /// True for external (leaf) pages; panics on the `None` placeholder.
    fn is_ext(&self) -> bool {
        match *self {
            Node::External(_) => true,
            Node::Internal(_) => false,
            Node::None => panic!()
        }
    }
    /// Owned copy of the smallest key stored in this node.
    fn first_key(&self, bz: &mut CacheBufferZone) -> EntryKey {
        if !self.is_ext() {
            self.innode().keys[0].to_owned()
        } else {
            self.extnode(bz).keys[0].to_owned()
        }
    }
    /// Current number of keys held by this node.
    fn len(&self, bz: &mut CacheBufferZone) -> usize {
        if !self.is_ext() {
            self.innode().len
        } else {
            self.extnode(bz).len
        }
    }
    /// Whether the node still satisfies the B+ tree minimum-occupancy rule.
    fn is_half_full(&self, bz: &mut CacheBufferZone) -> bool {
        !(self.len(bz) < NUM_KEYS / 2)
    }
    /// Whether this node is too full to be merged with a sibling.
    fn cannot_merge(&self, bz: &mut CacheBufferZone) -> bool {
        self.len(bz) >= NUM_KEYS / 2 - 1
    }
    /// Mutable view of the cached external page; panics on non-leaf nodes.
    fn extnode_mut<'a>(&self, bz: &'a mut CacheBufferZone) -> &'a mut ExtNode {
        if let &Node::External(ref id) = self {
            bz.get_for_mut(id)
        } else {
            unreachable!()
        }
    }
    /// Mutable view of the inline internal node; panics otherwise.
    fn innode_mut(&mut self) -> &mut InNode {
        if let &mut Node::Internal(ref mut innode) = self {
            innode
        } else {
            unreachable!()
        }
    }
    /// Shared view of the cached external page; panics on non-leaf nodes.
    fn extnode<'a>(&self, bz: &'a mut CacheBufferZone) -> &'a ExtNode {
        if let &Node::External(ref id) = self {
            bz.get(id)
        } else {
            unreachable!()
        }
    }
    /// Shared view of the inline internal node; panics otherwise.
    fn innode(&self) -> &InNode {
        if let &Node::Internal(ref innode) = self {
            innode
        } else {
            unreachable!()
        }
    }
}
/// Extracts the cell id stored in the trailing `ID_SIZE` bytes of a key.
fn id_from_key(key: &EntryKey) -> Id {
    // every entry key is <user key bytes> ++ <128-bit id>
    let tail = &key[key.len() - ID_SIZE..];
    let mut cursor = Cursor::new(tail);
    Id::from_binary(&mut cursor).unwrap()
}
/// True when `x` equals `prefix` once its trailing id bytes are ignored.
fn key_prefixed(prefix: &EntryKey, x: &EntryKey) -> bool {
    let body = &x[..x.len() - ID_SIZE];
    prefix.as_slice() == body
}
/// Inserts `item` into whichever half of a freshly split node owns `pos`,
/// bumping that half's length counter.
fn insert_into_split<T, S>(
    item: T,
    x: &mut S, y: &mut S,
    xlen: &mut usize, ylen: &mut usize,
    pos: usize, pivot: usize
)
    where S: Slice<T>, T: Default
{
    // positions at or left of the pivot stay in `x`; the rest shift into `y`
    let (target, len, at) = if pos <= pivot {
        (x, xlen, pos)
    } else {
        (y, ylen, pos - pivot)
    };
    target.insert_at(item, at, *len);
    *len += 1;
}
/// Appends the binary form of `id` to `key`, forming a full entry key.
fn key_with_id(key: &mut EntryKey, id: &Id) {
    key.extend_from_slice(&id.to_binary());
}
/// Fixed-capacity slice storage with manual length management, used for the
/// key and pointer arrays of tree nodes. `len` always means "occupied slots";
/// the caller tracks it separately from the backing array's capacity.
trait Slice<T> : Sized where T: Default{
    fn as_slice(&mut self) -> &mut [T];
    fn init() -> Self;
    /// Filler value for vacated slots.
    fn item_default() -> T {
        T::default()
    }
    /// Moves elements `pivot..len` into a freshly initialized right-hand
    /// slice (the pivot element itself stays on the left), leaving defaults
    /// behind in the vacated slots.
    fn split_at_pivot(&mut self, pivot: usize, len: usize) -> Self {
        let mut right_slice = Self::init();
        {
            let slice1: &mut [T] = self.as_slice();
            let slice2: &mut [T] = right_slice.as_slice();
            for i in pivot..len {
                // leave pivot to the left slice
                slice2[i - pivot] = mem::replace(&mut slice1[i], T::default());
            }
        }
        return right_slice;
    }
    /// Inserts `item` at `pos`, shifting `pos..len` one slot right.
    /// `pos == len` appends; the caller must guarantee capacity for one
    /// more element.
    fn insert_at(&mut self, item: T, pos: usize, len: usize) {
        // was `pos < len`, which rejected legitimate appends at the end
        assert!(pos <= len);
        let slice = self.as_slice();
        // BUGFIX: the original iterated `len .. pos`, an always-empty range
        // (since pos < len), so nothing was ever shifted and the element at
        // `pos` was silently overwritten. Shift right-to-left instead.
        for i in (pos..len).rev() {
            slice[i + 1] = mem::replace(&mut slice[i], T::default());
        }
        slice[pos] = item;
    }
    /// Removes the element at `pos`, shifting `pos+1..len` one slot left and
    /// defaulting the vacated last slot.
    fn remove_at(&mut self, pos: usize, len: usize) {
        // `pos + 1 >= len` is equivalent to the original `pos >= len - 1`
        // for len >= 1, but also avoids the usize underflow when len == 0.
        if pos + 1 >= len { return; }
        let slice = self.as_slice();
        for i in pos..len - 1 {
            slice[i] = mem::replace(&mut slice[i + 1], T::default());
        }
    }
}
// Implements `Slice` for a fixed-size array type: `as_slice` coerces the
// array itself, `init` fills every slot via `make_array!`.
macro_rules! impl_slice_ops {
    ($t: ty, $et: ty, $n: expr) => {
        impl Slice<$et> for $t {
            fn as_slice(&mut self) -> &mut [$et] { self }
            fn init() -> Self { make_array!($n, Self::item_default()) }
        }
    };
}
// Back node key and pointer storage with plain fixed-size arrays.
impl_slice_ops!(EntryKeySlice, EntryKey, NUM_KEYS);
impl_slice_ops!(NodePointerSlice, TxnValRef, NUM_PTRS);
impl Default for Node {
    /// `Node::None` is the placeholder used before a real node is assigned.
    fn default() -> Self {
        Node::None
    }
}
// Gate the unit test out of non-test builds; the other modules in this
// codebase gate their test modules with #[cfg(test)] as well.
#[cfg(test)]
mod test {
    use std::mem::size_of;
    use super::Node;
    #[test]
    fn node_size() {
        // expecting the node size to be an on-heap pointer plus node type tag, aligned.
        // NOTE(review): `Node::External(Id)` stores the id inline; if `Id` is
        // 128-bit this enum is wider than two words — confirm this assertion
        // actually holds on the target platform.
        assert_eq!(size_of::<Node>(), size_of::<usize>() * 2);
    }
}
Fix some of the lifetime errors.
use smallvec::SmallVec;
use dovahkiin::types::custom_types::id::Id;
use utils::lru_cache::LRUCache;
use std::io::Cursor;
use std::rc::Rc;
use ram::types::RandValue;
use client::AsyncClient;
use std::cmp::{max, min};
use std::ptr;
use std::mem;
use std::sync::Arc;
use itertools::{Itertools, chain};
use ram::cell::Cell;
use futures::Future;
use dovahkiin::types;
use dovahkiin::types::{Value, Map, PrimitiveArray, ToValue, key_hash};
use index::btree::external::*;
use index::btree::internal::*;
use bifrost::utils::async_locks::RwLock;
use hermes::stm::{TxnManager, TxnValRef, Txn, TxnErr};
use bifrost::utils::async_locks::Mutex;
mod internal;
mod external;
// An entry key is the user key with the 16-byte cell id appended.
const ID_SIZE: usize = 16;
// Maximum keys per node; a B+ tree node holds one more pointer than keys.
const NUM_KEYS: usize = 2048;
const NUM_PTRS: usize = NUM_KEYS + 1;
// Capacity of the external-node LRU cache.
const CACHE_SIZE: usize = 2048;
// Keys up to 32 bytes stay inline (no heap allocation) thanks to SmallVec.
type EntryKey = SmallVec<[u8; 32]>;
type EntryKeySlice = [EntryKey; NUM_KEYS];
type NodePointerSlice = [TxnValRef; NUM_PTRS];
// A tree node as stored in the STM. External (leaf) pages live in storage
// cells and are referenced by id only; internal pages are owned inline.
#[derive(Clone)]
enum Node {
    // leaf page, resolved through the external-node cache
    External(Id),
    // branch page, boxed to keep the enum small
    Internal(Box<InNode>),
    // placeholder produced by `Default`
    None
}
// Read-time cursor returned by `seek`: a slot index inside a leaf page plus
// the page version observed when the position was taken.
pub struct RTCursor {
    index: usize,
    version: u64,
    id: Id
}
// A B+ tree whose branch nodes live in software transactional memory and
// whose leaf pages are cells persisted through an async neb client.
pub struct BPlusTree {
    root: TxnValRef,
    num_nodes: usize,
    height: usize,
    // write-back LRU of external (leaf) nodes keyed by cell id
    ext_node_cache: ExtNodeCacheMap,
    stm: TxnManager,
}
// Builds a `[T; $n]` by running `$constructor` once per slot.
// SAFETY NOTE(review): relies on the deprecated `mem::uninitialized`; if
// `$constructor` panics mid-loop, dropping the half-initialized array is
// undefined behavior. Consider migrating to `MaybeUninit` when the
// toolchain allows.
macro_rules! make_array {
    ($n: expr, $constructor:expr) => {
        unsafe {
            let mut items: [_; $n] = mem::uninitialized();
            for place in items.iter_mut() {
                ptr::write(place, $constructor);
            }
            items
        }
    };
}
impl BPlusTree {
    /// Builds an empty tree backed by `neb_client` for leaf-page storage.
    ///
    /// The LRU cache is configured with a fetch closure (reads a cell and
    /// wraps it as a cached `ExtNode`) and an eviction closure (writes the
    /// evicted node back through a storage transaction).
    pub fn new(neb_client: &Arc<AsyncClient>) -> BPlusTree {
        // two clones: one captured by the fetch closure, one by the evictor
        let neb_client_1 = neb_client.clone();
        let neb_client_2 = neb_client.clone();
        let mut tree = BPlusTree {
            root: Default::default(),
            num_nodes: 0,
            height: 0,
            stm: TxnManager::new(),
            ext_node_cache:
                Mutex::new(
                    LRUCache::new(
                        CACHE_SIZE,
                        // fetch-on-miss: load the cell from storage
                        move |id| {
                            neb_client_1.read_cell(*id).wait().unwrap().map(|cell| {
                                RwLock::new(ExtNode::from_cell(cell))
                            }).ok()
                        },
                        // on-evict: persist the cached node back to storage
                        move |_, value| {
                            let cache = value.read();
                            let cell = cache.to_cell();
                            neb_client_2.transaction(move |txn| {
                                let cell_owned = (&cell).to_owned();
                                txn.upsert(cell_owned)
                            }).wait().unwrap()
                        }))
        };
        {
            // the initial root is a single empty external (leaf) node
            let actual_tree_root = tree.new_ext_cached_node();
            let root_ref = tree.stm.with_value(actual_tree_root);
            tree.root = root_ref;
        }
        return tree;
    }
    /// Returns a cursor positioned at `key`'s slot in its leaf page.
    pub fn seek(&self, key: &EntryKey) -> RTCursor {
        return self.stm.transaction(|txn| {
            let mut bz = CacheBufferZone::new(self);
            self.search(self.root, key, txn, &mut bz)
        }).unwrap();
    }
    /// Descends from `node` to the leaf that owns `key`'s position.
    fn search(
        &self,
        node: TxnValRef,
        key: &EntryKey,
        txn: &mut Txn,
        bz: &mut CacheBufferZone) -> Result<RTCursor, TxnErr>
    {
        let node = txn.read::<Node>(node)?.unwrap();
        let pos = node.search(key, bz);
        if node.is_ext() {
            // leaf reached: capture slot, page version and page id
            let extnode = node.extnode(bz);
            Ok(RTCursor {
                index: pos,
                version: extnode.version,
                id: extnode.id
            })
        } else if let Node::Internal(ref n) = *node {
            let next_node_ref = n.pointers[pos];
            self.search(next_node_ref, key, txn, bz)
        } else {
            unreachable!()
        }
    }
    /// Inserts `key` (suffixed with `id`) inside a transaction; when the
    /// root itself splits, a new internal root is created and the tree
    /// grows one level.
    pub fn insert(&self, mut key: EntryKey, id: &Id) {
        key_with_id(&mut key, id);
        self.stm.transaction(|txn| {
            let key = &key;
            let mut bz = CacheBufferZone::new(self);
            // NOTE(review): `pivotKey` is non-snake-case; rename when convenient
            if let Some((new_node, pivotKey)) = self.insert_to_node(
                self.root,
                key.clone(),
                txn,
                &mut bz)? {
                // split root: old root and the split-off sibling become the
                // two children of a fresh internal root with a single pivot
                let first_key = new_node.first_key(&mut bz);
                let new_node_ref = txn.new_value(new_node);
                let pivot = pivotKey.unwrap_or_else(|| first_key);
                let mut new_in_root = InNode {
                    keys: make_array!(NUM_KEYS, Default::default()),
                    pointers: make_array!(NUM_PTRS, Default::default()),
                    len: 1
                };
                let old_root = txn.read_owned::<Node>(self.root)?.unwrap();
                let old_root_ref = txn.new_value(old_root);
                new_in_root.keys[0] = pivot;
                new_in_root.pointers[0] = old_root_ref;
                new_in_root.pointers[1] = new_node_ref;
                let new_root = Node::Internal(box new_in_root);
                txn.update(self.root, new_root);
            }
            return Ok(bz);
        }).unwrap().flush();
    }
    /// Recursive insert; returns the split-off sibling (and pivot key) that
    /// the caller must link into the parent, or `None` when no split occurred.
    fn insert_to_node(
        &self,
        node: TxnValRef,
        key: EntryKey,
        txn: &mut Txn,
        bz: &mut CacheBufferZone
    ) -> Result<Option<(Node, Option<EntryKey>)>, TxnErr> {
        let mut acq_node = txn.read::<Node>(node)?.unwrap();
        let pos = acq_node.search(&key, bz);
        let split_node = match &*acq_node {
            &Node::External(ref id) => {
                // leaf level: insert directly into the cached page
                let node = bz.get_for_mut(id);
                return Ok(node.insert(key, pos, self));
            },
            &Node::Internal(ref n) => {
                let next_node_ref = n.pointers[pos];
                self.insert_to_node(next_node_ref, key, txn, bz)?
            },
            &Node::None => unreachable!()
        };
        match split_node {
            Some((new_node, pivot_key)) => {
                // internal splits must always carry an explicit pivot key
                assert!(!(!new_node.is_ext() && pivot_key.is_none()));
                let first_key = new_node.first_key(bz);
                let new_node_ref = txn.new_value(new_node);
                let pivot = pivot_key.unwrap_or_else(|| first_key);
                let mut acq_node = txn.read_owned::<Node>(node)?.unwrap();
                // link the new sibling immediately right of the slot we
                // descended through
                let result = acq_node.insert(
                    pivot,
                    Some(new_node_ref),
                    pos + 1,
                    self, bz);
                txn.update(node, acq_node);
                return Ok(result);
            },
            None => return Ok(None)
        }
    }
    /// Removes `key` + `id`. If removal empties an internal root the tree is
    /// collapsed back to a fresh leaf. Returns whether anything was removed.
    fn remove(&self, key: &EntryKey, id: &Id) -> bool {
        let mut key = key.clone();
        key_with_id(&mut key, id);
        let (removed, bz) = self.stm.transaction(|txn| {
            let mut bz = CacheBufferZone::new(self);
            let removed = self.remove_from_node(self.root, &key, txn, &mut bz)?.is_some();
            let root_node = txn.read::<Node>(self.root)?.unwrap();
            if removed && !root_node.is_ext() && root_node.len(&mut bz) == 0 {
                txn.update(self.root, self.new_ext_cached_node());
            }
            Ok((removed, bz))
        }).unwrap();
        bz.flush();
        return removed;
    }
    /// Recursive removal; `Ok(Some(()))` when the key was found and removed.
    /// After the child call returns, this parent repairs an underflowed child
    /// by deletion, relocation or merging.
    fn remove_from_node(
        &self,
        node: TxnValRef,
        key: &EntryKey,
        txn: &mut Txn,
        bz: &mut CacheBufferZone
    ) -> Result<Option<()>, TxnErr> {
        let node_ref = txn.read::<Node>(node)?.unwrap();
        let key_pos = node_ref.search(key, bz);
        if let Node::Internal(n) = &*node_ref {
            // NOTE(review): `search` descends through `pointers[pos]` while
            // the removal path uses `pointers[key_pos + 1]` — confirm which
            // side of the pivot each path is meant to take.
            let pointer_pos = key_pos + 1;
            let result = self.remove_from_node(n.pointers[pointer_pos], key, txn, bz)?;
            if result.is_none() { return Ok(result) }
            let sub_node = n.pointers[pointer_pos];
            let sub_node_ref = txn.read::<Node>(sub_node)?.unwrap();
            let mut node_owned = txn.read_owned::<Node>(node)?.unwrap();
            {
                let mut n = node_owned.innode_mut();
                if sub_node_ref.len(bz) == 0 {
                    // need to remove empty child node
                    if sub_node_ref.is_ext() {
                        // empty external node should be removed and rearrange 'next' and 'prev' pointer for neighbourhoods
                        let extnode = sub_node_ref.extnode(bz);
                        let nid = rearrange_empty_extnode(extnode, bz);
                        n.remove(pointer_pos);
                        bz.delete(&nid)
                    } else {
                        // empty internal nodes should be replaced with it's only remaining child pointer
                        // there must be at least one child pointer exists
                        n.pointers[pointer_pos] = sub_node_ref.innode().pointers[0];
                    }
                    txn.delete(sub_node);
                } else if !sub_node_ref.is_half_full(bz) {
                    // need to rebalance with a sibling chosen by the parent
                    let cand_key_pos = n.rebalance_candidate(key_pos, txn, bz)?;
                    let cand_ptr_pos = cand_key_pos + 1;
                    let left_ptr_pos = min(pointer_pos, cand_ptr_pos);
                    let right_ptr_pos = max(pointer_pos, cand_ptr_pos);
                    if sub_node_ref.cannot_merge(bz) {
                        // relocate: shift entries between the two siblings
                        n.relocate_children(left_ptr_pos, right_ptr_pos, txn, bz)?;
                    } else {
                        // merge right sibling into left, then drop its pivot
                        n.merge_children(left_ptr_pos, right_ptr_pos, txn, bz);
                        n.remove(right_ptr_pos - 1);
                    }
                }
            }
            txn.update(node, node_owned);
            return Ok(result);
        } else if let &Node::External(ref id) = &*node_ref {
            // leaf: remove only on an exact key match
            let mut cached_node = bz.get_for_mut(id);
            if &cached_node.keys[key_pos] == key {
                cached_node.remove(key_pos);
                return Ok(Some(()));
            } else {
                return Ok(None);
            }
        } else { unreachable!() }
    }
    /// Write-locks the cached external node `id`, fetching it on a miss.
    fn get_mut_ext_node_cached(&self, id: &Id) -> ExtNodeCachedMut {
        let mut map = self.ext_node_cache.lock();
        // NOTE(review): unwrap assumes the backing cell always exists — confirm
        return map.get_or_fetch(id).unwrap().write();
    }
    /// Read-locks the cached external node `id`, fetching it on a miss.
    fn get_ext_node_cached(&self, id: &Id) -> ExtNodeCachedImmute {
        let mut map = self.ext_node_cache.lock();
        return map.get_or_fetch(id).unwrap().read();
    }
    /// Allocates an id for a fresh external page; currently purely random.
    fn new_page_id(&self) -> Id {
        // TODO: achieve locality
        Id::rand()
    }
    /// Creates an empty external node, registers it in the cache, and
    /// returns a `Node::External` handle pointing at it.
    fn new_ext_cached_node(&self) -> Node {
        let id = self.new_page_id();
        let node = ExtNode::new(&id);
        self.ext_node_cache.lock().insert(id, RwLock::new(node));
        return Node::External(id);
    }
}
impl Node {
    /// Binary-searches `key`, returning either the exact index or the
    /// insertion point.
    fn search(&self, key: &EntryKey, bz: &mut CacheBufferZone) -> usize {
        if self.is_ext() {
            // NOTE(review): this searches the whole fixed-size key array, not
            // just the first `len` occupied slots — confirm the trailing
            // default keys cannot disturb the result.
            self.extnode(bz).keys.binary_search(key).unwrap_or_else(|i| i)
        } else {
            self.innode().keys.binary_search(key).unwrap_or_else(|i| i)
        }
    }
    /// Inserts `key` (and, for internal nodes, the child `ptr`) at `pos`;
    /// returns the split-off sibling plus optional pivot when the node splits.
    fn insert(
        &mut self,
        key: EntryKey,
        ptr: Option<TxnValRef>,
        pos: usize,
        tree: &BPlusTree,
        bz: &mut CacheBufferZone) -> Option<(Node, Option<EntryKey>)>
    {
        if let &mut Node::External(ref id) = self {
            self.extnode_mut(bz).insert(key, pos, tree)
        } else {
            self.innode_mut().insert(key, ptr, pos)
        }
    }
    /// Removes the entry at `pos` from whichever concrete node this is.
    fn remove(&mut self, pos: usize, bz: &mut CacheBufferZone) {
        if let &mut Node::External(ref id) = self {
            self.extnode_mut(bz).remove(pos)
        } else {
            self.innode_mut().remove(pos)
        }
    }
    /// True for leaf (external) nodes; panics on the `None` placeholder.
    fn is_ext(&self) -> bool {
        match self {
            &Node::External(_) => true,
            &Node::Internal(_) => false,
            &Node::None => panic!()
        }
    }
    /// Smallest key stored in this node (cloned out).
    fn first_key(&self, bz: &mut CacheBufferZone) -> EntryKey {
        if self.is_ext() {
            self.extnode(bz).keys[0].to_owned()
        } else {
            self.innode().keys[0].to_owned()
        }
    }
    /// Number of occupied key slots.
    fn len(&self, bz: &mut CacheBufferZone) -> usize {
        if self.is_ext() {
            self.extnode(bz).len
        } else {
            self.innode().len
        }
    }
    /// At least half full — no rebalancing required after a removal.
    fn is_half_full(&self, bz: &mut CacheBufferZone) -> bool {
        self.len(bz) >= NUM_KEYS / 2
    }
    /// Too large to be merged into a sibling without overflowing it.
    fn cannot_merge(&self, bz: &mut CacheBufferZone) -> bool {
        self.len(bz) >= NUM_KEYS/ 2 - 1
    }
    /// Mutable view of the cached external node behind this handle.
    fn extnode_mut<'a>(&self, bz: &'a mut CacheBufferZone) -> &'a mut ExtNode {
        match self {
            &Node::External(ref id) => bz.get_for_mut(id),
            _ => unreachable!()
        }
    }
    /// Mutable view of the inline internal node; panics otherwise.
    fn innode_mut(&mut self) -> &mut InNode {
        match self {
            &mut Node::Internal(ref mut n) => n,
            _ => unreachable!()
        }
    }
    /// Shared view of the cached external node behind this handle.
    fn extnode<'a>(&self, bz: &'a mut CacheBufferZone) -> &'a ExtNode {
        match self {
            &Node::External(ref id) => bz.get(id),
            _ => unreachable!()
        }
    }
    /// Shared view of the inline internal node; panics otherwise.
    fn innode(&self) -> &InNode {
        match self {
            &Node::Internal(ref n) => n,
            _ => unreachable!()
        }
    }
}
/// Extracts the cell id stored in the trailing `ID_SIZE` bytes of a key.
fn id_from_key(key: &EntryKey) -> Id {
    // every entry key is <user key bytes> ++ <128-bit id>
    let tail = &key[key.len() - ID_SIZE..];
    let mut cursor = Cursor::new(tail);
    Id::from_binary(&mut cursor).unwrap()
}
/// True when `x` equals `prefix` once its trailing id bytes are ignored.
fn key_prefixed(prefix: &EntryKey, x: &EntryKey) -> bool {
    let body = &x[..x.len() - ID_SIZE];
    prefix.as_slice() == body
}
/// Inserts `item` into whichever half of a freshly split node owns `pos`,
/// bumping that half's length counter.
fn insert_into_split<T, S>(
    item: T,
    x: &mut S, y: &mut S,
    xlen: &mut usize, ylen: &mut usize,
    pos: usize, pivot: usize
)
    where S: Slice<T>, T: Default
{
    // positions at or left of the pivot stay in `x`; the rest shift into `y`
    let (target, len, at) = if pos <= pivot {
        (x, xlen, pos)
    } else {
        (y, ylen, pos - pivot)
    };
    target.insert_at(item, at, *len);
    *len += 1;
}
/// Appends the binary form of `id` to `key`, forming a full entry key.
fn key_with_id(key: &mut EntryKey, id: &Id) {
    key.extend_from_slice(&id.to_binary());
}
/// Fixed-capacity slice storage with manual length management, used for the
/// key and pointer arrays of tree nodes. `len` always means "occupied slots";
/// the caller tracks it separately from the backing array's capacity.
trait Slice<T> : Sized where T: Default{
    fn as_slice(&mut self) -> &mut [T];
    fn init() -> Self;
    /// Filler value for vacated slots.
    fn item_default() -> T {
        T::default()
    }
    /// Moves elements `pivot..len` into a freshly initialized right-hand
    /// slice (the pivot element itself stays on the left), leaving defaults
    /// behind in the vacated slots.
    fn split_at_pivot(&mut self, pivot: usize, len: usize) -> Self {
        let mut right_slice = Self::init();
        {
            let slice1: &mut [T] = self.as_slice();
            let slice2: &mut [T] = right_slice.as_slice();
            for i in pivot..len {
                // leave pivot to the left slice
                slice2[i - pivot] = mem::replace(&mut slice1[i], T::default());
            }
        }
        return right_slice;
    }
    /// Inserts `item` at `pos`, shifting `pos..len` one slot right.
    /// `pos == len` appends; the caller must guarantee capacity for one
    /// more element.
    fn insert_at(&mut self, item: T, pos: usize, len: usize) {
        // was `pos < len`, which rejected legitimate appends at the end
        assert!(pos <= len);
        let slice = self.as_slice();
        // BUGFIX: the original iterated `len .. pos`, an always-empty range
        // (since pos < len), so nothing was ever shifted and the element at
        // `pos` was silently overwritten. Shift right-to-left instead.
        for i in (pos..len).rev() {
            slice[i + 1] = mem::replace(&mut slice[i], T::default());
        }
        slice[pos] = item;
    }
    /// Removes the element at `pos`, shifting `pos+1..len` one slot left and
    /// defaulting the vacated last slot.
    fn remove_at(&mut self, pos: usize, len: usize) {
        // `pos + 1 >= len` is equivalent to the original `pos >= len - 1`
        // for len >= 1, but also avoids the usize underflow when len == 0.
        if pos + 1 >= len { return; }
        let slice = self.as_slice();
        for i in pos..len - 1 {
            slice[i] = mem::replace(&mut slice[i + 1], T::default());
        }
    }
}
// Implements `Slice` for a fixed-size array type: `as_slice` coerces the
// array itself, `init` fills every slot via `make_array!`.
macro_rules! impl_slice_ops {
    ($t: ty, $et: ty, $n: expr) => {
        impl Slice<$et> for $t {
            fn as_slice(&mut self) -> &mut [$et] { self }
            fn init() -> Self { make_array!($n, Self::item_default()) }
        }
    };
}
// Back node key and pointer storage with plain fixed-size arrays.
impl_slice_ops!(EntryKeySlice, EntryKey, NUM_KEYS);
impl_slice_ops!(NodePointerSlice, TxnValRef, NUM_PTRS);
impl Default for Node {
    /// `Node::None` is the placeholder used before a real node is assigned.
    fn default() -> Self {
        Node::None
    }
}
// Gate the unit test out of non-test builds; the other modules in this
// codebase gate their test modules with #[cfg(test)] as well.
#[cfg(test)]
mod test {
    use std::mem::size_of;
    use super::Node;
    #[test]
    fn node_size() {
        // expecting the node size to be an on-heap pointer plus node type tag, aligned.
        // NOTE(review): `Node::External(Id)` stores the id inline; if `Id` is
        // 128-bit this enum is wider than two words — confirm this assertion
        // actually holds on the target platform.
        assert_eq!(size_of::<Node>(), size_of::<usize>() * 2);
    }
}
/*
* This file is part of Twig (ported to Rust).
*
* For the copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* Represents a Token
*
* @author Colin Kiegel <kiegel@gmx.de>
*/
/////////////
// exports //
/////////////
pub mod stream;
pub use self::stream::Stream;
/// A single lexical token produced by the lexer; value-bearing variants
/// carry the raw source text they were scanned from.
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
pub enum Token {
    Eof,
    Text(String),
    BlockStart,
    VarStart,
    BlockEnd,
    VarEnd,
    Name(String),
    Number(String),
    String(String),
    Operator(String),
    Punctuation(String),
    InterpolationStart,
    InterpolationEnd,
}
/// Discriminant-only mirror of `Token`, used for type checks without
/// borrowing a token's value.
/// NOTE(review): the explicit numeric values presumably match the original
/// PHP Twig token-type constants — confirm before changing them.
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
pub enum Type {
    Eof = -1,
    Text = 0,
    BlockStart = 1,
    VarStart = 2,
    BlockEnd = 3,
    VarEnd = 4,
    Name = 5,
    Number = 6,
    String = 7,
    Operator = 8,
    Punctuation = 9,
    InterpolationStart = 10,
    InterpolationEnd = 11,
}
#[allow(dead_code)]
#[allow(unused_variables)]
impl Token {
    /// Returns the payload of value-bearing tokens, `None` for marker tokens.
    pub fn get_value<'a>(&'a self) -> Option<&'a str> {
        match *self {
            Token::Text(ref value)
            | Token::Name(ref value)
            | Token::Number(ref value)
            | Token::String(ref value)
            | Token::Operator(ref value)
            | Token::Punctuation(ref value) => Some(value),
            Token::Eof
            | Token::BlockStart
            | Token::VarStart
            | Token::BlockEnd
            | Token::VarEnd
            | Token::InterpolationStart
            | Token::InterpolationEnd => None,
        }
    }
    /// Maps the token to its `Type` discriminant.
    pub fn get_type(&self) -> Type {
        match *self {
            Token::Eof => Type::Eof,
            Token::Text(_) => Type::Text,
            Token::BlockStart => Type::BlockStart,
            Token::VarStart => Type::VarStart,
            Token::BlockEnd => Type::BlockEnd,
            Token::VarEnd => Type::VarEnd,
            Token::Name(_) => Type::Name,
            Token::Number(_) => Type::Number,
            Token::String(_) => Type::String,
            Token::Operator(_) => Type::Operator,
            Token::Punctuation(_) => Type::Punctuation,
            Token::InterpolationStart => Type::InterpolationStart,
            Token::InterpolationEnd => Type::InterpolationEnd,
        }
    }
    /// True when this token's type equals `typ`.
    pub fn is_type(&self, typ: Type) -> bool {
        typ == self.get_type()
    }
}
// Rust API guidelines say to implement `Display` instead of `ToString`
// directly; `to_string()` keeps working through the blanket
// `impl<T: Display> ToString for T`, so callers are unaffected.
impl ::std::fmt::Display for Token {
    /// Formats the token as `TYPE(value)`, or just `TYPE` for value-less tokens.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        let typ = self.get_type().get_name(false);
        match self.get_value() {
            Some(val) => write!(f, "{}({})", typ, val),
            None => write!(f, "{}", typ),
        }
    }
}
#[allow(dead_code)]
#[allow(unused_variables)]
impl Type {
    /// Returns the name of the token type (internal representation).
    ///
    /// # Arguments
    ///
    /// * `short` - when true the bare name; otherwise prefixed with
    ///   `Token::Type::`
    pub fn get_name(&self, short: bool) -> String {
        let name = match *self {
            Type::Eof => "EOF",
            Type::Text => "TEXT",
            Type::BlockStart => "BLOCK_START",
            Type::VarStart => "VAR_START",
            Type::BlockEnd => "BLOCK_END",
            Type::VarEnd => "VAR_END",
            Type::Name => "NAME",
            Type::Number => "NUMBER",
            Type::String => "STRING",
            Type::Operator => "OPERATOR",
            Type::Punctuation => "PUNCTUATION",
            Type::InterpolationStart => "INTERPOLATION_START",
            Type::InterpolationEnd => "INTERPOLATION_END",
        };
        match short {
            true => name.to_string(),
            false => format!("Token::Type::{}", name),
        }
    }
    /// Returns the description of the token type in plain english.
    pub fn get_description(&self) -> String {
        let text = match *self {
            Type::Eof => "end of template",
            Type::Text => "text",
            Type::BlockStart => "begin of statement block",
            Type::VarStart => "begin of print statement",
            Type::BlockEnd => "end of statement block",
            Type::VarEnd => "end of print statement",
            Type::Name => "name",
            Type::Number => "number",
            Type::String => "string",
            Type::Operator => "operator",
            Type::Punctuation => "punctuation",
            Type::InterpolationStart => "begin of string interpolation",
            Type::InterpolationEnd => "end of string interpolation",
        };
        text.to_string()
    }
}
// Implement `Display` rather than `ToString` directly (Rust API guideline);
// `to_string()` still works via the blanket `ToString` impl.
impl ::std::fmt::Display for Type {
    /// Writes the short type name, e.g. `NAME`.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        f.write_str(&self.get_name(true))
    }
}
#[cfg(test)]
mod test {
    use super::{Token, Type};
    // A value-bearing token exposes its payload and reports the right type.
    #[test]
    fn new_token() {
        let text = "Hello World!";
        let token = Token::Text(text.to_string());
        assert_eq!(token.get_value().unwrap(), text);
        assert!(token.is_type(Type::Text));
    }
}
Split Token::Number into Token::IntegerNumber and Token::FloatingNumber.
/*
* This file is part of Twig (ported to Rust).
*
* For the copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* Represents a Token
*
* @author Colin Kiegel <kiegel@gmx.de>
*/
/////////////
// exports //
/////////////
pub mod stream;
pub use self::stream::Stream;
/// A single lexical token produced by the lexer. Number literals are stored
/// natively as `u64`/`f64`; the remaining value-bearing variants carry the
/// raw source text.
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
pub enum Token {
    Eof,
    Text(String),
    BlockStart,
    VarStart,
    BlockEnd,
    VarEnd,
    Name(String),
    IntegerNumber(u64), // orig. Number
    FloatingNumber(f64), // orig. Number
    String(String),
    Operator(String),
    Punctuation(String),
    InterpolationStart,
    InterpolationEnd,
}
/// Discriminant-only mirror of `Token`; both number token variants collapse
/// into the single `Number` type.
/// NOTE(review): the explicit numeric values presumably match the original
/// PHP Twig token-type constants — confirm before changing them.
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
pub enum Type {
    Eof = -1,
    Text = 0,
    BlockStart = 1,
    VarStart = 2,
    BlockEnd = 3,
    VarEnd = 4,
    Name = 5,
    Number = 6, // Floating or Integer
    String = 7,
    Operator = 8,
    Punctuation = 9,
    InterpolationStart = 10,
    InterpolationEnd = 11,
}
#[allow(dead_code)]
#[allow(unused_variables)]
impl Token {
    /// Returns the token's value rendered as an owned `String`.
    ///
    /// Number variants store native `u64`/`f64` values, so a borrowed
    /// `&str` cannot be returned; every payload is converted to a fresh
    /// `String` instead.
    // The needless explicit lifetime `<'a>` of the original signature is
    // dropped — with an owned `String` return it tied nothing together and
    // lifetime elision produces the identical signature for callers.
    pub fn get_value(&self) -> Option<String> {
        match *self {
            Token::Text(ref x)
            | Token::Name(ref x)
            | Token::String(ref x)
            | Token::Operator(ref x)
            | Token::Punctuation(ref x) => Some(x.to_string()),
            Token::IntegerNumber(x) => Some(x.to_string()),
            Token::FloatingNumber(x) => Some(x.to_string()),
            Token::Eof
            | Token::BlockStart
            | Token::VarStart
            | Token::BlockEnd
            | Token::VarEnd
            | Token::InterpolationStart
            | Token::InterpolationEnd => None,
        }
    }
    /// Maps the token to its `Type` discriminant; both number variants
    /// collapse to `Type::Number`.
    pub fn get_type(&self) -> Type {
        match *self {
            Token::Eof => Type::Eof,
            Token::Text(_) => Type::Text,
            Token::BlockStart => Type::BlockStart,
            Token::VarStart => Type::VarStart,
            Token::BlockEnd => Type::BlockEnd,
            Token::VarEnd => Type::VarEnd,
            Token::Name(_) => Type::Name,
            Token::IntegerNumber(_) => Type::Number,
            Token::FloatingNumber(_) => Type::Number,
            Token::String(_) => Type::String,
            Token::Operator(_) => Type::Operator,
            Token::Punctuation(_) => Type::Punctuation,
            Token::InterpolationStart => Type::InterpolationStart,
            Token::InterpolationEnd => Type::InterpolationEnd,
        }
    }
    /// True when this token's type equals `typ`.
    pub fn is_type(&self, typ: Type) -> bool {
        self.get_type() == typ
    }
}
// Rust API guidelines say to implement `Display` instead of `ToString`
// directly; `to_string()` keeps working through the blanket
// `impl<T: Display> ToString for T`, so callers are unaffected.
impl ::std::fmt::Display for Token {
    /// Formats the token as `TYPE(value)`, or just `TYPE` for value-less tokens.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        let typ = self.get_type().get_name(false);
        match self.get_value() {
            Some(val) => write!(f, "{}({})", typ, val),
            None => f.write_str(&typ),
        }
    }
}
#[allow(dead_code)]
#[allow(unused_variables)]
impl Type {
    /// Returns the name of the token type (internal representation).
    ///
    /// # Arguments
    ///
    /// * `short` - when true the bare name; otherwise prefixed with
    ///   `Token::Type::`
    pub fn get_name(&self, short: bool) -> String {
        let name = match *self {
            Type::Eof => "EOF",
            Type::Text => "TEXT",
            Type::BlockStart => "BLOCK_START",
            Type::VarStart => "VAR_START",
            Type::BlockEnd => "BLOCK_END",
            Type::VarEnd => "VAR_END",
            Type::Name => "NAME",
            Type::Number => "NUMBER",
            Type::String => "STRING",
            Type::Operator => "OPERATOR",
            Type::Punctuation => "PUNCTUATION",
            Type::InterpolationStart => "INTERPOLATION_START",
            Type::InterpolationEnd => "INTERPOLATION_END",
        };
        match short {
            true => name.to_string(),
            false => format!("Token::Type::{}", name),
        }
    }
    /// Returns the description of the token type in plain english.
    pub fn get_description(&self) -> String {
        let text = match *self {
            Type::Eof => "end of template",
            Type::Text => "text",
            Type::BlockStart => "begin of statement block",
            Type::VarStart => "begin of print statement",
            Type::BlockEnd => "end of statement block",
            Type::VarEnd => "end of print statement",
            Type::Name => "name",
            Type::Number => "number",
            Type::String => "string",
            Type::Operator => "operator",
            Type::Punctuation => "punctuation",
            Type::InterpolationStart => "begin of string interpolation",
            Type::InterpolationEnd => "end of string interpolation",
        };
        text.to_string()
    }
}
// Implement `Display` rather than `ToString` directly (Rust API guideline);
// `to_string()` still works via the blanket `ToString` impl.
impl ::std::fmt::Display for Type {
    /// Writes the short type name, e.g. `NAME`.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        f.write_str(&self.get_name(true))
    }
}
#[cfg(test)]
mod test {
    use super::{Token, Type};
    // A value-bearing token exposes its payload and reports the right type.
    #[test]
    fn new_token() {
        let token = Token::Text(String::from("Hello World!"));
        assert_eq!(token.get_value().unwrap(), "Hello World!");
        assert!(token.is_type(Type::Text));
    }
}
|
//! Basic functions for dealing with memory.
//!
//! This module contains functions for querying the size and alignment of
//! types, initializing and manipulating memory.
#![stable(feature = "rust1", since = "1.0.0")]
use crate::clone;
use crate::cmp;
use crate::fmt;
use crate::hash;
use crate::intrinsics;
use crate::marker::{Copy, DiscriminantKind, Sized};
use crate::ptr;
mod manually_drop;
#[stable(feature = "manually_drop", since = "1.20.0")]
pub use manually_drop::ManuallyDrop;
mod maybe_uninit;
#[stable(feature = "maybe_uninit", since = "1.36.0")]
pub use maybe_uninit::MaybeUninit;
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(inline)]
pub use crate::intrinsics::transmute;
/// Takes ownership and "forgets" about the value **without running its destructor**.
///
/// Any resources the value manages, such as heap memory or a file handle, will linger
/// forever in an unreachable state. However, it does not guarantee that pointers
/// to this memory will remain valid.
///
/// * If you want to leak memory, see [`Box::leak`][leak].
/// * If you want to obtain a raw pointer to the memory, see [`Box::into_raw`][into_raw].
/// * If you want to dispose of a value properly, running its destructor, see
/// [`mem::drop`][drop].
///
/// # Safety
///
/// `forget` is not marked as `unsafe`, because Rust's safety guarantees
/// do not include a guarantee that destructors will always run. For example,
/// a program can create a reference cycle using [`Rc`][rc], or call
/// [`process::exit`][exit] to exit without running destructors. Thus, allowing
/// `mem::forget` from safe code does not fundamentally change Rust's safety
/// guarantees.
///
/// That said, leaking resources such as memory or I/O objects is usually undesirable.
/// The need comes up in some specialized use cases for FFI or unsafe code, but even
/// then, [`ManuallyDrop`] is typically preferred.
///
/// Because forgetting a value is allowed, any `unsafe` code you write must
/// allow for this possibility. You cannot return a value and expect that the
/// caller will necessarily run the value's destructor.
///
/// [rc]: ../../std/rc/struct.Rc.html
/// [exit]: ../../std/process/fn.exit.html
///
/// # Examples
///
/// The canonical safe use of `mem::forget` is to circumvent a value's destructor
/// implemented by the `Drop` trait. For example, this will leak a `File`, i.e. reclaim
/// the space taken by the variable but never close the underlying system resource:
///
/// ```no_run
/// use std::mem;
/// use std::fs::File;
///
/// let file = File::open("foo.txt").unwrap();
/// mem::forget(file);
/// ```
///
/// This is useful when the ownership of the underlying resource was previously
/// transferred to code outside of Rust, for example by transmitting the raw
/// file descriptor to C code.
///
/// # Relationship with `ManuallyDrop`
///
/// While `mem::forget` can also be used to transfer *memory* ownership, doing so is error-prone.
/// [`ManuallyDrop`] should be used instead. Consider, for example, this code:
///
/// ```
/// use std::mem;
///
/// let mut v = vec![65, 122];
/// // Build a `String` using the contents of `v`
/// let s = unsafe { String::from_raw_parts(v.as_mut_ptr(), v.len(), v.capacity()) };
/// // leak `v` because its memory is now managed by `s`
/// mem::forget(v); // ERROR - v is invalid and must not be passed to a function
/// assert_eq!(s, "Az");
/// // `s` is implicitly dropped and its memory deallocated.
/// ```
///
/// There are two issues with the above example:
///
/// * If more code were added between the construction of `String` and the invocation of
/// `mem::forget()`, a panic within it would cause a double free because the same memory
/// is handled by both `v` and `s`.
/// * After calling `v.as_mut_ptr()` and transmitting the ownership of the data to `s`,
/// the `v` value is invalid. Even when a value is just moved to `mem::forget` (which won't
/// inspect it), some types have strict requirements on their values that
/// make them invalid when dangling or no longer owned. Using invalid values in any
/// way, including passing them to or returning them from functions, constitutes
/// undefined behavior and may break the assumptions made by the compiler.
///
/// Switching to `ManuallyDrop` avoids both issues:
///
/// ```
/// use std::mem::ManuallyDrop;
///
/// let v = vec![65, 122];
/// // Before we disassemble `v` into its raw parts, make sure it
/// // does not get dropped!
/// let mut v = ManuallyDrop::new(v);
/// // Now disassemble `v`. These operations cannot panic, so there cannot be a leak.
/// let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());
/// // Finally, build a `String`.
/// let s = unsafe { String::from_raw_parts(ptr, len, cap) };
/// assert_eq!(s, "Az");
/// // `s` is implicitly dropped and its memory deallocated.
/// ```
///
/// `ManuallyDrop` robustly prevents double-free because we disable `v`'s destructor
/// before doing anything else. `mem::forget()` doesn't allow this because it consumes its
/// argument, forcing us to call it only after extracting anything we need from `v`. Even
/// if a panic were introduced between construction of `ManuallyDrop` and building the
/// string (which cannot happen in the code as shown), it would result in a leak and not a
/// double free. In other words, `ManuallyDrop` errs on the side of leaking instead of
/// erring on the side of (double-)dropping.
///
/// Also, `ManuallyDrop` prevents us from having to "touch" `v` after transferring the
/// ownership to `s` — the final step of interacting with `v` to dispose of it without
/// running its destructor is entirely avoided.
///
/// [drop]: fn.drop.html
/// [uninit]: fn.uninitialized.html
/// [clone]: ../clone/trait.Clone.html
/// [swap]: fn.swap.html
/// [box]: ../../std/boxed/struct.Box.html
/// [leak]: ../../std/boxed/struct.Box.html#method.leak
/// [into_raw]: ../../std/boxed/struct.Box.html#method.into_raw
/// [ub]: ../../reference/behavior-considered-undefined.html
/// [`ManuallyDrop`]: struct.ManuallyDrop.html
#[inline]
#[rustc_const_unstable(feature = "const_forget", issue = "69616")]
#[stable(feature = "rust1", since = "1.0.0")]
pub const fn forget<T>(t: T) {
    // Wrapping `t` in ManuallyDrop inhibits its destructor; the wrapper is
    // immediately discarded, so the value is leaked without ever being dropped.
    ManuallyDrop::new(t);
}
/// Like [`forget`], but also accepts unsized values.
///
/// This function is just a shim intended to be removed when the `unsized_locals` feature gets
/// stabilized.
///
/// [`forget`]: fn.forget.html
#[inline]
#[unstable(feature = "forget_unsized", issue = "none")]
pub fn forget_unsized<T: ?Sized>(t: T) {
    // SAFETY: the forget intrinsic could be safe, but there's no point in making it safe since
    // we'll be implementing this function soon via `ManuallyDrop`
    // (a `ManuallyDrop`-based version needs unsized locals, hence the intrinsic for now).
    unsafe { intrinsics::forget(t) }
}
/// Returns the size of a type in bytes.
///
/// More specifically, this is the offset in bytes between successive elements
/// in an array with that item type including alignment padding. Thus, for any
/// type `T` and length `n`, `[T; n]` has a size of `n * size_of::<T>()`.
///
/// In general, the size of a type is not stable across compilations, but
/// specific types such as primitives are.
///
/// The following table gives the size for primitives.
///
/// Type | size_of::\<Type>()
/// ---- | ---------------
/// () | 0
/// bool | 1
/// u8 | 1
/// u16 | 2
/// u32 | 4
/// u64 | 8
/// u128 | 16
/// i8 | 1
/// i16 | 2
/// i32 | 4
/// i64 | 8
/// i128 | 16
/// f32 | 4
/// f64 | 8
/// char | 4
///
/// Furthermore, `usize` and `isize` have the same size.
///
/// The types `*const T`, `&T`, `Box<T>`, `Option<&T>`, and `Option<Box<T>>` all have
/// the same size. If `T` is Sized, all of those types have the same size as `usize`.
///
/// The mutability of a pointer does not change its size. As such, `&T` and `&mut T`
/// have the same size. Likewise for `*const T` and `*mut T`.
///
/// # Size of `#[repr(C)]` items
///
/// The `C` representation for items has a defined layout. With this layout,
/// the size of items is also stable as long as all fields have a stable size.
///
/// ## Size of Structs
///
/// For `structs`, the size is determined by the following algorithm.
///
/// For each field in the struct ordered by declaration order:
///
/// 1. Add the size of the field.
/// 2. Round up the current size to the nearest multiple of the next field's [alignment].
///
/// Finally, round the size of the struct to the nearest multiple of its [alignment].
/// The alignment of the struct is usually the largest alignment of all its
/// fields; this can be changed with the use of `repr(align(N))`.
///
/// Unlike `C`, zero sized structs are not rounded up to one byte in size.
///
/// ## Size of Enums
///
/// Enums that carry no data other than the discriminant have the same size as C enums
/// on the platform they are compiled for.
///
/// ## Size of Unions
///
/// The size of a union is the size of its largest field.
///
/// Unlike `C`, zero sized unions are not rounded up to one byte in size.
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// // Some primitives
/// assert_eq!(4, mem::size_of::<i32>());
/// assert_eq!(8, mem::size_of::<f64>());
/// assert_eq!(0, mem::size_of::<()>());
///
/// // Some arrays
/// assert_eq!(8, mem::size_of::<[i32; 2]>());
/// assert_eq!(12, mem::size_of::<[i32; 3]>());
/// assert_eq!(0, mem::size_of::<[i32; 0]>());
///
///
/// // Pointer size equality
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<*const i32>());
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<Box<i32>>());
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<Option<&i32>>());
/// assert_eq!(mem::size_of::<Box<i32>>(), mem::size_of::<Option<Box<i32>>>());
/// ```
///
/// Using `#[repr(C)]`.
///
/// ```
/// use std::mem;
///
/// #[repr(C)]
/// struct FieldStruct {
/// first: u8,
/// second: u16,
/// third: u8
/// }
///
/// // The size of the first field is 1, so add 1 to the size. Size is 1.
/// // The alignment of the second field is 2, so add 1 to the size for padding. Size is 2.
/// // The size of the second field is 2, so add 2 to the size. Size is 4.
/// // The alignment of the third field is 1, so add 0 to the size for padding. Size is 4.
/// // The size of the third field is 1, so add 1 to the size. Size is 5.
/// // Finally, the alignment of the struct is 2 (because the largest alignment amongst its
/// // fields is 2), so add 1 to the size for padding. Size is 6.
/// assert_eq!(6, mem::size_of::<FieldStruct>());
///
/// #[repr(C)]
/// struct TupleStruct(u8, u16, u8);
///
/// // Tuple structs follow the same rules.
/// assert_eq!(6, mem::size_of::<TupleStruct>());
///
/// // Note that reordering the fields can lower the size. We can remove both padding bytes
/// // by putting `third` before `second`.
/// #[repr(C)]
/// struct FieldStructOptimized {
/// first: u8,
/// third: u8,
/// second: u16
/// }
///
/// assert_eq!(4, mem::size_of::<FieldStructOptimized>());
///
/// // Union size is the size of the largest field.
/// #[repr(C)]
/// union ExampleUnion {
/// smaller: u8,
/// larger: u16
/// }
///
/// assert_eq!(2, mem::size_of::<ExampleUnion>());
/// ```
///
/// [alignment]: ./fn.align_of.html
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_promotable]
#[rustc_const_stable(feature = "const_size_of", since = "1.32.0")]
pub const fn size_of<T>() -> usize {
    // Forwards directly to the compiler intrinsic; the result is a
    // compile-time constant (hence `const fn` and `rustc_promotable`).
    intrinsics::size_of::<T>()
}
/// Returns the size of the pointed-to value in bytes.
///
/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
/// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object],
/// then `size_of_val` can be used to get the dynamically-known size.
///
/// [slice]: ../../std/primitive.slice.html
/// [trait object]: ../../book/ch17-02-trait-objects.html
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::size_of_val(&5i32));
///
/// let x: [u8; 13] = [0; 13];
/// let y: &[u8] = &x;
/// assert_eq!(13, mem::size_of_val(y));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn size_of_val<T: ?Sized>(val: &T) -> usize {
    // Delegates to the compiler intrinsic, which handles both sized and
    // dynamically sized `T` (note the `?Sized` bound above).
    intrinsics::size_of_val(val)
}
/// Returns the size of the pointed-to value in bytes.
///
/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
/// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object],
/// then `size_of_val_raw` can be used to get the dynamically-known size.
///
/// # Safety
///
/// This function is only safe to call if the following conditions hold:
///
/// - If `T` is `Sized`, this function is always safe to call.
/// - If the unsized tail of `T` is:
///     - a [slice], then the length of the slice tail must be an initialized
///       integer, and the size of the *entire value*
///       (dynamic tail length + statically sized prefix) must fit in `isize`.
///     - a [trait object], then the vtable part of the pointer must point
///       to a valid vtable acquired by an unsizing coercion, and the size
///       of the *entire value* (dynamic tail length + statically sized prefix)
///       must fit in `isize`.
///     - an (unstable) [extern type], then this function is always safe to
///       call, but may panic or otherwise return the wrong value, as the
///       extern type's layout is not known. This is the same behavior as
///       [`size_of_val`] on a reference to an extern type tail.
///     - otherwise, it is conservatively not allowed to call this function.
///
/// [slice]: ../../std/primitive.slice.html
/// [trait object]: ../../book/ch17-02-trait-objects.html
/// [extern type]: ../../unstable-book/language-features/extern-types.html
/// [`size_of_val`]: ../../core/mem/fn.size_of_val.html
///
/// # Examples
///
/// ```
/// #![feature(layout_for_ptr)]
/// use std::mem;
///
/// assert_eq!(4, mem::size_of_val(&5i32));
///
/// let x: [u8; 13] = [0; 13];
/// let y: &[u8] = &x;
/// assert_eq!(13, unsafe { mem::size_of_val_raw(y) });
/// ```
#[inline]
#[unstable(feature = "layout_for_ptr", issue = "69835")]
pub unsafe fn size_of_val_raw<T: ?Sized>(val: *const T) -> usize {
    // The caller guarantees the pointer metadata is valid (see `# Safety`),
    // so the intrinsic can compute the dynamic size from it.
    intrinsics::size_of_val(val)
}
/// Returns the [ABI]-required minimum alignment of a type.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// # #![allow(deprecated)]
/// use std::mem;
///
/// assert_eq!(4, mem::min_align_of::<i32>());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(reason = "use `align_of` instead", since = "1.2.0")]
pub fn min_align_of<T>() -> usize {
    // Deprecated alias of `align_of` (same intrinsic); kept for
    // backwards compatibility.
    intrinsics::min_align_of::<T>()
}
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// # #![allow(deprecated)]
/// use std::mem;
///
/// assert_eq!(4, mem::min_align_of_val(&5i32));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(reason = "use `align_of_val` instead", since = "1.2.0")]
pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
    // Deprecated alias of `align_of_val` (same intrinsic); kept for
    // backwards compatibility.
    intrinsics::min_align_of_val(val)
}
/// Returns the [ABI]-required minimum alignment of a type.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::align_of::<i32>());
/// ```
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_promotable]
#[rustc_const_stable(feature = "const_align_of", since = "1.32.0")]
pub const fn align_of<T>() -> usize {
    // Uses the same intrinsic as the deprecated `min_align_of`;
    // the value is a compile-time constant.
    intrinsics::min_align_of::<T>()
}
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::align_of_val(&5i32));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated)]
pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
    // Forwards to the deprecated `min_align_of_val`; the `allow(deprecated)`
    // above silences the resulting deprecation warning.
    min_align_of_val(val)
}
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Safety
///
/// This function is only safe to call if the following conditions hold:
///
/// - If `T` is `Sized`, this function is always safe to call.
/// - If the unsized tail of `T` is:
///     - a [slice], then the length of the slice tail must be an initialized
///       integer, and the size of the *entire value*
///       (dynamic tail length + statically sized prefix) must fit in `isize`.
///     - a [trait object], then the vtable part of the pointer must point
///       to a valid vtable acquired by an unsizing coercion, and the size
///       of the *entire value* (dynamic tail length + statically sized prefix)
///       must fit in `isize`.
///     - an (unstable) [extern type], then this function is always safe to
///       call, but may panic or otherwise return the wrong value, as the
///       extern type's layout is not known. This is the same behavior as
///       [`align_of_val`] on a reference to an extern type tail.
///     - otherwise, it is conservatively not allowed to call this function.
///
/// [slice]: ../../std/primitive.slice.html
/// [trait object]: ../../book/ch17-02-trait-objects.html
/// [extern type]: ../../unstable-book/language-features/extern-types.html
/// [`align_of_val`]: ../../core/mem/fn.align_of_val.html
///
/// # Examples
///
/// ```
/// #![feature(layout_for_ptr)]
/// use std::mem;
///
/// assert_eq!(4, unsafe { mem::align_of_val_raw(&5i32) });
/// ```
#[inline]
#[unstable(feature = "layout_for_ptr", issue = "69835")]
pub unsafe fn align_of_val_raw<T: ?Sized>(val: *const T) -> usize {
    // The caller guarantees the pointer metadata is valid (see `# Safety`).
    intrinsics::min_align_of_val(val)
}
/// Returns `true` if dropping values of type `T` matters.
///
/// This is purely an optimization hint, and may be implemented conservatively:
/// it may return `true` for types that don't actually need to be dropped.
/// As such always returning `true` would be a valid implementation of
/// this function. However if this function actually returns `false`, then you
/// can be certain dropping `T` has no side effect.
///
/// Low level implementations of things like collections, which need to manually
/// drop their data, should use this function to avoid unnecessarily
/// trying to drop all their contents when they are destroyed. This might not
/// make a difference in release builds (where a loop that has no side-effects
/// is easily detected and eliminated), but is often a big win for debug builds.
///
/// Note that [`drop_in_place`] already performs this check, so if your workload
/// can be reduced to some small number of [`drop_in_place`] calls, using this is
/// unnecessary. In particular note that you can [`drop_in_place`] a slice, and that
/// will do a single needs_drop check for all the values.
///
/// Types like Vec therefore just `drop_in_place(&mut self[..])` without using
/// `needs_drop` explicitly. Types like [`HashMap`], on the other hand, have to drop
/// values one at a time and should use this API.
///
/// [`drop_in_place`]: ../ptr/fn.drop_in_place.html
/// [`HashMap`]: ../../std/collections/struct.HashMap.html
///
/// # Examples
///
/// Here's an example of how a collection might make use of `needs_drop`:
///
/// ```
/// use std::{mem, ptr};
///
/// pub struct MyCollection<T> {
/// # data: [T; 1],
/// /* ... */
/// }
/// # impl<T> MyCollection<T> {
/// # fn iter_mut(&mut self) -> &mut [T] { &mut self.data }
/// # fn free_buffer(&mut self) {}
/// # }
///
/// impl<T> Drop for MyCollection<T> {
/// fn drop(&mut self) {
/// unsafe {
/// // drop the data
/// if mem::needs_drop::<T>() {
/// for x in self.iter_mut() {
/// ptr::drop_in_place(x);
/// }
/// }
/// self.free_buffer();
/// }
/// }
/// }
/// ```
#[inline]
#[stable(feature = "needs_drop", since = "1.21.0")]
#[rustc_const_stable(feature = "const_needs_drop", since = "1.36.0")]
pub const fn needs_drop<T>() -> bool {
    // Answered entirely at compile time by the intrinsic; `const fn` so
    // callers can branch on it in constant contexts.
    intrinsics::needs_drop::<T>()
}
/// Returns the value of type `T` represented by the all-zero byte-pattern.
///
/// This means that, for example, the padding byte in `(u8, u16)` is not
/// necessarily zeroed.
///
/// There is no guarantee that an all-zero byte-pattern represents a valid value
/// of some type `T`. For example, the all-zero byte-pattern is not a valid value
/// for reference types (`&T`, `&mut T`) and functions pointers. Using `zeroed`
/// on such types causes immediate [undefined behavior][ub] because [the Rust
/// compiler assumes][inv] that there always is a valid value in a variable it
/// considers initialized.
///
/// This has the same effect as [`MaybeUninit::zeroed().assume_init()`][zeroed].
/// It is useful for FFI sometimes, but should generally be avoided.
///
/// [zeroed]: union.MaybeUninit.html#method.zeroed
/// [ub]: ../../reference/behavior-considered-undefined.html
/// [inv]: union.MaybeUninit.html#initialization-invariant
///
/// # Examples
///
/// Correct usage of this function: initializing an integer with zero.
///
/// ```
/// use std::mem;
///
/// let x: i32 = unsafe { mem::zeroed() };
/// assert_eq!(0, x);
/// ```
///
/// *Incorrect* usage of this function: initializing a reference with zero.
///
/// ```rust,no_run
/// # #![allow(invalid_value)]
/// use std::mem;
///
/// let _x: &i32 = unsafe { mem::zeroed() }; // Undefined behavior!
/// let _y: fn() = unsafe { mem::zeroed() }; // And again!
/// ```
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated_in_future)]
#[allow(deprecated)]
#[rustc_diagnostic_item = "mem_zeroed"]
pub unsafe fn zeroed<T>() -> T {
    // Rejects types for which an all-zero bit pattern is statically known to
    // be invalid, catching some misuses before they become undefined behavior.
    intrinsics::assert_zero_valid::<T>();
    // Equivalent to `MaybeUninit::zeroed().assume_init()` as documented above.
    MaybeUninit::zeroed().assume_init()
}
/// Bypasses Rust's normal memory-initialization checks by pretending to
/// produce a value of type `T`, while doing nothing at all.
///
/// **This function is deprecated.** Use [`MaybeUninit<T>`] instead.
///
/// The reason for deprecation is that the function basically cannot be used
/// correctly: it has the same effect as [`MaybeUninit::uninit().assume_init()`][uninit].
/// As the [`assume_init` documentation][assume_init] explains,
/// [the Rust compiler assumes][inv] that values are properly initialized.
/// As a consequence, calling e.g. `mem::uninitialized::<bool>()` causes immediate
/// undefined behavior for returning a `bool` that is not definitely either `true`
/// or `false`. Worse, truly uninitialized memory like what gets returned here
/// is special in that the compiler knows that it does not have a fixed value.
/// This makes it undefined behavior to have uninitialized data in a variable even
/// if that variable has an integer type.
/// (Notice that the rules around uninitialized integers are not finalized yet, but
/// until they are, it is advisable to avoid them.)
///
/// [`MaybeUninit<T>`]: union.MaybeUninit.html
/// [uninit]: union.MaybeUninit.html#method.uninit
/// [assume_init]: union.MaybeUninit.html#method.assume_init
/// [inv]: union.MaybeUninit.html#initialization-invariant
#[inline(always)]
#[rustc_deprecated(since = "1.39.0", reason = "use `mem::MaybeUninit` instead")]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated_in_future)]
#[allow(deprecated)]
#[rustc_diagnostic_item = "mem_uninitialized"]
pub unsafe fn uninitialized<T>() -> T {
    // Rejects types for which uninitialized memory is statically known to be
    // invalid, catching some misuses before they become undefined behavior.
    intrinsics::assert_uninit_valid::<T>();
    // Deprecated path: equivalent to `MaybeUninit::uninit().assume_init()`.
    MaybeUninit::uninit().assume_init()
}
/// Swaps the values at two mutable locations, without deinitializing either one.
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// let mut x = 5;
/// let mut y = 42;
///
/// mem::swap(&mut x, &mut y);
///
/// assert_eq!(42, x);
/// assert_eq!(5, y);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn swap<T>(x: &mut T, y: &mut T) {
    // SAFETY: the raw pointers have been created from safe mutable references satisfying all the
    // constraints on `ptr::swap_nonoverlapping_one`
    // (two live `&mut T` can never alias, so the locations cannot overlap).
    unsafe {
        ptr::swap_nonoverlapping_one(x, y);
    }
}
/// Replaces `dest` with the default value of `T`, returning the previous `dest` value.
///
/// # Examples
///
/// A simple example:
///
/// ```
/// use std::mem;
///
/// let mut v: Vec<i32> = vec![1, 2];
///
/// let old_v = mem::take(&mut v);
/// assert_eq!(vec![1, 2], old_v);
/// assert!(v.is_empty());
/// ```
///
/// `take` allows taking ownership of a struct field by replacing it with an "empty" value.
/// Without `take` you can run into issues like these:
///
/// ```compile_fail,E0507
/// struct Buffer<T> { buf: Vec<T> }
///
/// impl<T> Buffer<T> {
/// fn get_and_reset(&mut self) -> Vec<T> {
/// // error: cannot move out of dereference of `&mut`-pointer
/// let buf = self.buf;
/// self.buf = Vec::new();
/// buf
/// }
/// }
/// ```
///
/// Note that `T` does not necessarily implement [`Clone`], so it can't even clone and reset
/// `self.buf`. But `take` can be used to disassociate the original value of `self.buf` from
/// `self`, allowing it to be returned:
///
/// ```
/// use std::mem;
///
/// # struct Buffer<T> { buf: Vec<T> }
/// impl<T> Buffer<T> {
/// fn get_and_reset(&mut self) -> Vec<T> {
/// mem::take(&mut self.buf)
/// }
/// }
///
/// let mut buffer = Buffer { buf: vec![0, 1] };
/// assert_eq!(buffer.buf.len(), 2);
///
/// assert_eq!(buffer.get_and_reset(), vec![0, 1]);
/// assert_eq!(buffer.buf.len(), 0);
/// ```
///
/// [`Clone`]: ../../std/clone/trait.Clone.html
#[inline]
#[stable(feature = "mem_take", since = "1.40.0")]
pub fn take<T: Default>(dest: &mut T) -> T {
    // `take` is simply `replace` with the type's default value.
    replace(dest, T::default())
}
/// Moves `src` into the referenced `dest`, returning the previous `dest` value.
///
/// Neither value is dropped.
///
/// # Examples
///
/// A simple example:
///
/// ```
/// use std::mem;
///
/// let mut v: Vec<i32> = vec![1, 2];
///
/// let old_v = mem::replace(&mut v, vec![3, 4, 5]);
/// assert_eq!(vec![1, 2], old_v);
/// assert_eq!(vec![3, 4, 5], v);
/// ```
///
/// `replace` allows consumption of a struct field by replacing it with another value.
/// Without `replace` you can run into issues like these:
///
/// ```compile_fail,E0507
/// struct Buffer<T> { buf: Vec<T> }
///
/// impl<T> Buffer<T> {
/// fn replace_index(&mut self, i: usize, v: T) -> T {
/// // error: cannot move out of dereference of `&mut`-pointer
/// let t = self.buf[i];
/// self.buf[i] = v;
/// t
/// }
/// }
/// ```
///
/// Note that `T` does not necessarily implement [`Clone`], so we can't even clone `self.buf[i]` to
/// avoid the move. But `replace` can be used to disassociate the original value at that index from
/// `self`, allowing it to be returned:
///
/// ```
/// # #![allow(dead_code)]
/// use std::mem;
///
/// # struct Buffer<T> { buf: Vec<T> }
/// impl<T> Buffer<T> {
/// fn replace_index(&mut self, i: usize, v: T) -> T {
/// mem::replace(&mut self.buf[i], v)
/// }
/// }
///
/// let mut buffer = Buffer { buf: vec![0, 1] };
/// assert_eq!(buffer.buf[0], 0);
///
/// assert_eq!(buffer.replace_index(0, 2), 0);
/// assert_eq!(buffer.buf[0], 2);
/// ```
///
/// [`Clone`]: ../../std/clone/trait.Clone.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "if you don't need the old value, you can just assign the new value directly"]
pub fn replace<T>(dest: &mut T, mut src: T) -> T {
    // After the swap, `src` holds the previous `dest` value; return it.
    // Neither value is dropped or copied in the process.
    swap(dest, &mut src);
    src
}
/// Disposes of a value.
///
/// This does so by calling the argument's implementation of [`Drop`][drop].
///
/// This effectively does nothing for types which implement `Copy`, e.g.
/// integers. Such values are copied and _then_ moved into the function, so the
/// value persists after this function call.
///
/// This function is not magic; it is literally defined as
///
/// ```
/// pub fn drop<T>(_x: T) { }
/// ```
///
/// Because `_x` is moved into the function, it is automatically dropped before
/// the function returns.
///
/// [drop]: ../ops/trait.Drop.html
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let v = vec![1, 2, 3];
///
/// drop(v); // explicitly drop the vector
/// ```
///
/// Since [`RefCell`] enforces the borrow rules at runtime, `drop` can
/// release a [`RefCell`] borrow:
///
/// ```
/// use std::cell::RefCell;
///
/// let x = RefCell::new(1);
///
/// let mut mutable_borrow = x.borrow_mut();
/// *mutable_borrow = 1;
///
/// drop(mutable_borrow); // relinquish the mutable borrow on this slot
///
/// let borrow = x.borrow();
/// println!("{}", *borrow);
/// ```
///
/// Integers and other types implementing [`Copy`] are unaffected by `drop`.
///
/// ```
/// #[derive(Copy, Clone)]
/// struct Foo(u8);
///
/// let x = 1;
/// let y = Foo(2);
/// drop(x); // a copy of `x` is moved and dropped
/// drop(y); // a copy of `y` is moved and dropped
///
/// println!("x: {}, y: {}", x, y.0); // still available
/// ```
///
/// [`RefCell`]: ../../std/cell/struct.RefCell.html
/// [`Copy`]: ../../std/marker/trait.Copy.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
// Taking `_x` by value moves it into the function; it is dropped when the
// (empty) body returns — no explicit code is needed.
pub fn drop<T>(_x: T) {}
/// Interprets `src` as having type `&U`, and then reads `src` without moving
/// the contained value.
///
/// This function will unsafely assume the pointer `src` is valid for
/// [`size_of::<U>`][size_of] bytes by transmuting `&T` to `&U` and then reading
/// the `&U`. It will also unsafely create a copy of the contained value instead of
/// moving out of `src`.
///
/// It is not a compile-time error if `T` and `U` have different sizes, but it
/// is highly encouraged to only invoke this function where `T` and `U` have the
/// same size. This function triggers [undefined behavior][ub] if `U` is larger than
/// `T`.
///
/// [ub]: ../../reference/behavior-considered-undefined.html
/// [size_of]: fn.size_of.html
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// #[repr(packed)]
/// struct Foo {
/// bar: u8,
/// }
///
/// let foo_array = [10u8];
///
/// unsafe {
/// // Copy the data from 'foo_array' and treat it as a 'Foo'
/// let mut foo_struct: Foo = mem::transmute_copy(&foo_array);
/// assert_eq!(foo_struct.bar, 10);
///
/// // Modify the copied data
/// foo_struct.bar = 20;
/// assert_eq!(foo_struct.bar, 20);
/// }
///
/// // The contents of 'foo_array' should not have changed
/// assert_eq!(foo_array, [10]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
/// Reads a `U` out of the bytes behind `src` without moving the `T`.
/// The caller must ensure `src` holds at least `size_of::<U>()` valid bytes
/// (see the documentation above: UB if `U` is larger than `T`).
pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
    let source = src as *const T as *const U;
    // A `&T` is only guaranteed to be aligned for `T`; when `U` demands no
    // stricter alignment, a plain read is fine, otherwise fall back to an
    // unaligned read.
    if align_of::<U>() <= align_of::<T>() {
        ptr::read(source)
    } else {
        ptr::read_unaligned(source)
    }
}
/// Opaque type representing the discriminant of an enum.
///
/// See the [`discriminant`] function in this module for more information.
///
/// [`discriminant`]: fn.discriminant.html
#[stable(feature = "discriminant_value", since = "1.21.0")]
// The single private field is the compiler-chosen discriminant type for `T`
// (via `DiscriminantKind`); keeping it private keeps the value opaque.
pub struct Discriminant<T>(<T as DiscriminantKind>::Discriminant);
// N.B. These trait implementations cannot be derived because we don't want any bounds on T.
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> Copy for Discriminant<T> {}

#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> clone::Clone for Discriminant<T> {
    // `Discriminant` is `Copy` (above), so cloning is a bitwise copy.
    fn clone(&self) -> Self {
        *self
    }
}

#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> cmp::PartialEq for Discriminant<T> {
    // Equality compares the underlying discriminant values.
    fn eq(&self, rhs: &Self) -> bool {
        self.0 == rhs.0
    }
}

#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> cmp::Eq for Discriminant<T> {}

#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> hash::Hash for Discriminant<T> {
    // Hashing forwards to the underlying discriminant value.
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.0.hash(state);
    }
}

#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> fmt::Debug for Discriminant<T> {
    // Renders as `Discriminant(<value>)`.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_tuple("Discriminant").field(&self.0).finish()
    }
}
/// Returns a value uniquely identifying the enum variant in `v`.
///
/// If `T` is not an enum, calling this function will not result in undefined behavior, but the
/// return value is unspecified.
///
/// # Stability
///
/// The discriminant of an enum variant may change if the enum definition changes. A discriminant
/// of some variant will not change between compilations with the same compiler.
///
/// # Examples
///
/// This can be used to compare enums that carry data, while disregarding
/// the actual data:
///
/// ```
/// use std::mem;
///
/// enum Foo { A(&'static str), B(i32), C(i32) }
///
/// assert_eq!(mem::discriminant(&Foo::A("bar")), mem::discriminant(&Foo::A("baz")));
/// assert_eq!(mem::discriminant(&Foo::B(1)), mem::discriminant(&Foo::B(2)));
/// assert_ne!(mem::discriminant(&Foo::B(3)), mem::discriminant(&Foo::C(3)));
/// ```
#[stable(feature = "discriminant_value", since = "1.21.0")]
#[rustc_const_unstable(feature = "const_discriminant", issue = "69821")]
pub const fn discriminant<T>(v: &T) -> Discriminant<T> {
    // Wrap the intrinsic's result in the opaque `Discriminant` newtype so the
    // raw value is not exposed to callers.
    Discriminant(intrinsics::discriminant_value(v))
}
// NOTE(review): stray line "tweak wording" removed from code position — it is a
// commit-message fragment left over from concatenating two revisions of this file.
//! Basic functions for dealing with memory.
//!
//! This module contains functions for querying the size and alignment of
//! types, initializing and manipulating memory.
#![stable(feature = "rust1", since = "1.0.0")]
use crate::clone;
use crate::cmp;
use crate::fmt;
use crate::hash;
use crate::intrinsics;
use crate::marker::{Copy, DiscriminantKind, Sized};
use crate::ptr;
mod manually_drop;
#[stable(feature = "manually_drop", since = "1.20.0")]
pub use manually_drop::ManuallyDrop;
mod maybe_uninit;
#[stable(feature = "maybe_uninit", since = "1.36.0")]
pub use maybe_uninit::MaybeUninit;
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(inline)]
pub use crate::intrinsics::transmute;
/// Takes ownership and "forgets" about the value **without running its destructor**.
///
/// Any resources the value manages, such as heap memory or a file handle, will linger
/// forever in an unreachable state. However, it does not guarantee that pointers
/// to this memory will remain valid.
///
/// * If you want to leak memory, see [`Box::leak`][leak].
/// * If you want to obtain a raw pointer to the memory, see [`Box::into_raw`][into_raw].
/// * If you want to dispose of a value properly, running its destructor, see
/// [`mem::drop`][drop].
///
/// # Safety
///
/// `forget` is not marked as `unsafe`, because Rust's safety guarantees
/// do not include a guarantee that destructors will always run. For example,
/// a program can create a reference cycle using [`Rc`][rc], or call
/// [`process::exit`][exit] to exit without running destructors. Thus, allowing
/// `mem::forget` from safe code does not fundamentally change Rust's safety
/// guarantees.
///
/// That said, leaking resources such as memory or I/O objects is usually undesirable.
/// The need comes up in some specialized use cases for FFI or unsafe code, but even
/// then, [`ManuallyDrop`] is typically preferred.
///
/// Because forgetting a value is allowed, any `unsafe` code you write must
/// allow for this possibility. You cannot return a value and expect that the
/// caller will necessarily run the value's destructor.
///
/// [rc]: ../../std/rc/struct.Rc.html
/// [exit]: ../../std/process/fn.exit.html
///
/// # Examples
///
/// The canonical safe use of `mem::forget` is to circumvent a value's destructor
/// implemented by the `Drop` trait. For example, this will leak a `File`, i.e. reclaim
/// the space taken by the variable but never close the underlying system resource:
///
/// ```no_run
/// use std::mem;
/// use std::fs::File;
///
/// let file = File::open("foo.txt").unwrap();
/// mem::forget(file);
/// ```
///
/// This is useful when the ownership of the underlying resource was previously
/// transferred to code outside of Rust, for example by transmitting the raw
/// file descriptor to C code.
///
/// # Relationship with `ManuallyDrop`
///
/// While `mem::forget` can also be used to transfer *memory* ownership, doing so is error-prone.
/// [`ManuallyDrop`] should be used instead. Consider, for example, this code:
///
/// ```
/// use std::mem;
///
/// let mut v = vec![65, 122];
/// // Build a `String` using the contents of `v`
/// let s = unsafe { String::from_raw_parts(v.as_mut_ptr(), v.len(), v.capacity()) };
/// // leak `v` because its memory is now managed by `s`
/// mem::forget(v); // ERROR - v is invalid and must not be passed to a function
/// assert_eq!(s, "Az");
/// // `s` is implicitly dropped and its memory deallocated.
/// ```
///
/// There are two issues with the above example:
///
/// * If more code were added between the construction of `String` and the invocation of
/// `mem::forget()`, a panic within it would cause a double free because the same memory
/// is handled by both `v` and `s`.
/// * After calling `v.as_mut_ptr()` and transmitting the ownership of the data to `s`,
/// the `v` value is invalid. Even when a value is just moved to `mem::forget` (which won't
/// inspect it), some types have strict requirements on their values that
/// make them invalid when dangling or no longer owned. Using invalid values in any
/// way, including passing them to or returning them from functions, constitutes
/// undefined behavior and may break the assumptions made by the compiler.
///
/// Switching to `ManuallyDrop` avoids both issues:
///
/// ```
/// use std::mem::ManuallyDrop;
///
/// let v = vec![65, 122];
/// // Before we disassemble `v` into its raw parts, make sure it
/// // does not get dropped!
/// let mut v = ManuallyDrop::new(v);
/// // Now disassemble `v`. These operations cannot panic, so there cannot be a leak.
/// let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity());
/// // Finally, build a `String`.
/// let s = unsafe { String::from_raw_parts(ptr, len, cap) };
/// assert_eq!(s, "Az");
/// // `s` is implicitly dropped and its memory deallocated.
/// ```
///
/// `ManuallyDrop` robustly prevents double-free because we disable `v`'s destructor
/// before doing anything else. `mem::forget()` doesn't allow this because it consumes its
/// argument, forcing us to call it only after extracting anything we need from `v`. Even
/// if a panic were introduced between construction of `ManuallyDrop` and building the
/// string (which cannot happen in the code as shown), it would result in a leak and not a
/// double free. In other words, `ManuallyDrop` errs on the side of leaking instead of
/// erring on the side of (double-)dropping.
///
/// Also, `ManuallyDrop` prevents us from having to "touch" `v` after transferring the
/// ownership to `s` — the final step of interacting with `v` to dispose of it without
/// running its destructor is entirely avoided.
///
/// [drop]: fn.drop.html
/// [uninit]: fn.uninitialized.html
/// [clone]: ../clone/trait.Clone.html
/// [swap]: fn.swap.html
/// [box]: ../../std/boxed/struct.Box.html
/// [leak]: ../../std/boxed/struct.Box.html#method.leak
/// [into_raw]: ../../std/boxed/struct.Box.html#method.into_raw
/// [ub]: ../../reference/behavior-considered-undefined.html
/// [`ManuallyDrop`]: struct.ManuallyDrop.html
#[inline]
#[rustc_const_unstable(feature = "const_forget", issue = "69616")]
#[stable(feature = "rust1", since = "1.0.0")]
pub const fn forget<T>(t: T) {
    // Wrapping `t` in `ManuallyDrop` and immediately discarding the wrapper
    // suppresses the destructor without any unsafe code.
    ManuallyDrop::new(t);
}
/// Like [`forget`], but also accepts unsized values.
///
/// This function is just a shim intended to be removed when the `unsized_locals` feature gets
/// stabilized.
///
/// [`forget`]: fn.forget.html
#[inline]
#[unstable(feature = "forget_unsized", issue = "none")]
pub fn forget_unsized<T: ?Sized>(t: T) {
    // SAFETY: the forget intrinsic could be safe, but there's no point in making it safe since
    // we'll be implementing this function soon via `ManuallyDrop`
    unsafe { intrinsics::forget(t) }
}
/// Returns the size of a type in bytes.
///
/// More specifically, this is the offset in bytes between successive elements
/// in an array with that item type including alignment padding. Thus, for any
/// type `T` and length `n`, `[T; n]` has a size of `n * size_of::<T>()`.
///
/// In general, the size of a type is not stable across compilations, but
/// specific types such as primitives are.
///
/// The following table gives the size for primitives.
///
/// Type | size_of::\<Type>()
/// ---- | ---------------
/// () | 0
/// bool | 1
/// u8 | 1
/// u16 | 2
/// u32 | 4
/// u64 | 8
/// u128 | 16
/// i8 | 1
/// i16 | 2
/// i32 | 4
/// i64 | 8
/// i128 | 16
/// f32 | 4
/// f64 | 8
/// char | 4
///
/// Furthermore, `usize` and `isize` have the same size.
///
/// The types `*const T`, `&T`, `Box<T>`, `Option<&T>`, and `Option<Box<T>>` all have
/// the same size. If `T` is Sized, all of those types have the same size as `usize`.
///
/// The mutability of a pointer does not change its size. As such, `&T` and `&mut T`
/// have the same size. Likewise for `*const T` and `*mut T`.
///
/// # Size of `#[repr(C)]` items
///
/// The `C` representation for items has a defined layout. With this layout,
/// the size of items is also stable as long as all fields have a stable size.
///
/// ## Size of Structs
///
/// For `structs`, the size is determined by the following algorithm.
///
/// For each field in the struct ordered by declaration order:
///
/// 1. Add the size of the field.
/// 2. Round up the current size to the nearest multiple of the next field's [alignment].
///
/// Finally, round the size of the struct to the nearest multiple of its [alignment].
/// The alignment of the struct is usually the largest alignment of all its
/// fields; this can be changed with the use of `repr(align(N))`.
///
/// Unlike `C`, zero sized structs are not rounded up to one byte in size.
///
/// ## Size of Enums
///
/// Enums that carry no data other than the discriminant have the same size as C enums
/// on the platform they are compiled for.
///
/// ## Size of Unions
///
/// The size of a union is the size of its largest field.
///
/// Unlike `C`, zero sized unions are not rounded up to one byte in size.
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// // Some primitives
/// assert_eq!(4, mem::size_of::<i32>());
/// assert_eq!(8, mem::size_of::<f64>());
/// assert_eq!(0, mem::size_of::<()>());
///
/// // Some arrays
/// assert_eq!(8, mem::size_of::<[i32; 2]>());
/// assert_eq!(12, mem::size_of::<[i32; 3]>());
/// assert_eq!(0, mem::size_of::<[i32; 0]>());
///
///
/// // Pointer size equality
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<*const i32>());
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<Box<i32>>());
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<Option<&i32>>());
/// assert_eq!(mem::size_of::<Box<i32>>(), mem::size_of::<Option<Box<i32>>>());
/// ```
///
/// Using `#[repr(C)]`.
///
/// ```
/// use std::mem;
///
/// #[repr(C)]
/// struct FieldStruct {
/// first: u8,
/// second: u16,
/// third: u8
/// }
///
/// // The size of the first field is 1, so add 1 to the size. Size is 1.
/// // The alignment of the second field is 2, so add 1 to the size for padding. Size is 2.
/// // The size of the second field is 2, so add 2 to the size. Size is 4.
/// // The alignment of the third field is 1, so add 0 to the size for padding. Size is 4.
/// // The size of the third field is 1, so add 1 to the size. Size is 5.
/// // Finally, the alignment of the struct is 2 (because the largest alignment amongst its
/// // fields is 2), so add 1 to the size for padding. Size is 6.
/// assert_eq!(6, mem::size_of::<FieldStruct>());
///
/// #[repr(C)]
/// struct TupleStruct(u8, u16, u8);
///
/// // Tuple structs follow the same rules.
/// assert_eq!(6, mem::size_of::<TupleStruct>());
///
/// // Note that reordering the fields can lower the size. We can remove both padding bytes
/// // by putting `third` before `second`.
/// #[repr(C)]
/// struct FieldStructOptimized {
/// first: u8,
/// third: u8,
/// second: u16
/// }
///
/// assert_eq!(4, mem::size_of::<FieldStructOptimized>());
///
/// // Union size is the size of the largest field.
/// #[repr(C)]
/// union ExampleUnion {
/// smaller: u8,
/// larger: u16
/// }
///
/// assert_eq!(2, mem::size_of::<ExampleUnion>());
/// ```
///
/// [alignment]: ./fn.align_of.html
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_promotable]
#[rustc_const_stable(feature = "const_size_of", since = "1.32.0")]
pub const fn size_of<T>() -> usize {
    // Fully resolved by the compiler; usable in constant expressions.
    intrinsics::size_of::<T>()
}
/// Returns the size of the pointed-to value in bytes.
///
/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
/// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object],
/// then `size_of_val` can be used to get the dynamically-known size.
///
/// [slice]: ../../std/primitive.slice.html
/// [trait object]: ../../book/ch17-02-trait-objects.html
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::size_of_val(&5i32));
///
/// let x: [u8; 13] = [0; 13];
/// let y: &[u8] = &x;
/// assert_eq!(13, mem::size_of_val(y));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn size_of_val<T: ?Sized>(val: &T) -> usize {
    // Delegates to the intrinsic, which computes the dynamic size when `T`
    // is unsized (e.g. a slice or trait object).
    intrinsics::size_of_val(val)
}
/// Returns the size of the pointed-to value in bytes.
///
/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
/// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object],
/// then `size_of_val_raw` can be used to get the dynamically-known size.
///
/// # Safety
///
/// This function is only safe to call if the following conditions hold:
///
/// - If `T` is `Sized`, this function is always safe to call.
/// - If the unsized tail of `T` is:
/// - a [slice], then the length of the slice tail must be an initialized
/// integer, and the size of the *entire value*
/// (dynamic tail length + statically sized prefix) must fit in `isize`.
/// - a [trait object], then the vtable part of the pointer must point
/// to a valid vtable acquired by an unsizing coercion, and the size
/// of the *entire value* (dynamic tail length + statically sized prefix)
/// must fit in `isize`.
/// - an (unstable) [extern type], then this function is always safe to
/// call, but may panic or otherwise return the wrong value, as the
/// extern type's layout is not known. This is the same behavior as
/// [`size_of_val`] on a reference to a type with extern type tail.
/// - otherwise, it is conservatively not allowed to call this function.
///
/// [slice]: ../../std/primitive.slice.html
/// [trait object]: ../../book/ch17-02-trait-objects.html
/// [extern type]: ../../unstable-book/language-features/extern-types.html
/// [`size_of_val`]: ../../core/mem/fn.size_of_val.html
///
/// # Examples
///
/// ```
/// #![feature(layout_for_ptr)]
/// use std::mem;
///
/// assert_eq!(4, mem::size_of_val(&5i32));
///
/// let x: [u8; 13] = [0; 13];
/// let y: &[u8] = &x;
/// assert_eq!(13, unsafe { mem::size_of_val_raw(y) });
/// ```
#[inline]
#[unstable(feature = "layout_for_ptr", issue = "69835")]
pub unsafe fn size_of_val_raw<T: ?Sized>(val: *const T) -> usize {
    // Same intrinsic as `size_of_val`, but reached through a raw pointer, so
    // the caller must uphold the conditions listed under `# Safety` above.
    intrinsics::size_of_val(val)
}
/// Returns the [ABI]-required minimum alignment of a type.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// # #![allow(deprecated)]
/// use std::mem;
///
/// assert_eq!(4, mem::min_align_of::<i32>());
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(reason = "use `align_of` instead", since = "1.2.0")]
pub fn min_align_of<T>() -> usize {
    // Deprecated alias: delegates to the same intrinsic as `align_of`.
    intrinsics::min_align_of::<T>()
}
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// # #![allow(deprecated)]
/// use std::mem;
///
/// assert_eq!(4, mem::min_align_of_val(&5i32));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(reason = "use `align_of_val` instead", since = "1.2.0")]
pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
    // Deprecated alias: delegates to the same intrinsic as `align_of_val`.
    intrinsics::min_align_of_val(val)
}
/// Returns the [ABI]-required minimum alignment of a type.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::align_of::<i32>());
/// ```
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_promotable]
#[rustc_const_stable(feature = "const_align_of", since = "1.32.0")]
pub const fn align_of<T>() -> usize {
    // Uses the same `min_align_of` intrinsic as the deprecated
    // `min_align_of` function; only the public name differs.
    intrinsics::min_align_of::<T>()
}
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// assert_eq!(4, mem::align_of_val(&5i32));
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated)]
pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
    // Forwards to the deprecated `min_align_of_val` (hence the `allow`
    // attribute above); both compute the same value.
    min_align_of_val(val)
}
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
///
/// Every reference to a value of the type `T` must be a multiple of this number.
///
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
///
/// # Safety
///
/// This function is only safe to call if the following conditions hold:
///
/// - If `T` is `Sized`, this function is always safe to call.
/// - If the unsized tail of `T` is:
/// - a [slice], then the length of the slice tail must be an initialized
/// integer, and the size of the *entire value*
/// (dynamic tail length + statically sized prefix) must fit in `isize`.
/// - a [trait object], then the vtable part of the pointer must point
/// to a valid vtable acquired by an unsizing coercion, and the size
/// of the *entire value* (dynamic tail length + statically sized prefix)
/// must fit in `isize`.
/// - an (unstable) [extern type], then this function is always safe to
/// call, but may panic or otherwise return the wrong value, as the
/// extern type's layout is not known. This is the same behavior as
/// [`align_of_val`] on a reference to a type with extern type tail.
/// - otherwise, it is conservatively not allowed to call this function.
///
/// [slice]: ../../std/primitive.slice.html
/// [trait object]: ../../book/ch17-02-trait-objects.html
/// [extern type]: ../../unstable-book/language-features/extern-types.html
/// [`align_of_val`]: ../../core/mem/fn.align_of_val.html
///
/// # Examples
///
/// ```
/// #![feature(layout_for_ptr)]
/// use std::mem;
///
/// assert_eq!(4, unsafe { mem::align_of_val_raw(&5i32) });
/// ```
#[inline]
#[unstable(feature = "layout_for_ptr", issue = "69835")]
pub unsafe fn align_of_val_raw<T: ?Sized>(val: *const T) -> usize {
    // Same intrinsic as `align_of_val`, but reached through a raw pointer, so
    // the caller must uphold the conditions listed under `# Safety` above.
    intrinsics::min_align_of_val(val)
}
/// Returns `true` if dropping values of type `T` matters.
///
/// This is purely an optimization hint, and may be implemented conservatively:
/// it may return `true` for types that don't actually need to be dropped.
/// As such always returning `true` would be a valid implementation of
/// this function. However if this function actually returns `false`, then you
/// can be certain dropping `T` has no side effect.
///
/// Low level implementations of things like collections, which need to manually
/// drop their data, should use this function to avoid unnecessarily
/// trying to drop all their contents when they are destroyed. This might not
/// make a difference in release builds (where a loop that has no side-effects
/// is easily detected and eliminated), but is often a big win for debug builds.
///
/// Note that [`drop_in_place`] already performs this check, so if your workload
/// can be reduced to some small number of [`drop_in_place`] calls, using this is
/// unnecessary. In particular note that you can [`drop_in_place`] a slice, and that
/// will do a single needs_drop check for all the values.
///
/// Types like Vec therefore just `drop_in_place(&mut self[..])` without using
/// `needs_drop` explicitly. Types like [`HashMap`], on the other hand, have to drop
/// values one at a time and should use this API.
///
/// [`drop_in_place`]: ../ptr/fn.drop_in_place.html
/// [`HashMap`]: ../../std/collections/struct.HashMap.html
///
/// # Examples
///
/// Here's an example of how a collection might make use of `needs_drop`:
///
/// ```
/// use std::{mem, ptr};
///
/// pub struct MyCollection<T> {
/// # data: [T; 1],
/// /* ... */
/// }
/// # impl<T> MyCollection<T> {
/// # fn iter_mut(&mut self) -> &mut [T] { &mut self.data }
/// # fn free_buffer(&mut self) {}
/// # }
///
/// impl<T> Drop for MyCollection<T> {
/// fn drop(&mut self) {
/// unsafe {
/// // drop the data
/// if mem::needs_drop::<T>() {
/// for x in self.iter_mut() {
/// ptr::drop_in_place(x);
/// }
/// }
/// self.free_buffer();
/// }
/// }
/// }
/// ```
#[inline]
#[stable(feature = "needs_drop", since = "1.21.0")]
#[rustc_const_stable(feature = "const_needs_drop", since = "1.36.0")]
pub const fn needs_drop<T>() -> bool {
    // Answered entirely at compile time by the intrinsic.
    intrinsics::needs_drop::<T>()
}
/// Returns the value of type `T` represented by the all-zero byte-pattern.
///
/// This means that, for example, the padding byte in `(u8, u16)` is not
/// necessarily zeroed.
///
/// There is no guarantee that an all-zero byte-pattern represents a valid value
/// of some type `T`. For example, the all-zero byte-pattern is not a valid value
/// for reference types (`&T`, `&mut T`) and function pointers. Using `zeroed`
/// on such types causes immediate [undefined behavior][ub] because [the Rust
/// compiler assumes][inv] that there always is a valid value in a variable it
/// considers initialized.
///
/// This has the same effect as [`MaybeUninit::zeroed().assume_init()`][zeroed].
/// It is useful for FFI sometimes, but should generally be avoided.
///
/// [zeroed]: union.MaybeUninit.html#method.zeroed
/// [ub]: ../../reference/behavior-considered-undefined.html
/// [inv]: union.MaybeUninit.html#initialization-invariant
///
/// # Examples
///
/// Correct usage of this function: initializing an integer with zero.
///
/// ```
/// use std::mem;
///
/// let x: i32 = unsafe { mem::zeroed() };
/// assert_eq!(0, x);
/// ```
///
/// *Incorrect* usage of this function: initializing a reference with zero.
///
/// ```rust,no_run
/// # #![allow(invalid_value)]
/// use std::mem;
///
/// let _x: &i32 = unsafe { mem::zeroed() }; // Undefined behavior!
/// let _y: fn() = unsafe { mem::zeroed() }; // And again!
/// ```
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated_in_future)]
#[allow(deprecated)]
#[rustc_diagnostic_item = "mem_zeroed"]
pub unsafe fn zeroed<T>() -> T {
    // Rejects types for which the all-zero byte-pattern is known to be
    // invalid, before any such value is materialized.
    intrinsics::assert_zero_valid::<T>();
    MaybeUninit::zeroed().assume_init()
}
/// Bypasses Rust's normal memory-initialization checks by pretending to
/// produce a value of type `T`, while doing nothing at all.
///
/// **This function is deprecated.** Use [`MaybeUninit<T>`] instead.
///
/// The reason for deprecation is that the function basically cannot be used
/// correctly: it has the same effect as [`MaybeUninit::uninit().assume_init()`][uninit].
/// As the [`assume_init` documentation][assume_init] explains,
/// [the Rust compiler assumes][inv] that values are properly initialized.
/// As a consequence, calling e.g. `mem::uninitialized::<bool>()` causes immediate
/// undefined behavior for returning a `bool` that is not definitely either `true`
/// or `false`. Worse, truly uninitialized memory like what gets returned here
/// is special in that the compiler knows that it does not have a fixed value.
/// This makes it undefined behavior to have uninitialized data in a variable even
/// if that variable has an integer type.
/// (Notice that the rules around uninitialized integers are not finalized yet, but
/// until they are, it is advisable to avoid them.)
///
/// [`MaybeUninit<T>`]: union.MaybeUninit.html
/// [uninit]: union.MaybeUninit.html#method.uninit
/// [assume_init]: union.MaybeUninit.html#method.assume_init
/// [inv]: union.MaybeUninit.html#initialization-invariant
#[inline(always)]
#[rustc_deprecated(since = "1.39.0", reason = "use `mem::MaybeUninit` instead")]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(deprecated_in_future)]
#[allow(deprecated)]
#[rustc_diagnostic_item = "mem_uninitialized"]
pub unsafe fn uninitialized<T>() -> T {
    // Rejects types for which an uninitialized bit-pattern is known to be
    // invalid, before any such value is produced.
    intrinsics::assert_uninit_valid::<T>();
    MaybeUninit::uninit().assume_init()
}
/// Swaps the values at two mutable locations, without deinitializing either one.
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// let mut x = 5;
/// let mut y = 42;
///
/// mem::swap(&mut x, &mut y);
///
/// assert_eq!(42, x);
/// assert_eq!(5, y);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn swap<T>(x: &mut T, y: &mut T) {
    // Delegates to a raw-pointer swap; the two `&mut` references guarantee
    // the locations are distinct and non-overlapping.
    // SAFETY: the raw pointers have been created from safe mutable references satisfying all the
    // constraints on `ptr::swap_nonoverlapping_one`
    unsafe {
        ptr::swap_nonoverlapping_one(x, y);
    }
}
/// Replaces `dest` with the default value of `T`, returning the previous `dest` value.
///
/// # Examples
///
/// A simple example:
///
/// ```
/// use std::mem;
///
/// let mut v: Vec<i32> = vec![1, 2];
///
/// let old_v = mem::take(&mut v);
/// assert_eq!(vec![1, 2], old_v);
/// assert!(v.is_empty());
/// ```
///
/// `take` allows taking ownership of a struct field by replacing it with an "empty" value.
/// Without `take` you can run into issues like these:
///
/// ```compile_fail,E0507
/// struct Buffer<T> { buf: Vec<T> }
///
/// impl<T> Buffer<T> {
/// fn get_and_reset(&mut self) -> Vec<T> {
/// // error: cannot move out of dereference of `&mut`-pointer
/// let buf = self.buf;
/// self.buf = Vec::new();
/// buf
/// }
/// }
/// ```
///
/// Note that `T` does not necessarily implement [`Clone`], so it can't even clone and reset
/// `self.buf`. But `take` can be used to disassociate the original value of `self.buf` from
/// `self`, allowing it to be returned:
///
/// ```
/// use std::mem;
///
/// # struct Buffer<T> { buf: Vec<T> }
/// impl<T> Buffer<T> {
/// fn get_and_reset(&mut self) -> Vec<T> {
/// mem::take(&mut self.buf)
/// }
/// }
///
/// let mut buffer = Buffer { buf: vec![0, 1] };
/// assert_eq!(buffer.buf.len(), 2);
///
/// assert_eq!(buffer.get_and_reset(), vec![0, 1]);
/// assert_eq!(buffer.buf.len(), 0);
/// ```
///
/// [`Clone`]: ../../std/clone/trait.Clone.html
#[inline]
#[stable(feature = "mem_take", since = "1.40.0")]
pub fn take<T: Default>(dest: &mut T) -> T {
    // Simply `replace` with `T`'s default value as the replacement.
    replace(dest, T::default())
}
/// Moves `src` into the referenced `dest`, returning the previous `dest` value.
///
/// Neither value is dropped.
///
/// # Examples
///
/// A simple example:
///
/// ```
/// use std::mem;
///
/// let mut v: Vec<i32> = vec![1, 2];
///
/// let old_v = mem::replace(&mut v, vec![3, 4, 5]);
/// assert_eq!(vec![1, 2], old_v);
/// assert_eq!(vec![3, 4, 5], v);
/// ```
///
/// `replace` allows consumption of a struct field by replacing it with another value.
/// Without `replace` you can run into issues like these:
///
/// ```compile_fail,E0507
/// struct Buffer<T> { buf: Vec<T> }
///
/// impl<T> Buffer<T> {
/// fn replace_index(&mut self, i: usize, v: T) -> T {
/// // error: cannot move out of dereference of `&mut`-pointer
/// let t = self.buf[i];
/// self.buf[i] = v;
/// t
/// }
/// }
/// ```
///
/// Note that `T` does not necessarily implement [`Clone`], so we can't even clone `self.buf[i]` to
/// avoid the move. But `replace` can be used to disassociate the original value at that index from
/// `self`, allowing it to be returned:
///
/// ```
/// # #![allow(dead_code)]
/// use std::mem;
///
/// # struct Buffer<T> { buf: Vec<T> }
/// impl<T> Buffer<T> {
/// fn replace_index(&mut self, i: usize, v: T) -> T {
/// mem::replace(&mut self.buf[i], v)
/// }
/// }
///
/// let mut buffer = Buffer { buf: vec![0, 1] };
/// assert_eq!(buffer.buf[0], 0);
///
/// assert_eq!(buffer.replace_index(0, 2), 0);
/// assert_eq!(buffer.buf[0], 2);
/// ```
///
/// [`Clone`]: ../../std/clone/trait.Clone.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "if you don't need the old value, you can just assign the new value directly"]
pub fn replace<T>(dest: &mut T, mut src: T) -> T {
    // After the swap, `src` holds the old `*dest` value and `*dest` holds
    // the new one; returning `src` hands the old value back to the caller
    // without dropping it.
    swap(dest, &mut src);
    src
}
/// Disposes of a value.
///
/// This does so by calling the argument's implementation of [`Drop`][drop].
///
/// This effectively does nothing for types which implement `Copy`, e.g.
/// integers. Such values are copied and _then_ moved into the function, so the
/// value persists after this function call.
///
/// This function is not magic; it is literally defined as
///
/// ```
/// pub fn drop<T>(_x: T) { }
/// ```
///
/// Because `_x` is moved into the function, it is automatically dropped before
/// the function returns.
///
/// [drop]: ../ops/trait.Drop.html
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// let v = vec![1, 2, 3];
///
/// drop(v); // explicitly drop the vector
/// ```
///
/// Since [`RefCell`] enforces the borrow rules at runtime, `drop` can
/// release a [`RefCell`] borrow:
///
/// ```
/// use std::cell::RefCell;
///
/// let x = RefCell::new(1);
///
/// let mut mutable_borrow = x.borrow_mut();
/// *mutable_borrow = 1;
///
/// drop(mutable_borrow); // relinquish the mutable borrow on this slot
///
/// let borrow = x.borrow();
/// println!("{}", *borrow);
/// ```
///
/// Integers and other types implementing [`Copy`] are unaffected by `drop`.
///
/// ```
/// #[derive(Copy, Clone)]
/// struct Foo(u8);
///
/// let x = 1;
/// let y = Foo(2);
/// drop(x); // a copy of `x` is moved and dropped
/// drop(y); // a copy of `y` is moved and dropped
///
/// println!("x: {}, y: {}", x, y.0); // still available
/// ```
///
/// [`RefCell`]: ../../std/cell/struct.RefCell.html
/// [`Copy`]: ../../std/marker/trait.Copy.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
// `_x` is moved into the function, so it is dropped when the empty body ends.
pub fn drop<T>(_x: T) {}
/// Interprets `src` as having type `&U`, and then reads `src` without moving
/// the contained value.
///
/// This function will unsafely assume the pointer `src` is valid for
/// [`size_of::<U>`][size_of] bytes by transmuting `&T` to `&U` and then reading
/// the `&U`. It will also unsafely create a copy of the contained value instead of
/// moving out of `src`.
///
/// It is not a compile-time error if `T` and `U` have different sizes, but it
/// is highly encouraged to only invoke this function where `T` and `U` have the
/// same size. This function triggers [undefined behavior][ub] if `U` is larger than
/// `T`.
///
/// [ub]: ../../reference/behavior-considered-undefined.html
/// [size_of]: fn.size_of.html
///
/// # Examples
///
/// ```
/// use std::mem;
///
/// #[repr(packed)]
/// struct Foo {
/// bar: u8,
/// }
///
/// let foo_array = [10u8];
///
/// unsafe {
/// // Copy the data from 'foo_array' and treat it as a 'Foo'
/// let mut foo_struct: Foo = mem::transmute_copy(&foo_array);
/// assert_eq!(foo_struct.bar, 10);
///
/// // Modify the copied data
/// foo_struct.bar = 20;
/// assert_eq!(foo_struct.bar, 20);
/// }
///
/// // The contents of 'foo_array' should not have changed
/// assert_eq!(foo_array, [10]);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
// If U has a higher alignment requirement, src may not be suitably aligned.
if align_of::<U>() > align_of::<T>() {
ptr::read_unaligned(src as *const T as *const U)
} else {
ptr::read(src as *const T as *const U)
}
}
/// Opaque type representing the discriminant of an enum.
///
/// See the [`discriminant`] function in this module for more information.
///
/// [`discriminant`]: fn.discriminant.html
#[stable(feature = "discriminant_value", since = "1.21.0")]
// The inner type is chosen by the compiler via the `DiscriminantKind` trait
// and is deliberately not exposed to users.
pub struct Discriminant<T>(<T as DiscriminantKind>::Discriminant);
// N.B. These trait implementations cannot be derived because we don't want any bounds on T.
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> Copy for Discriminant<T> {}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> clone::Clone for Discriminant<T> {
    fn clone(&self) -> Self {
        // `Discriminant<T>` is `Copy` (see above), so cloning is a plain copy.
        *self
    }
}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> cmp::PartialEq for Discriminant<T> {
    fn eq(&self, rhs: &Self) -> bool {
        // Compare the underlying discriminant values directly.
        self.0 == rhs.0
    }
}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> cmp::Eq for Discriminant<T> {}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> hash::Hash for Discriminant<T> {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        // Hash only the underlying discriminant value.
        self.0.hash(state);
    }
}
#[stable(feature = "discriminant_value", since = "1.21.0")]
impl<T> fmt::Debug for Discriminant<T> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_tuple("Discriminant").field(&self.0).finish()
    }
}
/// Returns a value uniquely identifying the enum variant in `v`.
///
/// If `T` is not an enum, calling this function will not result in undefined behavior, but the
/// return value is unspecified.
///
/// # Stability
///
/// The discriminant of an enum variant may change if the enum definition changes. A discriminant
/// of some variant will not change between compilations with the same compiler.
///
/// # Examples
///
/// This can be used to compare enums that carry data, while disregarding
/// the actual data:
///
/// ```
/// use std::mem;
///
/// enum Foo { A(&'static str), B(i32), C(i32) }
///
/// assert_eq!(mem::discriminant(&Foo::A("bar")), mem::discriminant(&Foo::A("baz")));
/// assert_eq!(mem::discriminant(&Foo::B(1)), mem::discriminant(&Foo::B(2)));
/// assert_ne!(mem::discriminant(&Foo::B(3)), mem::discriminant(&Foo::C(3)));
/// ```
#[stable(feature = "discriminant_value", since = "1.21.0")]
#[rustc_const_unstable(feature = "const_discriminant", issue = "69821")]
pub const fn discriminant<T>(v: &T) -> Discriminant<T> {
    // Wraps the compiler-computed discriminant value in the opaque
    // `Discriminant` type.
    Discriminant(intrinsics::discriminant_value(v))
}
|
//! Types dealing with ranges of values
#[macro_escape];
extern mod extra;
use std::cmp;
use extra::time::Timespec;
// Constructs a `Range` using bracket notation: `(` / `)` denote exclusive
// bounds, `[` / `]` denote inclusive bounds, and omitting the expression on
// a side leaves that side unbounded. `range!(empty)` builds the empty range.
macro_rules! range(
    (empty) => (Range::empty());
    ('(', ')') => (Range::new(None, None));
    ('(', $h:expr ')') => (
        Range::new(None, Some(RangeBound::new($h, Exclusive)))
    );
    ('(', $h:expr ']') => (
        Range::new(None, Some(RangeBound::new($h, Inclusive)))
    );
    ('(' $l:expr, ')') => (
        Range::new(Some(RangeBound::new($l, Exclusive)), None)
    );
    ('[' $l:expr, ')') => (
        Range::new(Some(RangeBound::new($l, Inclusive)), None)
    );
    ('(' $l:expr, $h:expr ')') => (
        Range::new(Some(RangeBound::new($l, Exclusive)),
                   Some(RangeBound::new($h, Exclusive)))
    );
    ('(' $l:expr, $h:expr ']') => (
        Range::new(Some(RangeBound::new($l, Exclusive)),
                   Some(RangeBound::new($h, Inclusive)))
    );
    ('[' $l:expr, $h:expr ')') => (
        Range::new(Some(RangeBound::new($l, Inclusive)),
                   Some(RangeBound::new($h, Exclusive)))
    );
    ('[' $l:expr, $h:expr ']') => (
        Range::new(Some(RangeBound::new($l, Inclusive)),
                   Some(RangeBound::new($h, Inclusive)))
    )
)
/// A trait that normalizes a range bound for a type
pub trait Normalizable {
    /// Given a range bound, returns the normalized version of that bound. For
    /// discrete types such as i32, the normalized lower bound is always
    /// inclusive and the normalized upper bound is always exclusive. Other
    /// types, such as Timespec, have no normalization process so their
    /// implementation is a no-op.
    ///
    /// The logic here should match the logic performed by the equivalent
    /// Postgres type.
    ///
    /// `S` carries the side (upper or lower) of the bound at the type level.
    fn normalize<S: BoundSided>(bound: RangeBound<S, Self>)
            -> RangeBound<S, Self>;
}
// Implements `Normalizable` for a bounded discrete integer type by shifting
// bound values so lower bounds become inclusive and upper bounds exclusive.
macro_rules! bounded_normalizable(
    ($t:ty) => (
        impl Normalizable for $t {
            fn normalize<S: BoundSided>(bound: RangeBound<S, $t>)
                    -> RangeBound<S, $t> {
                match (BoundSided::side(None::<S>), bound.type_) {
                    (Upper, Inclusive) => {
                        // `value + 1` would overflow at the maximum value.
                        assert!(bound.value != Bounded::max_value());
                        RangeBound::new(bound.value + 1, Exclusive)
                    }
                    (Lower, Exclusive) => {
                        // Same overflow guard as above.
                        assert!(bound.value != Bounded::max_value());
                        RangeBound::new(bound.value + 1, Inclusive)
                    }
                    // Already in normal form.
                    _ => bound
                }
            }
        }
    )
)
bounded_normalizable!(i32)
bounded_normalizable!(i64)
impl Normalizable for Timespec {
    // Timespec is treated as a continuous type, so there is nothing to
    // normalize; the bound is returned unchanged.
    fn normalize<S: BoundSided>(bound: RangeBound<S, Timespec>)
            -> RangeBound<S, Timespec> {
        bound
    }
}
/// Identifies which side of a range a bound belongs to.
#[deriving(Eq)]
enum BoundSide {
    /// The bound caps the range from above
    Upper,
    /// The bound caps the range from below
    Lower
}
/// A type-level tag mapping a phantom side type to its `BoundSide` value.
trait BoundSided {
    // param is a hack to get around lack of hints for self type
    fn side(_: Option<Self>) -> BoundSide;
}
/// A tag type representing an upper bound
// Uninhabited enum: exists purely as a phantom type parameter.
#[deriving(Eq,Clone)]
pub enum UpperBound {}
/// A tag type representing a lower bound
// Uninhabited enum: exists purely as a phantom type parameter.
#[deriving(Eq,Clone)]
pub enum LowerBound {}
impl BoundSided for UpperBound {
    fn side(_: Option<UpperBound>) -> BoundSide {
        Upper
    }
}
impl BoundSided for LowerBound {
    fn side(_: Option<LowerBound>) -> BoundSide {
        Lower
    }
}
/// The type of a range bound
///
/// In the `range!` macro, square brackets produce `Inclusive` bounds and
/// parentheses produce `Exclusive` bounds.
#[deriving(Eq,Clone)]
pub enum BoundType {
    /// The bound includes its value
    Inclusive,
    /// The bound excludes its value
    Exclusive
}
/// Represents a one-sided bound.
///
/// The side is determined by the `S` phantom parameter, which is never
/// stored; it only tags the bound's side in the type.
#[deriving(Eq,Clone)]
pub struct RangeBound<S, T> {
    /// The value of the bound
    value: T,
    /// The type of the bound
    type_: BoundType
}
impl<S: BoundSided, T: Ord> Ord for RangeBound<S, T> {
    // Orders bounds by position: when the values are equal, an exclusive
    // upper bound sits below an inclusive one, and an inclusive lower bound
    // sits below an exclusive one (hence `<=` in those two cases).
    fn lt(&self, other: &RangeBound<S, T>) -> bool {
        match (BoundSided::side(None::<S>), self.type_, other.type_) {
            (Upper, Exclusive, Inclusive)
                | (Lower, Inclusive, Exclusive) => self.value <= other.value,
            _ => self.value < other.value
        }
    }
}
impl<S: BoundSided, T: Ord> RangeBound<S, T> {
    /// Constructs a new range bound
    pub fn new(value: T, type_: BoundType) -> RangeBound<S, T> {
        RangeBound { value: value, type_: type_ }
    }
    /// Determines if a value lies within the range specified by this bound.
    pub fn in_bounds(&self, value: &T) -> bool {
        // The comparison direction depends on the side of the bound, and its
        // strictness on the bound type.
        match (self.type_, BoundSided::side(None::<S>)) {
            (Inclusive, Upper) => value <= &self.value,
            (Exclusive, Upper) => value < &self.value,
            (Inclusive, Lower) => value >= &self.value,
            (Exclusive, Lower) => value > &self.value,
        }
    }
}
/// Wraps an optional bound so that `None` orders as "unbounded": an absent
/// lower bound compares below everything and an absent upper bound above
/// everything.
struct OptBound<'a, S, T>(&'a Option<RangeBound<S, T>>);
impl<'a, S: BoundSided, T: Ord> Ord for OptBound<'a, S, T> {
    fn lt(&self, other: &OptBound<'a, S, T>) -> bool {
        match (*self, *other) {
            // Two missing bounds compare equal.
            (OptBound(&None), OptBound(&None)) => false,
            (OptBound(&None), _) => BoundSided::side(None::<S>) == Lower,
            (_, OptBound(&None)) => BoundSided::side(None::<S>) == Upper,
            // Both present: defer to `RangeBound`'s ordering.
            (OptBound(&Some(ref a)), OptBound(&Some(ref b))) => a < b
        }
    }
}
/// Represents a range of values.
#[deriving(Eq,Clone)]
pub enum Range<T> {
    // Variants are private: ranges must be built through `Range::new` or
    // `Range::empty`, which normalize the bounds and canonicalize emptiness.
    priv Empty,
    priv Normal(Option<RangeBound<LowerBound, T>>,
                Option<RangeBound<UpperBound, T>>)
}
impl<T: Ord+Normalizable> Range<T> {
    /// Creates a new range.
    ///
    /// If a bound is `None`, the range is unbounded in that direction.
    pub fn new(lower: Option<RangeBound<LowerBound, T>>,
               upper: Option<RangeBound<UpperBound, T>>) -> Range<T> {
        // Normalize each bound first (e.g. discrete types shift to the
        // inclusive-lower / exclusive-upper canonical form).
        let lower = lower.map(|bound| Normalizable::normalize(bound));
        let upper = upper.map(|bound| Normalizable::normalize(bound));
        // A doubly-bounded range may be empty; collapse every empty range
        // into the single `Empty` variant.
        match (&lower, &upper) {
            (&Some(ref lower), &Some(ref upper)) => {
                let empty = match (lower.type_, upper.type_) {
                    // `[a, a]` contains exactly one value, so it is empty
                    // only when the lower value exceeds the upper.
                    (Inclusive, Inclusive) => lower.value > upper.value,
                    _ => lower.value >= upper.value
                };
                if empty {
                    return Empty;
                }
            }
            _ => {}
        }
        Normal(lower, upper)
    }
    /// Creates a new empty range.
    pub fn empty() -> Range<T> {
        Empty
    }
    /// Determines if this range is the empty range.
    pub fn is_empty(&self) -> bool {
        match *self {
            Empty => true,
            Normal(..) => false
        }
    }
    /// Returns the lower bound if it exists.
    pub fn lower<'a>(&'a self) -> &'a Option<RangeBound<LowerBound, T>> {
        match *self {
            // The empty range has no bounds.
            Empty => &None,
            Normal(ref lower, _) => lower
        }
    }
    /// Returns the upper bound if it exists.
    pub fn upper<'a>(&'a self) -> &'a Option<RangeBound<UpperBound, T>> {
        match *self {
            Empty => &None,
            Normal(_, ref upper) => upper
        }
    }
    /// Determines if a value lies within this range.
    pub fn contains(&self, value: &T) -> bool {
        match *self {
            Empty => false,
            // A missing bound never rejects a value.
            Normal(ref lower, ref upper) => {
                lower.as_ref().map_default(true, |b| b.in_bounds(value)) &&
                    upper.as_ref().map_default(true, |b| b.in_bounds(value))
            }
        }
    }
    /// Determines if a range lies completely within this range.
    pub fn contains_range(&self, other: &Range<T>) -> bool {
        // The empty range is contained by every range; a nonempty range is
        // never contained by the empty range.
        if other.is_empty() {
            return true;
        }
        if self.is_empty() {
            return false;
        }
        // `OptBound` treats a missing bound as infinitely wide on its side.
        OptBound(self.lower()) <= OptBound(other.lower()) &&
            OptBound(self.upper()) >= OptBound(other.upper())
    }
}
impl<T: Ord+Normalizable+Clone> Range<T> {
    /// Returns the intersection of this range with another
    pub fn intersect(&self, other: &Range<T>) -> Range<T> {
        if self.is_empty() || other.is_empty() {
            return Range::empty();
        }
        // The intersection runs from the larger lower bound to the smaller
        // upper bound; `Range::new` handles the case where they cross.
        let OptBound(lower) = cmp::max(OptBound(self.lower()),
                                       OptBound(other.lower()));
        let OptBound(upper) = cmp::min(OptBound(self.upper()),
                                       OptBound(other.upper()));
        Range::new(lower.clone(), upper.clone())
    }
}
#[cfg(test)]
mod test {
    // Unit tests covering bound ordering, bound membership, range
    // containment, normalization, and intersection.
    use super::*;

    // Lower bounds: at equal values an inclusive bound starts earlier
    // (admits more), so Inclusive < Exclusive on the lower side.
    #[test]
    fn test_range_bound_lower_lt() {
        fn check(val1: int, inc1: BoundType, val2: int, inc2: BoundType, expected: bool) {
            let a: RangeBound<LowerBound, int> = RangeBound::new(val1, inc1);
            let b: RangeBound<LowerBound, int> = RangeBound::new(val2, inc2);
            assert_eq!(expected, a < b);
        }
        check(1, Inclusive, 2, Exclusive, true);
        check(1, Exclusive, 2, Inclusive, true);
        check(1, Inclusive, 1, Exclusive, true);
        check(2, Inclusive, 1, Inclusive, false);
        check(2, Exclusive, 1, Exclusive, false);
        check(1, Exclusive, 1, Inclusive, false);
        check(1, Exclusive, 1, Exclusive, false);
        check(1, Inclusive, 1, Inclusive, false);
    }

    // Upper bounds: at equal values an exclusive bound stops earlier
    // (admits less), so Exclusive < Inclusive on the upper side.
    #[test]
    fn test_range_bound_upper_lt() {
        fn check(val1: int, inc1: BoundType, val2: int, inc2: BoundType, expected: bool) {
            let a: RangeBound<UpperBound, int> = RangeBound::new(val1, inc1);
            let b: RangeBound<UpperBound, int> = RangeBound::new(val2, inc2);
            assert_eq!(expected, a < b);
        }
        check(1, Inclusive, 2, Exclusive, true);
        check(1, Exclusive, 2, Exclusive, true);
        check(1, Exclusive, 1, Inclusive, true);
        check(2, Inclusive, 1, Inclusive, false);
        check(2, Exclusive, 1, Exclusive, false);
        check(1, Inclusive, 1, Exclusive, false);
        check(1, Inclusive, 1, Inclusive, false);
        check(1, Exclusive, 1, Exclusive, false);
    }

    // Membership against a single lower bound.
    #[test]
    fn test_range_bound_lower_in_bounds() {
        fn check(bound: int, inc: BoundType, val: int, expected: bool) {
            let b: RangeBound<LowerBound, int> = RangeBound::new(bound, inc);
            assert_eq!(expected, b.in_bounds(&val));
        }
        check(1, Inclusive, 1, true);
        check(1, Exclusive, 1, false);
        check(1, Inclusive, 2, true);
        check(1, Inclusive, 0, false);
    }

    // Membership against a single upper bound.
    #[test]
    fn test_range_bound_upper_in_bounds() {
        fn check(bound: int, inc: BoundType, val: int, expected: bool) {
            let b: RangeBound<UpperBound, int> = RangeBound::new(bound, inc);
            assert_eq!(expected, b.in_bounds(&val));
        }
        check(1, Inclusive, 1, true);
        check(1, Exclusive, 1, false);
        check(1, Inclusive, 2, false);
        check(1, Inclusive, 0, true);
    }

    // Full-range membership for every bracket combination, including
    // half-bounded and unbounded ranges.
    #[test]
    fn test_range_contains() {
        let r = range!('[' 1i32, 3i32 ']');
        assert!(!r.contains(&4));
        assert!(r.contains(&3));
        assert!(r.contains(&2));
        assert!(r.contains(&1));
        assert!(!r.contains(&0));

        let r = range!('(' 1i32, 3i32 ')');
        assert!(!r.contains(&4));
        assert!(!r.contains(&3));
        assert!(r.contains(&2));
        assert!(!r.contains(&1));
        assert!(!r.contains(&0));

        let r = range!('(', 3i32 ']');
        assert!(!r.contains(&4));
        assert!(r.contains(&2));
        assert!(r.contains(&Bounded::min_value()));

        let r = range!('[' 1i32, ')');
        assert!(r.contains(&Bounded::max_value()));
        assert!(r.contains(&4));
        assert!(!r.contains(&0));

        let r = range!('(', ')');
        assert!(r.contains(&Bounded::max_value()));
        assert!(r.contains(&0i32));
        assert!(r.contains(&Bounded::min_value()));
    }

    // Discrete lower bounds normalize to inclusive form.
    #[test]
    fn test_normalize_lower() {
        let r: RangeBound<LowerBound, i32> = RangeBound::new(10i32, Inclusive);
        assert_eq!(RangeBound::new(10i32, Inclusive), Normalizable::normalize(r));
        let r: RangeBound<LowerBound, i32> = RangeBound::new(10i32, Exclusive);
        assert_eq!(RangeBound::new(11i32, Inclusive), Normalizable::normalize(r));
    }

    // Discrete upper bounds normalize to exclusive form.
    #[test]
    fn test_normalize_upper() {
        let r: RangeBound<UpperBound, i32> = RangeBound::new(10i32, Inclusive);
        assert_eq!(RangeBound::new(11i32, Exclusive), Normalizable::normalize(r));
        let r: RangeBound<UpperBound, i32> = RangeBound::new(10i32, Exclusive);
        assert_eq!(RangeBound::new(10i32, Exclusive), Normalizable::normalize(r));
    }

    // (10, 15] and [11, 16) denote the same set of i32 values, so after
    // normalization the ranges compare equal.
    #[test]
    fn test_range_normalizes() {
        let r1 = range!('(' 10i32, 15i32 ']');
        let r2 = range!('[' 11i32, 16i32 ')');
        assert_eq!(r1, r2);
    }

    // Degenerate bound pairs collapse to the canonical empty range.
    #[test]
    fn test_range_empty() {
        assert!((range!('(' 9i32, 10i32 ')')).is_empty());
        assert!((range!('[' 10i32, 10i32 ')')).is_empty());
        assert!((range!('(' 10i32, 10i32 ']')).is_empty());
        assert!((range!('[' 10i32, 9i32 ']')).is_empty());
    }

    #[test]
    fn test_intersection() {
        let r1 = range!('[' 10i32, 15i32 ')');
        let r2 = range!('(' 20i32, 25i32 ']');
        // Disjoint ranges intersect to empty, in either order.
        assert!(r1.intersect(&r2).is_empty());
        assert!(r2.intersect(&r1).is_empty());
        // The unbounded range is the identity for intersection.
        assert_eq!(r1, r1.intersect(&range!('(', ')')));
        assert_eq!(r1, (range!('(', ')')).intersect(&r1));
        let r2 = range!('(' 10i32, ')');
        let exp = Range::new(r2.lower().clone(), r1.upper().clone());
        assert_eq!(exp, r1.intersect(&r2));
        assert_eq!(exp, r2.intersect(&r1));
        let r2 = range!('(', 15i32 ']');
        assert_eq!(r1, r1.intersect(&r2));
        assert_eq!(r1, r2.intersect(&r1));
        let r2 = range!('[' 11i32, 14i32 ')');
        assert_eq!(r2, r1.intersect(&r2));
        assert_eq!(r2, r2.intersect(&r1));
    }

    #[test]
    fn test_contains_range() {
        // Empty contains empty but nothing else contains-related is true
        // of it.
        assert!(Range::<i32>::empty().contains_range(&Range::empty()));
        let r1 = range!('[' 10i32, 15i32 ')');
        // Every range contains itself.
        assert!(r1.contains_range(&r1));
        let r2 = range!('(' 10i32, ')');
        assert!(!r1.contains_range(&r2));
        assert!(!r2.contains_range(&r1));
        let r2 = range!('(', 15i32 ']');
        assert!(!r1.contains_range(&r2));
        assert!(r2.contains_range(&r1));
    }
}
Update for Option API change
//! Types dealing with ranges of values
#[macro_escape];
extern mod extra;
use std::cmp;
use extra::time::Timespec;
// Convenience constructor macro using PostgreSQL-style bound notation:
// '[' / ']' denote inclusive bounds, '(' / ')' exclusive bounds, and a side
// with no expression is unbounded.  e.g. range!('[' 1i32, 5i32 ')') builds
// the half-open range [1, 5), and range!('(', ')') is fully unbounded.
macro_rules! range(
    (empty) => (Range::empty());
    // unbounded on both sides
    ('(', ')') => (Range::new(None, None));
    // unbounded below
    ('(', $h:expr ')') => (
        Range::new(None, Some(RangeBound::new($h, Exclusive)))
    );
    ('(', $h:expr ']') => (
        Range::new(None, Some(RangeBound::new($h, Inclusive)))
    );
    // unbounded above
    ('(' $l:expr, ')') => (
        Range::new(Some(RangeBound::new($l, Exclusive)), None)
    );
    ('[' $l:expr, ')') => (
        Range::new(Some(RangeBound::new($l, Inclusive)), None)
    );
    // bounded on both sides
    ('(' $l:expr, $h:expr ')') => (
        Range::new(Some(RangeBound::new($l, Exclusive)),
                   Some(RangeBound::new($h, Exclusive)))
    );
    ('(' $l:expr, $h:expr ']') => (
        Range::new(Some(RangeBound::new($l, Exclusive)),
                   Some(RangeBound::new($h, Inclusive)))
    );
    ('[' $l:expr, $h:expr ')') => (
        Range::new(Some(RangeBound::new($l, Inclusive)),
                   Some(RangeBound::new($h, Exclusive)))
    );
    ('[' $l:expr, $h:expr ']') => (
        Range::new(Some(RangeBound::new($l, Inclusive)),
                   Some(RangeBound::new($h, Inclusive)))
    )
)
/// A trait that normalizes a range bound for a type.
pub trait Normalizable {
    /// Given a range bound, returns the normalized version of that bound. For
    /// discrete types such as i32, the normalized lower bound is always
    /// inclusive and the normalized upper bound is always exclusive. Other
    /// types, such as Timespec, have no normalization process so their
    /// implementation is a no-op.
    ///
    /// The `S` phantom parameter selects whether this is a lower or upper
    /// bound, which determines the direction of the shift.
    ///
    /// The logic here should match the logic performed by the equivalent
    /// Postgres type.
    fn normalize<S: BoundSided>(bound: RangeBound<S, Self>)
            -> RangeBound<S, Self>;
}
// Implements `Normalizable` for a discrete, `Bounded` integer type: lower
// bounds become inclusive and upper bounds exclusive by shifting the value
// up by one, matching Postgres' canonical form for discrete ranges.
macro_rules! bounded_normalizable(
    ($t:ty) => (
        impl Normalizable for $t {
            fn normalize<S: BoundSided>(bound: RangeBound<S, $t>)
                    -> RangeBound<S, $t> {
                match (BoundSided::side(None::<S>), bound.type_) {
                    // [.., v] -> [.., v + 1): shift up; assert guards overflow
                    (Upper, Inclusive) => {
                        assert!(bound.value != Bounded::max_value());
                        RangeBound::new(bound.value + 1, Exclusive)
                    }
                    // (v, ..] -> [v + 1, ..): shift up; assert guards overflow
                    (Lower, Exclusive) => {
                        assert!(bound.value != Bounded::max_value());
                        RangeBound::new(bound.value + 1, Inclusive)
                    }
                    // already in canonical form
                    _ => bound
                }
            }
        }
    )
)
// Discrete integer types used by Postgres int4range/int8range.
bounded_normalizable!(i32)
bounded_normalizable!(i64)
impl Normalizable for Timespec {
    // Timespec is treated as continuous, so normalization is a no-op.
    fn normalize<S: BoundSided>(bound: RangeBound<S, Timespec>)
            -> RangeBound<S, Timespec> {
        bound
    }
}
/// Discriminates which side of a range a bound sits on.
#[deriving(Eq)]
enum BoundSide {
    Upper,
    Lower
}
/// Implemented by the phantom tag types to recover, at runtime, which side
/// of a range the `S` type parameter denotes.
trait BoundSided {
    // The `Option<Self>` param is a hack to get around the lack of hints
    // for the self type: there is no instance to dispatch on.
    fn side(_: Option<Self>) -> BoundSide;
}
/// A tag type representing an upper bound.
///
/// Uninhabited: used only as a phantom type parameter of `RangeBound`.
#[deriving(Eq,Clone)]
pub enum UpperBound {}
/// A tag type representing a lower bound.
///
/// Uninhabited: used only as a phantom type parameter of `RangeBound`.
#[deriving(Eq,Clone)]
pub enum LowerBound {}
impl BoundSided for UpperBound {
    // The upper tag always reports the upper side.
    fn side(_: Option<UpperBound>) -> BoundSide {
        Upper
    }
}
impl BoundSided for LowerBound {
    // The lower tag always reports the lower side.
    fn side(_: Option<LowerBound>) -> BoundSide {
        Lower
    }
}
/// The type of a range bound: whether the bound's own value is a member of
/// the range.
#[deriving(Eq,Clone)]
pub enum BoundType {
    /// The bound includes its value
    Inclusive,
    /// The bound excludes its value
    Exclusive
}
/// Represents a one-sided bound.
///
/// The side is determined by the `S` phantom parameter (`UpperBound` or
/// `LowerBound`); no value of `S` is ever stored.
#[deriving(Eq,Clone)]
pub struct RangeBound<S, T> {
    /// The value of the bound
    value: T,
    /// The type of the bound
    type_: BoundType
}
impl<S: BoundSided, T: Ord> Ord for RangeBound<S, T> {
    // Ordering is side-aware: a bound is "less" when it sits lower on the
    // number line as a cut point.  At equal values, an exclusive upper
    // bound cuts earlier than an inclusive one, and an inclusive lower
    // bound cuts earlier than an exclusive one — hence `<=` for those two
    // combinations so that ties on value still compare as less-than.
    fn lt(&self, other: &RangeBound<S, T>) -> bool {
        match (BoundSided::side(None::<S>), self.type_, other.type_) {
            (Upper, Exclusive, Inclusive)
            | (Lower, Inclusive, Exclusive) => self.value <= other.value,
            _ => self.value < other.value
        }
    }
}
impl<S: BoundSided, T: Ord> RangeBound<S, T> {
    /// Constructs a new range bound.
    pub fn new(value: T, type_: BoundType) -> RangeBound<S, T> {
        RangeBound { value: value, type_: type_ }
    }

    /// Determines if a value lies within the range specified by this bound.
    pub fn in_bounds(&self, value: &T) -> bool {
        // The comparison direction depends on the bound's side; the bound
        // type decides whether equality counts as in-bounds.
        match (self.type_, BoundSided::side(None::<S>)) {
            (Inclusive, Upper) => value <= &self.value,
            (Exclusive, Upper) => value < &self.value,
            (Inclusive, Lower) => value >= &self.value,
            (Exclusive, Lower) => value > &self.value,
        }
    }
}
// Newtype over an optional borrowed bound; exists to give `None` the
// correct side-dependent ordering semantics via its `Ord` impl.
struct OptBound<'a, S, T>(&'a Option<RangeBound<S, T>>);
impl<'a, S: BoundSided, T: Ord> Ord for OptBound<'a, S, T> {
    fn lt(&self, other: &OptBound<'a, S, T>) -> bool {
        // A missing bound is the weakest bound for its side: effectively
        // negative infinity for lower bounds and positive infinity for
        // upper bounds.
        match (*self, *other) {
            (OptBound(&None), OptBound(&None)) => false,
            // None < x only when None means negative infinity (lower side)
            (OptBound(&None), _) => BoundSided::side(None::<S>) == Lower,
            // x < None only when None means positive infinity (upper side)
            (_, OptBound(&None)) => BoundSided::side(None::<S>) == Upper,
            (OptBound(&Some(ref a)), OptBound(&Some(ref b))) => a < b
        }
    }
}
/// Represents a range of values.
#[deriving(Eq,Clone)]
pub enum Range<T> {
    // Variants are private: construction goes through `new`/`empty`, which
    // normalize bounds and canonicalize empty ranges so the derived `Eq`
    // compares like with like.
    priv Empty,
    priv Normal(Option<RangeBound<LowerBound, T>>,
                Option<RangeBound<UpperBound, T>>)
}
impl<T: Ord+Normalizable> Range<T> {
    /// Creates a new range.
    ///
    /// If a bound is `None`, the range is unbounded in that direction.
    pub fn new(lower: Option<RangeBound<LowerBound, T>>,
               upper: Option<RangeBound<UpperBound, T>>) -> Range<T> {
        // Normalize both bounds first (e.g. for i32 an exclusive lower
        // bound becomes the next inclusive value) so the derived `Eq`
        // compares canonical forms.
        let lower = lower.map(|bound| Normalizable::normalize(bound));
        let upper = upper.map(|bound| Normalizable::normalize(bound));
        match (&lower, &upper) {
            (&Some(ref lower), &Some(ref upper)) => {
                // A doubly-bounded range that can contain no value
                // collapses to the canonical `Empty` variant.
                let empty = match (lower.type_, upper.type_) {
                    // [l, u] is empty only when l > u.
                    (Inclusive, Inclusive) => lower.value > upper.value,
                    // With any exclusive end, empty when l >= u.
                    _ => lower.value >= upper.value
                };
                if empty {
                    return Empty;
                }
            }
            _ => {}
        }
        Normal(lower, upper)
    }

    /// Creates a new empty range, i.e. a range containing no values.
    pub fn empty() -> Range<T> {
        Empty
    }

    /// Determines if this range is the empty range.
    pub fn is_empty(&self) -> bool {
        match *self {
            Empty => true,
            Normal(..) => false
        }
    }

    /// Returns the lower bound if it exists.  Empty has no bounds.
    pub fn lower<'a>(&'a self) -> &'a Option<RangeBound<LowerBound, T>> {
        match *self {
            Empty => &None,
            Normal(ref lower, _) => lower
        }
    }

    /// Returns the upper bound if it exists.  Empty has no bounds.
    pub fn upper<'a>(&'a self) -> &'a Option<RangeBound<UpperBound, T>> {
        match *self {
            Empty => &None,
            Normal(_, ref upper) => upper
        }
    }

    /// Determines if a value lies within this range.
    pub fn contains(&self, value: &T) -> bool {
        match *self {
            Empty => false,
            Normal(ref lower, ref upper) => {
                // A missing bound constrains nothing, hence `true` as the
                // default for the unbounded side.
                lower.as_ref().map_or(true, |b| b.in_bounds(value)) &&
                    upper.as_ref().map_or(true, |b| b.in_bounds(value))
            }
        }
    }

    /// Determines if a range lies completely within this range.
    pub fn contains_range(&self, other: &Range<T>) -> bool {
        // The empty range is a subset of everything...
        if other.is_empty() {
            return true;
        }
        // ...and only the empty range (handled above) fits inside it.
        if self.is_empty() {
            return false;
        }
        // Containment: our lower bound is no tighter than theirs and our
        // upper bound is no tighter than theirs.  `OptBound` orders a
        // missing bound as the weakest one for its side.
        OptBound(self.lower()) <= OptBound(other.lower()) &&
            OptBound(self.upper()) >= OptBound(other.upper())
    }
}
impl<T: Ord+Normalizable+Clone> Range<T> {
    /// Returns the intersection of this range with another.
    pub fn intersect(&self, other: &Range<T>) -> Range<T> {
        // Intersection with the empty range is always empty.
        if self.is_empty() || other.is_empty() {
            return Range::empty();
        }
        // Keep the tighter bound on each side: the larger lower bound and
        // the smaller upper bound, with missing bounds ordered weakest.
        let OptBound(lower) = cmp::max(OptBound(self.lower()),
                                       OptBound(other.lower()));
        let OptBound(upper) = cmp::min(OptBound(self.upper()),
                                       OptBound(other.upper()));
        // `Range::new` re-checks emptiness, handling disjoint inputs.
        Range::new(lower.clone(), upper.clone())
    }
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_range_bound_lower_lt() {
fn check(val1: int, inc1: BoundType, val2: int, inc2: BoundType, expected: bool) {
let a: RangeBound<LowerBound, int> = RangeBound::new(val1, inc1);
let b: RangeBound<LowerBound, int> = RangeBound::new(val2, inc2);
assert_eq!(expected, a < b);
}
check(1, Inclusive, 2, Exclusive, true);
check(1, Exclusive, 2, Inclusive, true);
check(1, Inclusive, 1, Exclusive, true);
check(2, Inclusive, 1, Inclusive, false);
check(2, Exclusive, 1, Exclusive, false);
check(1, Exclusive, 1, Inclusive, false);
check(1, Exclusive, 1, Exclusive, false);
check(1, Inclusive, 1, Inclusive, false);
}
#[test]
fn test_range_bound_upper_lt() {
fn check(val1: int, inc1: BoundType, val2: int, inc2: BoundType, expected: bool) {
let a: RangeBound<UpperBound, int> = RangeBound::new(val1, inc1);
let b: RangeBound<UpperBound, int> = RangeBound::new(val2, inc2);
assert_eq!(expected, a < b);
}
check(1, Inclusive, 2, Exclusive, true);
check(1, Exclusive, 2, Exclusive, true);
check(1, Exclusive, 1, Inclusive, true);
check(2, Inclusive, 1, Inclusive, false);
check(2, Exclusive, 1, Exclusive, false);
check(1, Inclusive, 1, Exclusive, false);
check(1, Inclusive, 1, Inclusive, false);
check(1, Exclusive, 1, Exclusive, false);
}
#[test]
fn test_range_bound_lower_in_bounds() {
fn check(bound: int, inc: BoundType, val: int, expected: bool) {
let b: RangeBound<LowerBound, int> = RangeBound::new(bound, inc);
assert_eq!(expected, b.in_bounds(&val));
}
check(1, Inclusive, 1, true);
check(1, Exclusive, 1, false);
check(1, Inclusive, 2, true);
check(1, Inclusive, 0, false);
}
#[test]
fn test_range_bound_upper_in_bounds() {
fn check(bound: int, inc: BoundType, val: int, expected: bool) {
let b: RangeBound<UpperBound, int> = RangeBound::new(bound, inc);
assert_eq!(expected, b.in_bounds(&val));
}
check(1, Inclusive, 1, true);
check(1, Exclusive, 1, false);
check(1, Inclusive, 2, false);
check(1, Inclusive, 0, true);
}
#[test]
fn test_range_contains() {
let r = range!('[' 1i32, 3i32 ']');
assert!(!r.contains(&4));
assert!(r.contains(&3));
assert!(r.contains(&2));
assert!(r.contains(&1));
assert!(!r.contains(&0));
let r = range!('(' 1i32, 3i32 ')');
assert!(!r.contains(&4));
assert!(!r.contains(&3));
assert!(r.contains(&2));
assert!(!r.contains(&1));
assert!(!r.contains(&0));
let r = range!('(', 3i32 ']');
assert!(!r.contains(&4));
assert!(r.contains(&2));
assert!(r.contains(&Bounded::min_value()));
let r = range!('[' 1i32, ')');
assert!(r.contains(&Bounded::max_value()));
assert!(r.contains(&4));
assert!(!r.contains(&0));
let r = range!('(', ')');
assert!(r.contains(&Bounded::max_value()));
assert!(r.contains(&0i32));
assert!(r.contains(&Bounded::min_value()));
}
#[test]
fn test_normalize_lower() {
let r: RangeBound<LowerBound, i32> = RangeBound::new(10i32, Inclusive);
assert_eq!(RangeBound::new(10i32, Inclusive), Normalizable::normalize(r));
let r: RangeBound<LowerBound, i32> = RangeBound::new(10i32, Exclusive);
assert_eq!(RangeBound::new(11i32, Inclusive), Normalizable::normalize(r));
}
#[test]
fn test_normalize_upper() {
let r: RangeBound<UpperBound, i32> = RangeBound::new(10i32, Inclusive);
assert_eq!(RangeBound::new(11i32, Exclusive), Normalizable::normalize(r));
let r: RangeBound<UpperBound, i32> = RangeBound::new(10i32, Exclusive);
assert_eq!(RangeBound::new(10i32, Exclusive), Normalizable::normalize(r));
}
#[test]
fn test_range_normalizes() {
let r1 = range!('(' 10i32, 15i32 ']');
let r2 = range!('[' 11i32, 16i32 ')');
assert_eq!(r1, r2);
}
#[test]
fn test_range_empty() {
assert!((range!('(' 9i32, 10i32 ')')).is_empty());
assert!((range!('[' 10i32, 10i32 ')')).is_empty());
assert!((range!('(' 10i32, 10i32 ']')).is_empty());
assert!((range!('[' 10i32, 9i32 ']')).is_empty());
}
#[test]
fn test_intersection() {
let r1 = range!('[' 10i32, 15i32 ')');
let r2 = range!('(' 20i32, 25i32 ']');
assert!(r1.intersect(&r2).is_empty());
assert!(r2.intersect(&r1).is_empty());
assert_eq!(r1, r1.intersect(&range!('(', ')')));
assert_eq!(r1, (range!('(', ')')).intersect(&r1));
let r2 = range!('(' 10i32, ')');
let exp = Range::new(r2.lower().clone(), r1.upper().clone());
assert_eq!(exp, r1.intersect(&r2));
assert_eq!(exp, r2.intersect(&r1));
let r2 = range!('(', 15i32 ']');
assert_eq!(r1, r1.intersect(&r2));
assert_eq!(r1, r2.intersect(&r1));
let r2 = range!('[' 11i32, 14i32 ')');
assert_eq!(r2, r1.intersect(&r2));
assert_eq!(r2, r2.intersect(&r1));
}
#[test]
fn test_contains_range() {
assert!(Range::<i32>::empty().contains_range(&Range::empty()));
let r1 = range!('[' 10i32, 15i32 ')');
assert!(r1.contains_range(&r1));
let r2 = range!('(' 10i32, ')');
assert!(!r1.contains_range(&r2));
assert!(!r2.contains_range(&r1));
let r2 = range!('(', 15i32 ']');
assert!(!r1.contains_range(&r2));
assert!(r2.contains_range(&r1));
}
}
|
//! Types dealing with ranges of values
#[macro_escape];
extern mod extra;
use std::cmp;
use extra::time::Timespec;
macro_rules! range(
(empty) => (Range::empty());
('(', ')') => (Range::new(None, None));
('(', $h:expr ')') => (
Range::new(None, Some(RangeBound::new($h, Exclusive)))
);
('(', $h:expr ']') => (
Range::new(None, Some(RangeBound::new($h, Inclusive)))
);
('(' $l:expr, ')') => (
Range::new(Some(RangeBound::new($l, Exclusive)), None)
);
('[' $l:expr, ')') => (
Range::new(Some(RangeBound::new($l, Inclusive)), None)
);
('(' $l:expr, $h:expr ')') => (
Range::new(Some(RangeBound::new($l, Exclusive)),
Some(RangeBound::new($h, Exclusive)))
);
('(' $l:expr, $h:expr ']') => (
Range::new(Some(RangeBound::new($l, Exclusive)),
Some(RangeBound::new($h, Inclusive)))
);
('[' $l:expr, $h:expr ')') => (
Range::new(Some(RangeBound::new($l, Inclusive)),
Some(RangeBound::new($h, Exclusive)))
);
('[' $l:expr, $h:expr ']') => (
Range::new(Some(RangeBound::new($l, Inclusive)),
Some(RangeBound::new($h, Inclusive)))
)
)
/// A trait that normalizes a range bound for a type
pub trait Normalizable {
/// Given a range bound, returns the normalized version of that bound. For
/// discrete types such as i32, the normalized lower bound is always
/// inclusive and the normalized upper bound is always exclusive. Other
/// types, such as Timespec, have no normalization process so their
/// implementation is a no-op.
///
/// The logic here should match the logic performed by the equivalent
/// Postgres type.
fn normalize<S: BoundSided>(bound: RangeBound<S, Self>)
-> RangeBound<S, Self>;
}
macro_rules! bounded_normalizable(
($t:ty) => (
impl Normalizable for $t {
fn normalize<S: BoundSided>(bound: RangeBound<S, $t>)
-> RangeBound<S, $t> {
match (BoundSided::side(None::<S>), bound.type_) {
(Upper, Inclusive) => {
assert!(bound.value != Bounded::max_value());
RangeBound::new(bound.value + 1, Exclusive)
}
(Lower, Exclusive) => {
assert!(bound.value != Bounded::max_value());
RangeBound::new(bound.value + 1, Inclusive)
}
_ => bound
}
}
}
)
)
bounded_normalizable!(i32)
bounded_normalizable!(i64)
impl Normalizable for Timespec {
fn normalize<S: BoundSided>(bound: RangeBound<S, Timespec>)
-> RangeBound<S, Timespec> {
bound
}
}
#[deriving(Eq)]
enum BoundSide {
Upper,
Lower
}
trait BoundSided {
// param is a hack to get around lack of hints for self type
fn side(_: Option<Self>) -> BoundSide;
}
/// A tag type representing an upper bound
#[deriving(Eq,Clone)]
pub enum UpperBound {}
/// A tag type representing a lower bound
#[deriving(Eq,Clone)]
pub enum LowerBound {}
impl BoundSided for UpperBound {
fn side(_: Option<UpperBound>) -> BoundSide {
Upper
}
}
impl BoundSided for LowerBound {
fn side(_: Option<LowerBound>) -> BoundSide {
Lower
}
}
/// The type of a range bound
#[deriving(Eq,Clone)]
pub enum BoundType {
/// The bound includes its value
Inclusive,
/// The bound excludes its value
Exclusive
}
/// Represents a one-sided bound.
///
/// The side is determined by the `S` phantom parameter.
#[deriving(Eq,Clone)]
pub struct RangeBound<S, T> {
/// The value of the bound
value: T,
/// The type of the bound
type_: BoundType
}
impl<S: BoundSided, T: Ord> Ord for RangeBound<S, T> {
fn lt(&self, other: &RangeBound<S, T>) -> bool {
match (BoundSided::side(None::<S>), self.type_, other.type_) {
(Upper, Exclusive, Inclusive)
| (Lower, Inclusive, Exclusive) => self.value <= other.value,
_ => self.value < other.value
}
}
}
impl<S: BoundSided, T: Ord> RangeBound<S, T> {
/// Constructs a new range bound
pub fn new(value: T, type_: BoundType) -> RangeBound<S, T> {
RangeBound { value: value, type_: type_ }
}
/// Determines if a value lies within the range specified by this bound.
pub fn in_bounds(&self, value: &T) -> bool {
match (self.type_, BoundSided::side(None::<S>)) {
(Inclusive, Upper) => value <= &self.value,
(Exclusive, Upper) => value < &self.value,
(Inclusive, Lower) => value >= &self.value,
(Exclusive, Lower) => value > &self.value,
}
}
}
struct OptBound<'a, S, T>(&'a Option<RangeBound<S, T>>);
impl<'a, S: BoundSided, T: Ord> Ord for OptBound<'a, S, T> {
fn lt(&self, other: &OptBound<'a, S, T>) -> bool {
match (*self, *other) {
(OptBound(&None), OptBound(&None)) => false,
(OptBound(&None), _) => BoundSided::side(None::<S>) == Lower,
(_, OptBound(&None)) => BoundSided::side(None::<S>) == Upper,
(OptBound(&Some(ref a)), OptBound(&Some(ref b))) => a < b
}
}
}
/// Represents a range of values.
#[deriving(Eq,Clone)]
pub enum Range<T> {
priv Empty,
priv Normal(Option<RangeBound<LowerBound, T>>,
Option<RangeBound<UpperBound, T>>)
}
impl<T: Ord+Normalizable> Range<T> {
/// Creates a new range.
///
/// If a bound is `None`, the range is unbounded in that direction.
pub fn new(lower: Option<RangeBound<LowerBound, T>>,
upper: Option<RangeBound<UpperBound, T>>) -> Range<T> {
let lower = lower.map(|bound| Normalizable::normalize(bound));
let upper = upper.map(|bound| Normalizable::normalize(bound));
match (&lower, &upper) {
(&Some(ref lower), &Some(ref upper)) => {
let empty = match (lower.type_, upper.type_) {
(Inclusive, Inclusive) => lower.value > upper.value,
_ => lower.value >= upper.value
};
if empty {
return Empty;
}
}
_ => {}
}
Normal(lower, upper)
}
/// Creates a new empty range.
pub fn empty() -> Range<T> {
Empty
}
/// Determines if this range is the empty range.
pub fn is_empty(&self) -> bool {
match *self {
Empty => true,
Normal(..) => false
}
}
/// Returns the lower bound if it exists.
pub fn lower<'a>(&'a self) -> &'a Option<RangeBound<LowerBound, T>> {
match *self {
Empty => &None,
Normal(ref lower, _) => lower
}
}
/// Returns the upper bound if it exists.
pub fn upper<'a>(&'a self) -> &'a Option<RangeBound<UpperBound, T>> {
match *self {
Empty => &None,
Normal(_, ref upper) => upper
}
}
/// Determines if a value lies within this range.
pub fn contains(&self, value: &T) -> bool {
match *self {
Empty => false,
Normal(ref lower, ref upper) => {
lower.as_ref().map_default(true, |b| b.in_bounds(value)) &&
upper.as_ref().map_default(true, |b| b.in_bounds(value))
}
}
}
/// Determines if a range lies completely within this range.
pub fn contains_range(&self, other: &Range<T>) -> bool {
if other.is_empty() {
return true;
}
if self.is_empty() {
return false;
}
OptBound(self.lower()) <= OptBound(other.lower()) &&
OptBound(self.upper()) >= OptBound(other.upper())
}
}
impl<T: Ord+Normalizable+Clone> Range<T> {
/// Returns the intersection of this range with another
pub fn intersect(&self, other: &Range<T>) -> Range<T> {
if self.is_empty() || other.is_empty() {
return Range::empty();
}
let lower = cmp::max(OptBound(self.lower()), OptBound(other.lower()))
.clone();
let upper = cmp::min(OptBound(self.upper()), OptBound(other.upper()))
.clone();
Range::new(lower, upper)
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_range_bound_lower_lt() {
fn check(val1: int, inc1: BoundType, val2: int, inc2: BoundType, expected: bool) {
let a: RangeBound<LowerBound, int> = RangeBound::new(val1, inc1);
let b: RangeBound<LowerBound, int> = RangeBound::new(val2, inc2);
assert_eq!(expected, a < b);
}
check(1, Inclusive, 2, Exclusive, true);
check(1, Exclusive, 2, Inclusive, true);
check(1, Inclusive, 1, Exclusive, true);
check(2, Inclusive, 1, Inclusive, false);
check(2, Exclusive, 1, Exclusive, false);
check(1, Exclusive, 1, Inclusive, false);
check(1, Exclusive, 1, Exclusive, false);
check(1, Inclusive, 1, Inclusive, false);
}
#[test]
fn test_range_bound_upper_lt() {
fn check(val1: int, inc1: BoundType, val2: int, inc2: BoundType, expected: bool) {
let a: RangeBound<UpperBound, int> = RangeBound::new(val1, inc1);
let b: RangeBound<UpperBound, int> = RangeBound::new(val2, inc2);
assert_eq!(expected, a < b);
}
check(1, Inclusive, 2, Exclusive, true);
check(1, Exclusive, 2, Exclusive, true);
check(1, Exclusive, 1, Inclusive, true);
check(2, Inclusive, 1, Inclusive, false);
check(2, Exclusive, 1, Exclusive, false);
check(1, Inclusive, 1, Exclusive, false);
check(1, Inclusive, 1, Inclusive, false);
check(1, Exclusive, 1, Exclusive, false);
}
#[test]
fn test_range_bound_lower_in_bounds() {
fn check(bound: int, inc: BoundType, val: int, expected: bool) {
let b: RangeBound<LowerBound, int> = RangeBound::new(bound, inc);
assert_eq!(expected, b.in_bounds(&val));
}
check(1, Inclusive, 1, true);
check(1, Exclusive, 1, false);
check(1, Inclusive, 2, true);
check(1, Inclusive, 0, false);
}
#[test]
fn test_range_bound_upper_in_bounds() {
fn check(bound: int, inc: BoundType, val: int, expected: bool) {
let b: RangeBound<UpperBound, int> = RangeBound::new(bound, inc);
assert_eq!(expected, b.in_bounds(&val));
}
check(1, Inclusive, 1, true);
check(1, Exclusive, 1, false);
check(1, Inclusive, 2, false);
check(1, Inclusive, 0, true);
}
#[test]
fn test_range_contains() {
let r = range!('[' 1i32, 3i32 ']');
assert!(!r.contains(&4));
assert!(r.contains(&3));
assert!(r.contains(&2));
assert!(r.contains(&1));
assert!(!r.contains(&0));
let r = range!('(' 1i32, 3i32 ')');
assert!(!r.contains(&4));
assert!(!r.contains(&3));
assert!(r.contains(&2));
assert!(!r.contains(&1));
assert!(!r.contains(&0));
let r = range!('(', 3i32 ']');
assert!(!r.contains(&4));
assert!(r.contains(&2));
assert!(r.contains(&Bounded::min_value()));
let r = range!('[' 1i32, ')');
assert!(r.contains(&Bounded::max_value()));
assert!(r.contains(&4));
assert!(!r.contains(&0));
let r = range!('(', ')');
assert!(r.contains(&Bounded::max_value()));
assert!(r.contains(&0i32));
assert!(r.contains(&Bounded::min_value()));
}
#[test]
fn test_normalize_lower() {
let r: RangeBound<LowerBound, i32> = RangeBound::new(10i32, Inclusive);
assert_eq!(RangeBound::new(10i32, Inclusive), Normalizable::normalize(r));
let r: RangeBound<LowerBound, i32> = RangeBound::new(10i32, Exclusive);
assert_eq!(RangeBound::new(11i32, Inclusive), Normalizable::normalize(r));
}
#[test]
fn test_normalize_upper() {
let r: RangeBound<UpperBound, i32> = RangeBound::new(10i32, Inclusive);
assert_eq!(RangeBound::new(11i32, Exclusive), Normalizable::normalize(r));
let r: RangeBound<UpperBound, i32> = RangeBound::new(10i32, Exclusive);
assert_eq!(RangeBound::new(10i32, Exclusive), Normalizable::normalize(r));
}
#[test]
fn test_range_normalizes() {
let r1 = range!('(' 10i32, 15i32 ']');
let r2 = range!('[' 11i32, 16i32 ')');
assert_eq!(r1, r2);
}
#[test]
fn test_range_empty() {
assert!((range!('(' 9i32, 10i32 ')')).is_empty());
assert!((range!('[' 10i32, 10i32 ')')).is_empty());
assert!((range!('(' 10i32, 10i32 ']')).is_empty());
assert!((range!('[' 10i32, 9i32 ']')).is_empty());
}
#[test]
fn test_intersection() {
let r1 = range!('[' 10i32, 15i32 ')');
let r2 = range!('(' 20i32, 25i32 ']');
assert!(r1.intersect(&r2).is_empty());
assert!(r2.intersect(&r1).is_empty());
assert_eq!(r1, r1.intersect(&range!('(', ')')));
assert_eq!(r1, (range!('(', ')')).intersect(&r1));
let r2 = range!('(' 10i32, ')');
let exp = Range::new(r2.lower().clone(), r1.upper().clone());
assert_eq!(exp, r1.intersect(&r2));
assert_eq!(exp, r2.intersect(&r1));
let r2 = range!('(', 15i32 ']');
assert_eq!(r1, r1.intersect(&r2));
assert_eq!(r1, r2.intersect(&r1));
let r2 = range!('[' 11i32, 14i32 ')');
assert_eq!(r2, r1.intersect(&r2));
assert_eq!(r2, r2.intersect(&r1));
}
#[test]
fn test_contains_range() {
assert!(Range::<i32>::empty().contains_range(&Range::empty()));
let r1 = range!('[' 10i32, 15i32 ')');
assert!(r1.contains_range(&r1));
let r2 = range!('(' 10i32, ')');
assert!(!r1.contains_range(&r2));
assert!(!r2.contains_range(&r1));
let r2 = range!('(', 15i32 ']');
assert!(!r1.contains_range(&r2));
assert!(r2.contains_range(&r1));
}
}
Update for removal of newtype deref
//! Types dealing with ranges of values
#[macro_escape];
extern mod extra;
use std::cmp;
use extra::time::Timespec;
macro_rules! range(
(empty) => (Range::empty());
('(', ')') => (Range::new(None, None));
('(', $h:expr ')') => (
Range::new(None, Some(RangeBound::new($h, Exclusive)))
);
('(', $h:expr ']') => (
Range::new(None, Some(RangeBound::new($h, Inclusive)))
);
('(' $l:expr, ')') => (
Range::new(Some(RangeBound::new($l, Exclusive)), None)
);
('[' $l:expr, ')') => (
Range::new(Some(RangeBound::new($l, Inclusive)), None)
);
('(' $l:expr, $h:expr ')') => (
Range::new(Some(RangeBound::new($l, Exclusive)),
Some(RangeBound::new($h, Exclusive)))
);
('(' $l:expr, $h:expr ']') => (
Range::new(Some(RangeBound::new($l, Exclusive)),
Some(RangeBound::new($h, Inclusive)))
);
('[' $l:expr, $h:expr ')') => (
Range::new(Some(RangeBound::new($l, Inclusive)),
Some(RangeBound::new($h, Exclusive)))
);
('[' $l:expr, $h:expr ']') => (
Range::new(Some(RangeBound::new($l, Inclusive)),
Some(RangeBound::new($h, Inclusive)))
)
)
/// A trait that normalizes a range bound for a type
pub trait Normalizable {
/// Given a range bound, returns the normalized version of that bound. For
/// discrete types such as i32, the normalized lower bound is always
/// inclusive and the normalized upper bound is always exclusive. Other
/// types, such as Timespec, have no normalization process so their
/// implementation is a no-op.
///
/// The logic here should match the logic performed by the equivalent
/// Postgres type.
fn normalize<S: BoundSided>(bound: RangeBound<S, Self>)
-> RangeBound<S, Self>;
}
macro_rules! bounded_normalizable(
($t:ty) => (
impl Normalizable for $t {
fn normalize<S: BoundSided>(bound: RangeBound<S, $t>)
-> RangeBound<S, $t> {
match (BoundSided::side(None::<S>), bound.type_) {
(Upper, Inclusive) => {
assert!(bound.value != Bounded::max_value());
RangeBound::new(bound.value + 1, Exclusive)
}
(Lower, Exclusive) => {
assert!(bound.value != Bounded::max_value());
RangeBound::new(bound.value + 1, Inclusive)
}
_ => bound
}
}
}
)
)
bounded_normalizable!(i32)
bounded_normalizable!(i64)
impl Normalizable for Timespec {
fn normalize<S: BoundSided>(bound: RangeBound<S, Timespec>)
-> RangeBound<S, Timespec> {
bound
}
}
#[deriving(Eq)]
enum BoundSide {
Upper,
Lower
}
trait BoundSided {
// param is a hack to get around lack of hints for self type
fn side(_: Option<Self>) -> BoundSide;
}
/// A tag type representing an upper bound
#[deriving(Eq,Clone)]
pub enum UpperBound {}
/// A tag type representing a lower bound
#[deriving(Eq,Clone)]
pub enum LowerBound {}
impl BoundSided for UpperBound {
fn side(_: Option<UpperBound>) -> BoundSide {
Upper
}
}
impl BoundSided for LowerBound {
fn side(_: Option<LowerBound>) -> BoundSide {
Lower
}
}
/// The type of a range bound
#[deriving(Eq,Clone)]
pub enum BoundType {
/// The bound includes its value
Inclusive,
/// The bound excludes its value
Exclusive
}
/// Represents a one-sided bound.
///
/// The side is determined by the `S` phantom parameter.
#[deriving(Eq,Clone)]
pub struct RangeBound<S, T> {
/// The value of the bound
value: T,
/// The type of the bound
type_: BoundType
}
impl<S: BoundSided, T: Ord> Ord for RangeBound<S, T> {
fn lt(&self, other: &RangeBound<S, T>) -> bool {
match (BoundSided::side(None::<S>), self.type_, other.type_) {
(Upper, Exclusive, Inclusive)
| (Lower, Inclusive, Exclusive) => self.value <= other.value,
_ => self.value < other.value
}
}
}
impl<S: BoundSided, T: Ord> RangeBound<S, T> {
    /// Constructs a new range bound
    pub fn new(value: T, type_: BoundType) -> RangeBound<S, T> {
        RangeBound { value: value, type_: type_ }
    }
    /// Determines if a value lies within the range specified by this bound.
    ///
    /// The comparison direction depends on which side (`S`) this bound
    /// guards: an upper bound admits values below it, a lower bound admits
    /// values above it; Inclusive additionally admits the bound itself.
    pub fn in_bounds(&self, value: &T) -> bool {
        match (self.type_, BoundSided::side(None::<S>)) {
            (Inclusive, Upper) => value <= &self.value,
            (Exclusive, Upper) => value < &self.value,
            (Inclusive, Lower) => value >= &self.value,
            (Exclusive, Lower) => value > &self.value,
        }
    }
}
/// Newtype over an optional bound so that "no bound" participates in
/// ordering: an absent lower bound behaves like -infinity and an absent
/// upper bound like +infinity.
struct OptBound<'a, S, T>(&'a Option<RangeBound<S, T>>);
impl<'a, S: BoundSided, T: Ord> Ord for OptBound<'a, S, T> {
    fn lt(&self, other: &OptBound<'a, S, T>) -> bool {
        match (*self, *other) {
            // Two missing bounds are equal, never less-than.
            (OptBound(&None), OptBound(&None)) => false,
            // A missing bound is smaller than any present bound only on
            // the lower side (-infinity); larger on the upper side.
            (OptBound(&None), _) => BoundSided::side(None::<S>) == Lower,
            (_, OptBound(&None)) => BoundSided::side(None::<S>) == Upper,
            (OptBound(&Some(ref a)), OptBound(&Some(ref b))) => a < b
        }
    }
}
/// Represents a range of values.
#[deriving(Eq,Clone)]
pub enum Range<T> {
    /// The canonical empty range (all empty ranges compare equal).
    priv Empty,
    /// A possibly half- or fully-unbounded range; `None` means unbounded
    /// in that direction.
    priv Normal(Option<RangeBound<LowerBound, T>>,
                Option<RangeBound<UpperBound, T>>)
}
impl<T: Ord+Normalizable> Range<T> {
    /// Creates a new range.
    ///
    /// If a bound is `None`, the range is unbounded in that direction.
    pub fn new(lower: Option<RangeBound<LowerBound, T>>,
               upper: Option<RangeBound<UpperBound, T>>) -> Range<T> {
        // Normalize first so emptiness detection and equality comparisons
        // see a canonical form (e.g. (10, x] becomes [11, x] for ints).
        let lower = lower.map(|bound| Normalizable::normalize(bound));
        let upper = upper.map(|bound| Normalizable::normalize(bound));
        match (&lower, &upper) {
            (&Some(ref lower), &Some(ref upper)) => {
                // [a, b] is empty only when a > b; for any other bound
                // combination a == b already yields an empty range.
                let empty = match (lower.type_, upper.type_) {
                    (Inclusive, Inclusive) => lower.value > upper.value,
                    _ => lower.value >= upper.value
                };
                if empty {
                    return Empty;
                }
            }
            _ => {}
        }
        Normal(lower, upper)
    }
    /// Creates a new empty range.
    pub fn empty() -> Range<T> {
        Empty
    }
    /// Determines if this range is the empty range.
    pub fn is_empty(&self) -> bool {
        match *self {
            Empty => true,
            Normal(..) => false
        }
    }
    /// Returns the lower bound if it exists.
    pub fn lower<'a>(&'a self) -> &'a Option<RangeBound<LowerBound, T>> {
        match *self {
            Empty => &None,
            Normal(ref lower, _) => lower
        }
    }
    /// Returns the upper bound if it exists.
    pub fn upper<'a>(&'a self) -> &'a Option<RangeBound<UpperBound, T>> {
        match *self {
            Empty => &None,
            Normal(_, ref upper) => upper
        }
    }
    /// Determines if a value lies within this range.
    pub fn contains(&self, value: &T) -> bool {
        match *self {
            Empty => false,
            Normal(ref lower, ref upper) => {
                // A missing bound admits every value (unbounded side).
                lower.as_ref().map_default(true, |b| b.in_bounds(value)) &&
                    upper.as_ref().map_default(true, |b| b.in_bounds(value))
            }
        }
    }
    /// Determines if a range lies completely within this range.
    pub fn contains_range(&self, other: &Range<T>) -> bool {
        // The empty range is contained in everything; nothing non-empty
        // is contained in the empty range.
        if other.is_empty() {
            return true;
        }
        if self.is_empty() {
            return false;
        }
        // OptBound ordering treats a missing lower bound as -infinity and
        // a missing upper bound as +infinity, so containment is simply
        // "my bounds are at least as loose as theirs".
        OptBound(self.lower()) <= OptBound(other.lower()) &&
            OptBound(self.upper()) >= OptBound(other.upper())
    }
}
impl<T: Ord+Normalizable+Clone> Range<T> {
    /// Returns the intersection of this range with another
    pub fn intersect(&self, other: &Range<T>) -> Range<T> {
        if self.is_empty() || other.is_empty() {
            return Range::empty();
        }
        // The tighter (greater) lower bound and the tighter (smaller)
        // upper bound delimit the intersection; `Range::new` handles the
        // case where they cross and yields the empty range.
        let OptBound(lower) = cmp::max(OptBound(self.lower()),
                                       OptBound(other.lower()));
        let OptBound(upper) = cmp::min(OptBound(self.upper()),
                                       OptBound(other.upper()));
        Range::new(lower.clone(), upper.clone())
    }
}
#[cfg(test)]
mod test {
    // Unit tests for RangeBound and Range. Tests below use the `range!`
    // macro, which is defined elsewhere in this file.
    use super::*;
    // Lower bounds: a bound is "less" when it admits strictly more values.
    #[test]
    fn test_range_bound_lower_lt() {
        fn check(val1: int, inc1: BoundType, val2: int, inc2: BoundType, expected: bool) {
            let a: RangeBound<LowerBound, int> = RangeBound::new(val1, inc1);
            let b: RangeBound<LowerBound, int> = RangeBound::new(val2, inc2);
            assert_eq!(expected, a < b);
        }
        check(1, Inclusive, 2, Exclusive, true);
        check(1, Exclusive, 2, Inclusive, true);
        check(1, Inclusive, 1, Exclusive, true);
        check(2, Inclusive, 1, Inclusive, false);
        check(2, Exclusive, 1, Exclusive, false);
        check(1, Exclusive, 1, Inclusive, false);
        check(1, Exclusive, 1, Exclusive, false);
        check(1, Inclusive, 1, Inclusive, false);
    }
    // Upper bounds: the exclusive bound at v sits below the inclusive one.
    #[test]
    fn test_range_bound_upper_lt() {
        fn check(val1: int, inc1: BoundType, val2: int, inc2: BoundType, expected: bool) {
            let a: RangeBound<UpperBound, int> = RangeBound::new(val1, inc1);
            let b: RangeBound<UpperBound, int> = RangeBound::new(val2, inc2);
            assert_eq!(expected, a < b);
        }
        check(1, Inclusive, 2, Exclusive, true);
        check(1, Exclusive, 2, Exclusive, true);
        check(1, Exclusive, 1, Inclusive, true);
        check(2, Inclusive, 1, Inclusive, false);
        check(2, Exclusive, 1, Exclusive, false);
        check(1, Inclusive, 1, Exclusive, false);
        check(1, Inclusive, 1, Inclusive, false);
        check(1, Exclusive, 1, Exclusive, false);
    }
    #[test]
    fn test_range_bound_lower_in_bounds() {
        fn check(bound: int, inc: BoundType, val: int, expected: bool) {
            let b: RangeBound<LowerBound, int> = RangeBound::new(bound, inc);
            assert_eq!(expected, b.in_bounds(&val));
        }
        check(1, Inclusive, 1, true);
        check(1, Exclusive, 1, false);
        check(1, Inclusive, 2, true);
        check(1, Inclusive, 0, false);
    }
    #[test]
    fn test_range_bound_upper_in_bounds() {
        fn check(bound: int, inc: BoundType, val: int, expected: bool) {
            let b: RangeBound<UpperBound, int> = RangeBound::new(bound, inc);
            assert_eq!(expected, b.in_bounds(&val));
        }
        check(1, Inclusive, 1, true);
        check(1, Exclusive, 1, false);
        check(1, Inclusive, 2, false);
        check(1, Inclusive, 0, true);
    }
    // Covers closed, open, half-unbounded and fully-unbounded ranges.
    #[test]
    fn test_range_contains() {
        let r = range!('[' 1i32, 3i32 ']');
        assert!(!r.contains(&4));
        assert!(r.contains(&3));
        assert!(r.contains(&2));
        assert!(r.contains(&1));
        assert!(!r.contains(&0));
        let r = range!('(' 1i32, 3i32 ')');
        assert!(!r.contains(&4));
        assert!(!r.contains(&3));
        assert!(r.contains(&2));
        assert!(!r.contains(&1));
        assert!(!r.contains(&0));
        let r = range!('(', 3i32 ']');
        assert!(!r.contains(&4));
        assert!(r.contains(&2));
        assert!(r.contains(&Bounded::min_value()));
        let r = range!('[' 1i32, ')');
        assert!(r.contains(&Bounded::max_value()));
        assert!(r.contains(&4));
        assert!(!r.contains(&0));
        let r = range!('(', ')');
        assert!(r.contains(&Bounded::max_value()));
        assert!(r.contains(&0i32));
        assert!(r.contains(&Bounded::min_value()));
    }
    // Lower-exclusive (10 becomes lower-inclusive 11 after normalization.
    #[test]
    fn test_normalize_lower() {
        let r: RangeBound<LowerBound, i32> = RangeBound::new(10i32, Inclusive);
        assert_eq!(RangeBound::new(10i32, Inclusive), Normalizable::normalize(r));
        let r: RangeBound<LowerBound, i32> = RangeBound::new(10i32, Exclusive);
        assert_eq!(RangeBound::new(11i32, Inclusive), Normalizable::normalize(r));
    }
    // Upper-inclusive 10] becomes upper-exclusive 11) after normalization.
    #[test]
    fn test_normalize_upper() {
        let r: RangeBound<UpperBound, i32> = RangeBound::new(10i32, Inclusive);
        assert_eq!(RangeBound::new(11i32, Exclusive), Normalizable::normalize(r));
        let r: RangeBound<UpperBound, i32> = RangeBound::new(10i32, Exclusive);
        assert_eq!(RangeBound::new(10i32, Exclusive), Normalizable::normalize(r));
    }
    // Equal-after-normalization ranges must compare equal.
    #[test]
    fn test_range_normalizes() {
        let r1 = range!('(' 10i32, 15i32 ']');
        let r2 = range!('[' 11i32, 16i32 ')');
        assert_eq!(r1, r2);
    }
    #[test]
    fn test_range_empty() {
        assert!((range!('(' 9i32, 10i32 ')')).is_empty());
        assert!((range!('[' 10i32, 10i32 ')')).is_empty());
        assert!((range!('(' 10i32, 10i32 ']')).is_empty());
        assert!((range!('[' 10i32, 9i32 ']')).is_empty());
    }
    // Intersection is symmetric, so each case is checked in both orders.
    #[test]
    fn test_intersection() {
        let r1 = range!('[' 10i32, 15i32 ')');
        let r2 = range!('(' 20i32, 25i32 ']');
        assert!(r1.intersect(&r2).is_empty());
        assert!(r2.intersect(&r1).is_empty());
        assert_eq!(r1, r1.intersect(&range!('(', ')')));
        assert_eq!(r1, (range!('(', ')')).intersect(&r1));
        let r2 = range!('(' 10i32, ')');
        let exp = Range::new(r2.lower().clone(), r1.upper().clone());
        assert_eq!(exp, r1.intersect(&r2));
        assert_eq!(exp, r2.intersect(&r1));
        let r2 = range!('(', 15i32 ']');
        assert_eq!(r1, r1.intersect(&r2));
        assert_eq!(r1, r2.intersect(&r1));
        let r2 = range!('[' 11i32, 14i32 ')');
        assert_eq!(r2, r1.intersect(&r2));
        assert_eq!(r2, r2.intersect(&r1));
    }
    #[test]
    fn test_contains_range() {
        assert!(Range::<i32>::empty().contains_range(&Range::empty()));
        let r1 = range!('[' 10i32, 15i32 ')');
        assert!(r1.contains_range(&r1));
        let r2 = range!('(' 10i32, ')');
        assert!(!r1.contains_range(&r2));
        assert!(!r2.contains_range(&r1));
        let r2 = range!('(', 15i32 ']');
        assert!(!r1.contains_range(&r2));
        assert!(r2.contains_range(&r1));
    }
}
|
use namespace::Namespace;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::traits::{self, IntercrateMode, FutureCompatOverlapErrorKind};
use rustc::ty::TyCtxt;
use rustc::ty::relate::TraitObjectMode;
use lint;
/// Entry point: walks every item-like in the local crate and checks its
/// inherent impls for overlapping (duplicate) associated items.
///
/// Only the local crate can be checked, which is asserted below.
pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                                    crate_num: CrateNum) {
    assert_eq!(crate_num, LOCAL_CRATE);
    let krate = tcx.hir().krate();
    krate.visit_all_item_likes(&mut InherentOverlapChecker { tcx });
}
/// Visitor that reports duplicate associated-item definitions between
/// overlapping inherent impls of the same type.
struct InherentOverlapChecker<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>
}
impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
    /// Emits one diagnostic per associated item that is defined with the
    /// same name, in the same namespace, in both `impl1` and `impl2`.
    ///
    /// When `used_to_be_allowed` is `Some(kind)` and `impl1` is local, the
    /// overlap is reported as a future-compatibility lint for that issue;
    /// otherwise it is a hard E0592 error.
    fn check_for_common_items_in_impls(
        &self, impl1: DefId, impl2: DefId,
        overlap: traits::OverlapResult,
        used_to_be_allowed: Option<FutureCompatOverlapErrorKind>)
    {
        let name_and_namespace = |def_id| {
            let item = self.tcx.associated_item(def_id);
            (item.ident, Namespace::from(item.kind))
        };
        let impl_items1 = self.tcx.associated_item_def_ids(impl1);
        let impl_items2 = self.tcx.associated_item_def_ids(impl2);
        for &item1 in &impl_items1[..] {
            let (name, namespace) = name_and_namespace(item1);
            for &item2 in &impl_items2[..] {
                if (name, namespace) == name_and_namespace(item2) {
                    let node_id = self.tcx.hir().as_local_node_id(impl1);
                    let mut err = match used_to_be_allowed {
                        Some(kind) if node_id.is_some() => {
                            // Future-compat path: select the builtin lint
                            // that matches the tracked issue.
                            let lint = match kind {
                                FutureCompatOverlapErrorKind::Issue43355 =>
                                    lint::builtin::INCOHERENT_FUNDAMENTAL_IMPLS,
                                FutureCompatOverlapErrorKind::Issue33140 =>
                                    lint::builtin::ORDER_DEPENDENT_TRAIT_OBJECTS,
                            };
                            self.tcx.struct_span_lint_node(
                                lint,
                                node_id.unwrap(),
                                self.tcx.span_of_impl(item1).unwrap(),
                                &format!("duplicate definitions with name `{}` (E0592)", name)
                            )
                        }
                        // Hard error: either no future-compat kind, or the
                        // impl is not local (no node id for a lint).
                        _ => {
                            struct_span_err!(self.tcx.sess,
                                             self.tcx.span_of_impl(item1).unwrap(),
                                             E0592,
                                             "duplicate definitions with name `{}`",
                                             name)
                        }
                    };
                    err.span_label(self.tcx.span_of_impl(item1).unwrap(),
                                   format!("duplicate definitions for `{}`", name));
                    err.span_label(self.tcx.span_of_impl(item2).unwrap(),
                                   format!("other definition for `{}`", name));
                    for cause in &overlap.intercrate_ambiguity_causes {
                        cause.add_intercrate_ambiguity_hint(&mut err);
                    }
                    if overlap.involves_placeholder {
                        traits::add_placeholder_note(&mut err);
                    }
                    err.emit();
                }
            }
        }
    }
    /// Checks every unordered pair of inherent impls of `ty_def_id`,
    /// escalating through three coherence configurations: the old rules
    /// (hard error), then #43355, then #33140 (future-compat lints).
    ///
    /// `overlapping_impls` returns the result of whichever closure fires:
    /// `false` when an overlap was found (and reported), `true` otherwise,
    /// so `!used_to_be_allowed` means "already reported, stop escalating".
    fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
        let impls = self.tcx.inherent_impls(ty_def_id);
        for (i, &impl1_def_id) in impls.iter().enumerate() {
            for &impl2_def_id in &impls[(i + 1)..] {
                // First, check if the impl was forbidden under the
                // old rules. In that case, just have an error.
                let used_to_be_allowed = traits::overlapping_impls(
                    self.tcx,
                    impl1_def_id,
                    impl2_def_id,
                    IntercrateMode::Issue43355,
                    TraitObjectMode::NoSquash,
                    |overlap| {
                        self.check_for_common_items_in_impls(
                            impl1_def_id,
                            impl2_def_id,
                            overlap,
                            None,
                        );
                        false
                    },
                    || true,
                );
                if !used_to_be_allowed {
                    continue;
                }
                // Then, check if the impl was forbidden under only
                // #43355. In that case, emit an #43355 error.
                let used_to_be_allowed = traits::overlapping_impls(
                    self.tcx,
                    impl1_def_id,
                    impl2_def_id,
                    IntercrateMode::Fixed,
                    TraitObjectMode::NoSquash,
                    |overlap| {
                        self.check_for_common_items_in_impls(
                            impl1_def_id,
                            impl2_def_id,
                            overlap,
                            Some(FutureCompatOverlapErrorKind::Issue43355),
                        );
                        false
                    },
                    || true,
                );
                if !used_to_be_allowed {
                    continue;
                }
                // Then, check if the impl was forbidden under
                // #33140. In that case, emit a #33140 error.
                traits::overlapping_impls(
                    self.tcx,
                    impl1_def_id,
                    impl2_def_id,
                    IntercrateMode::Fixed,
                    TraitObjectMode::SquashAutoTraitsIssue33140,
                    |overlap| {
                        self.check_for_common_items_in_impls(
                            impl1_def_id,
                            impl2_def_id,
                            overlap,
                            Some(FutureCompatOverlapErrorKind::Issue33140),
                        );
                        false
                    },
                    || true,
                );
            }
        }
    }
}
impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
    /// Runs the inherent-impl overlap check for every nominal type
    /// declaration (enum, struct, trait, union); other items are ignored.
    fn visit_item(&mut self, item: &'v hir::Item) {
        let is_nominal_type = match item.node {
            hir::ItemKind::Enum(..)
            | hir::ItemKind::Struct(..)
            | hir::ItemKind::Trait(..)
            | hir::ItemKind::Union(..) => true,
            _ => false,
        };
        if is_nominal_type {
            let type_def_id = self.tcx.hir().local_def_id(item.id);
            self.check_for_overlapping_inherent_impls(type_def_id);
        }
    }
    // Trait and impl items carry no inherent impls of their own.
    fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
    }
    fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
    }
}
Revert "fix stupid bug"
This reverts commit 6e4b2b3ae79770c7ccfcdbfc90dc34fe47ec5f09.
use namespace::Namespace;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::traits::{self, IntercrateMode, FutureCompatOverlapErrorKind};
use rustc::ty::TyCtxt;
use rustc::ty::relate::TraitObjectMode;
use lint;
/// Entry point: walks every item-like in the local crate and checks its
/// inherent impls for overlapping (duplicate) associated items.
///
/// Only the local crate can be checked, which is asserted below.
pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                                    crate_num: CrateNum) {
    assert_eq!(crate_num, LOCAL_CRATE);
    let krate = tcx.hir().krate();
    krate.visit_all_item_likes(&mut InherentOverlapChecker { tcx });
}
/// Visitor that reports duplicate associated-item definitions between
/// overlapping inherent impls of the same type.
struct InherentOverlapChecker<'a, 'tcx: 'a> {
    tcx: TyCtxt<'a, 'tcx, 'tcx>
}
impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
    /// Emits one diagnostic per associated item that is defined with the
    /// same name, in the same namespace, in both `impl1` and `impl2`.
    ///
    /// When `used_to_be_allowed` is `Some(kind)` and `impl1` is local, the
    /// overlap is reported as a future-compatibility lint for that issue;
    /// otherwise it is a hard E0592 error.
    fn check_for_common_items_in_impls(
        &self, impl1: DefId, impl2: DefId,
        overlap: traits::OverlapResult,
        used_to_be_allowed: Option<FutureCompatOverlapErrorKind>)
    {
        let name_and_namespace = |def_id| {
            let item = self.tcx.associated_item(def_id);
            (item.ident, Namespace::from(item.kind))
        };
        let impl_items1 = self.tcx.associated_item_def_ids(impl1);
        let impl_items2 = self.tcx.associated_item_def_ids(impl2);
        for &item1 in &impl_items1[..] {
            let (name, namespace) = name_and_namespace(item1);
            for &item2 in &impl_items2[..] {
                if (name, namespace) == name_and_namespace(item2) {
                    let node_id = self.tcx.hir().as_local_node_id(impl1);
                    let mut err = match used_to_be_allowed {
                        Some(kind) if node_id.is_some() => {
                            // Future-compat path: select the builtin lint
                            // that matches the tracked issue.
                            let lint = match kind {
                                FutureCompatOverlapErrorKind::Issue43355 =>
                                    lint::builtin::INCOHERENT_FUNDAMENTAL_IMPLS,
                                FutureCompatOverlapErrorKind::Issue33140 =>
                                    lint::builtin::ORDER_DEPENDENT_TRAIT_OBJECTS,
                            };
                            self.tcx.struct_span_lint_node(
                                lint,
                                node_id.unwrap(),
                                self.tcx.span_of_impl(item1).unwrap(),
                                &format!("duplicate definitions with name `{}` (E0592)", name)
                            )
                        }
                        // Hard error: either no future-compat kind, or the
                        // impl is not local (no node id for a lint).
                        _ => {
                            struct_span_err!(self.tcx.sess,
                                             self.tcx.span_of_impl(item1).unwrap(),
                                             E0592,
                                             "duplicate definitions with name `{}`",
                                             name)
                        }
                    };
                    err.span_label(self.tcx.span_of_impl(item1).unwrap(),
                                   format!("duplicate definitions for `{}`", name));
                    err.span_label(self.tcx.span_of_impl(item2).unwrap(),
                                   format!("other definition for `{}`", name));
                    for cause in &overlap.intercrate_ambiguity_causes {
                        cause.add_intercrate_ambiguity_hint(&mut err);
                    }
                    if overlap.involves_placeholder {
                        traits::add_placeholder_note(&mut err);
                    }
                    err.emit();
                }
            }
        }
    }
    /// Checks every unordered pair of inherent impls of `ty_def_id`,
    /// escalating through three coherence configurations; each closure
    /// returns `false` on a reported overlap, and `|| true` means "no
    /// overlap here, keep escalating".
    ///
    /// NOTE(review): the Issue43355 intercrate mode reports with kind
    /// `Some(Issue43355)` while the Fixed mode reports with `None` — the
    /// opposite pairing from the other copy of this file in this dump.
    /// Verify which lint-kind/mode pairing is intended.
    fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
        let impls = self.tcx.inherent_impls(ty_def_id);
        for (i, &impl1_def_id) in impls.iter().enumerate() {
            for &impl2_def_id in &impls[(i + 1)..] {
                let mut used_to_be_allowed = traits::overlapping_impls(
                    self.tcx,
                    impl1_def_id,
                    impl2_def_id,
                    IntercrateMode::Issue43355,
                    TraitObjectMode::NoSquash,
                    |overlap| {
                        self.check_for_common_items_in_impls(
                            impl1_def_id,
                            impl2_def_id,
                            overlap,
                            Some(FutureCompatOverlapErrorKind::Issue43355),
                        );
                        false
                    },
                    || true,
                );
                if used_to_be_allowed {
                    used_to_be_allowed = traits::overlapping_impls(
                        self.tcx,
                        impl1_def_id,
                        impl2_def_id,
                        IntercrateMode::Fixed,
                        TraitObjectMode::NoSquash,
                        |overlap| {
                            self.check_for_common_items_in_impls(
                                impl1_def_id,
                                impl2_def_id,
                                overlap,
                                None,
                            );
                            false
                        },
                        || true,
                    );
                }
                if used_to_be_allowed {
                    traits::overlapping_impls(
                        self.tcx,
                        impl1_def_id,
                        impl2_def_id,
                        IntercrateMode::Fixed,
                        TraitObjectMode::SquashAutoTraitsIssue33140,
                        |overlap| {
                            self.check_for_common_items_in_impls(
                                impl1_def_id,
                                impl2_def_id,
                                overlap,
                                Some(FutureCompatOverlapErrorKind::Issue33140),
                            );
                            false
                        },
                        || true,
                    );
                }
            }
        }
    }
}
impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
    /// Runs the inherent-impl overlap check for every nominal type
    /// declaration (enum, struct, trait, union); other items are ignored.
    fn visit_item(&mut self, item: &'v hir::Item) {
        match item.node {
            hir::ItemKind::Enum(..) |
            hir::ItemKind::Struct(..) |
            hir::ItemKind::Trait(..) |
            hir::ItemKind::Union(..) => {
                let type_def_id = self.tcx.hir().local_def_id(item.id);
                self.check_for_overlapping_inherent_impls(type_def_id);
            }
            _ => {}
        }
    }
    // Trait and impl items carry no inherent impls of their own.
    fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
    }
    fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
    }
}
|
extern crate rtlsdr;
use std::os::raw::{c_void, c_uchar};
use std::ptr;
use rtlsdr::Error;
use std::time::Duration;
use std::thread;
#[allow(unused_mut)]
fn sdr_config(dev: &rtlsdr::Device) -> Error {
let (m, p, s, mut err) = dev.get_usb_strings();
match err {
Error::NoError => println!("set_xtal_freq successful"),
_ => return err,
};
println!("m: {}\n p: {}\n s: {}\n err: {:?}\n", m, p, s, err);
// ---------- Get/Set/Get Hardware Info ----------
println!("1. Getting hardware info...");
let (mut hw_info, mut err) = dev.get_hw_info();
println!("Error: {:?}", err);
println!("Vendor ID: {:?}", hw_info.vendor_id);
println!("Product ID: {:?}", hw_info.product_id);
println!("Manufacturer: {:?}", hw_info.manufact);
println!("Product: {:?}", hw_info.product);
println!("Serial number: {:?}", hw_info.serial);
println!("Serial number enabled: {:?}", hw_info.have_serial);
println!("IR endpoint enabled: {:?}", hw_info.enable_ir);
println!("Remote wakeup enabled: {:?}", hw_info.remote_wakeup);
println!("");
println!("Writing hardware info...");
err = dev.set_hw_info(&hw_info);
println!("Writing hardware info return message: {:?}\n", err);
println!("2. Getting hardware info...");
let (hw_info, mut err) = dev.get_hw_info();
println!("Error: {:?}", err);
println!("Vendor ID: {:?}", hw_info.vendor_id);
println!("Product ID: {:?}", hw_info.product_id);
println!("Manufacturer: {:?}", hw_info.manufact);
println!("Product: {:?}", hw_info.product);
println!("Serial number: {:?}", hw_info.serial);
println!("Serial number enabled: {:?}", hw_info.have_serial);
println!("IR endpoint enabled: {:?}", hw_info.enable_ir);
println!("Remote wakeup enabled: {:?}", hw_info.remote_wakeup);
println!("");
// ---------- Get Tuner Gain ----------
println!("get_tuner_type: {}", dev.get_tuner_type());
err = dev.set_xtal_freq(28800000, 28800000);
match err {
Error::NoError => println!("set_xtal_freq - 28800000"),
_ => return err,
};
println!("");
// ---------- Set Tuner Gain ----------
err = dev.set_tuner_gain_mode(true);
match err {
Error::NoError => println!("set_tuner_gain_mode successful..."),
_ => return err,
};
let (gains, mut err) = dev.get_tuner_gains();
match err {
Error::NoError => println!("get_tuner_gains successful..."),
_ => println!("get_tuner_gains failed - {:?}", err), // return err,
};
println!("\ntuner gains: {:?}\n", gains);
err = dev.set_tuner_gain(gains[2]);
match err {
Error::NoError => println!("set_tuner_gain {:?} successful...", gains[2]),
_ => return err,
};
println!("");
// ---------- Get/Set Sample Rate ----------
let samplerate: i32 = 2083334;
err = dev.set_sample_rate(samplerate);
match err {
Error::NoError => println!("set_sample_rate {} successful...", samplerate),
_ => return err,
};
println!("get_sample_rate {} successful...\n", dev.get_sample_rate());
// ---------- Get/Set Xtal Freq ----------
let (mut rtl_freq, mut tuner_freq, mut err) = dev.get_xtal_freq();
match err {
Error::NoError => {
println!("get_xtal_freq successful - rtl_freq: {}, tuner_freq: {}",
rtl_freq,
tuner_freq)
}
_ => return err,
};
rtl_freq = 28800000;
tuner_freq = 28800000;
err = dev.set_xtal_freq(rtl_freq, tuner_freq);
match err {
Error::NoError => {
println!("set_xtal_freq successful - rtl_freq: {}, tuner_freq: {}",
rtl_freq,
tuner_freq)
}
_ => return err,
};
println!("");
// ---------- Get/Set Center Freq ----------
err = dev.set_center_freq(978000000);
match err {
Error::NoError => println!("set_center_freq successful - 978000000"),
_ => return err,
};
println!("get_center_freq: {}\n", dev.get_center_freq());
// ---------- Set Tuner Bandwidth ----------
let bw: i32 = 1000000;
println!("Setting bandwidth: {}", bw);
err = dev.set_tuner_bandwidth(bw);
match err {
Error::NoError => println!("set_tuner_bandwidth {} Successful", bw),
_ => return err,
};
println!("");
// ---------- Buffer Reset ----------
err = dev.reset_buffer();
match err {
Error::NoError => println!("reset_buffer successful..."),
_ => return err,
};
// ---------- Get/Set Freq Correction ----------
let mut freq_corr = dev.get_freq_correction();
println!("get_freq_correction - {}", freq_corr);
freq_corr += 1;
let err = dev.set_freq_correction(freq_corr);
match err {
Error::NoError => println!("set_freq_correction successful - {}", freq_corr),
_ => return err,
};
println!("");
// ---------- ----------
Error::NoError
}
/// C callback invoked by librtlsdr for each block of received samples.
///
/// FIX: the previous implementation built a `Vec` with
/// `Vec::from_raw_parts`, which takes *ownership* of the allocation and
/// frees it when the `Vec` drops — freeing a buffer owned by the C
/// library (double free / undefined behavior). A borrowed slice view
/// leaves ownership with librtlsdr.
unsafe extern "C" fn read_async_callback(buf: *mut c_uchar, len: u32, ctx: *mut c_void) {
    let _ = ctx;
    // SAFETY: librtlsdr guarantees `buf` points to `len` readable bytes
    // for the duration of this call; we only borrow it.
    let v = std::slice::from_raw_parts(buf, len as usize);
    println!("----- read_async_callback buffer size - {}", len);
    println!("----- {} {} {} {} {} {}",
             v[0],
             v[1],
             v[2],
             v[3],
             v[4],
             v[5]);
}
fn main() {
// ---------- Device Check ----------
let count = rtlsdr::get_device_count();
if count == 0 {
println!("No devices found, exiting.");
return;
}
for i in 0..count {
let (m, p, s, err) = rtlsdr::get_device_usb_strings(i);
println!("get_device_usb_strings: {:?} - {} {} {}", err, m, p, s);
}
let index = 0;
println!("===== Device name, index {}: {} =====",
index,
rtlsdr::get_device_name(0));
println!("===== Running tests using device indx: 0 =====\n");
let (dev, mut err) = rtlsdr::open(index);
match err {
Error::NoError => println!("open successful"),
_ => return,
}
err = sdr_config(&dev);
match err {
Error::NoError => println!("sdr_config successful..."),
_ => return,
}
println!("calling read_sync...");
for i in 0..10 {
let (_, read_count, err) = dev.read_sync(rtlsdr::DEFAULT_BUF_LENGTH);
println!("----- read_sync requested iteration {} -----", i);
println!("\tread_sync requested - {}", rtlsdr::DEFAULT_BUF_LENGTH);
println!("\tread_sync received - {}", read_count);
println!("\tread_sync err msg - {:?}", err);
}
dev.reset_buffer();
// read_async is a blocking call and doesn't return until
// async_stop is explicitly called, so we spawn a thread
// that sleeps for a bit while our async callback runs...
let d = dev.clone();
thread::spawn(move || {
println!("async_stop thread sleeping for 5 seconds...");
thread::sleep(Duration::from_millis(5000));
println!("async_stop thread awake, canceling read async...");
d.cancel_async();
});
println!("calling read_async...");
err = dev.read_async(Some(read_async_callback),
ptr::null_mut(),
rtlsdr::DEFAULT_ASYNC_BUF_NUMBER,
rtlsdr::DEFAULT_BUF_LENGTH);
match err {
Error::NoError => println!("device close successful..."),
_ => println!("dev close error - {:?}", err),
}
err = dev.close();
match err {
Error::NoError => println!("device close successful..."),
_ => println!("dev close error - {:?}", err),
}
}
update comment
extern crate rtlsdr;
use std::os::raw::{c_void, c_uchar};
use std::ptr;
use rtlsdr::Error;
use std::time::Duration;
use std::thread;
#[allow(unused_mut)]
fn sdr_config(dev: &rtlsdr::Device) -> Error {
let (m, p, s, mut err) = dev.get_usb_strings();
match err {
Error::NoError => println!("set_xtal_freq successful"),
_ => return err,
};
println!("m: {}\n p: {}\n s: {}\n err: {:?}\n", m, p, s, err);
// ---------- Get/Set/Get Hardware Info ----------
println!("1. Getting hardware info...");
let (mut hw_info, mut err) = dev.get_hw_info();
println!("Error: {:?}", err);
println!("Vendor ID: {:?}", hw_info.vendor_id);
println!("Product ID: {:?}", hw_info.product_id);
println!("Manufacturer: {:?}", hw_info.manufact);
println!("Product: {:?}", hw_info.product);
println!("Serial number: {:?}", hw_info.serial);
println!("Serial number enabled: {:?}", hw_info.have_serial);
println!("IR endpoint enabled: {:?}", hw_info.enable_ir);
println!("Remote wakeup enabled: {:?}", hw_info.remote_wakeup);
println!("");
println!("Writing hardware info...");
err = dev.set_hw_info(&hw_info);
println!("Writing hardware info return message: {:?}\n", err);
println!("2. Getting hardware info...");
let (hw_info, mut err) = dev.get_hw_info();
println!("Error: {:?}", err);
println!("Vendor ID: {:?}", hw_info.vendor_id);
println!("Product ID: {:?}", hw_info.product_id);
println!("Manufacturer: {:?}", hw_info.manufact);
println!("Product: {:?}", hw_info.product);
println!("Serial number: {:?}", hw_info.serial);
println!("Serial number enabled: {:?}", hw_info.have_serial);
println!("IR endpoint enabled: {:?}", hw_info.enable_ir);
println!("Remote wakeup enabled: {:?}", hw_info.remote_wakeup);
println!("");
// ---------- Get Tuner Gain ----------
println!("get_tuner_type: {}", dev.get_tuner_type());
err = dev.set_xtal_freq(28800000, 28800000);
match err {
Error::NoError => println!("set_xtal_freq - 28800000"),
_ => return err,
};
println!("");
// ---------- Set Tuner Gain ----------
err = dev.set_tuner_gain_mode(true);
match err {
Error::NoError => println!("set_tuner_gain_mode successful..."),
_ => return err,
};
let (gains, mut err) = dev.get_tuner_gains();
match err {
Error::NoError => println!("get_tuner_gains successful..."),
_ => println!("get_tuner_gains failed - {:?}", err), // return err,
};
println!("\ntuner gains: {:?}\n", gains);
err = dev.set_tuner_gain(gains[2]);
match err {
Error::NoError => println!("set_tuner_gain {:?} successful...", gains[2]),
_ => return err,
};
println!("");
// ---------- Get/Set Sample Rate ----------
let samplerate: i32 = 2083334;
err = dev.set_sample_rate(samplerate);
match err {
Error::NoError => println!("set_sample_rate {} successful...", samplerate),
_ => return err,
};
println!("get_sample_rate {} successful...\n", dev.get_sample_rate());
// ---------- Get/Set Xtal Freq ----------
let (mut rtl_freq, mut tuner_freq, mut err) = dev.get_xtal_freq();
match err {
Error::NoError => {
println!("get_xtal_freq successful - rtl_freq: {}, tuner_freq: {}",
rtl_freq,
tuner_freq)
}
_ => return err,
};
rtl_freq = 28800000;
tuner_freq = 28800000;
err = dev.set_xtal_freq(rtl_freq, tuner_freq);
match err {
Error::NoError => {
println!("set_xtal_freq successful - rtl_freq: {}, tuner_freq: {}",
rtl_freq,
tuner_freq)
}
_ => return err,
};
println!("");
// ---------- Get/Set Center Freq ----------
err = dev.set_center_freq(978000000);
match err {
Error::NoError => println!("set_center_freq successful - 978000000"),
_ => return err,
};
println!("get_center_freq: {}\n", dev.get_center_freq());
// ---------- Set Tuner Bandwidth ----------
let bw: i32 = 1000000;
println!("Setting bandwidth: {}", bw);
err = dev.set_tuner_bandwidth(bw);
match err {
Error::NoError => println!("set_tuner_bandwidth {} Successful", bw),
_ => return err,
};
println!("");
// ---------- Buffer Reset ----------
err = dev.reset_buffer();
match err {
Error::NoError => println!("reset_buffer successful..."),
_ => return err,
};
// ---------- Get/Set Freq Correction ----------
let mut freq_corr = dev.get_freq_correction();
println!("get_freq_correction - {}", freq_corr);
freq_corr += 1;
let err = dev.set_freq_correction(freq_corr);
match err {
Error::NoError => println!("set_freq_correction successful - {}", freq_corr),
_ => return err,
};
println!("");
// ---------- ----------
Error::NoError
}
/// C callback invoked by librtlsdr for each block of received samples.
///
/// FIX: the previous implementation built a `Vec` with
/// `Vec::from_raw_parts`, which takes *ownership* of the allocation and
/// frees it when the `Vec` drops — freeing a buffer owned by the C
/// library (double free / undefined behavior). A borrowed slice view
/// leaves ownership with librtlsdr.
unsafe extern "C" fn read_async_callback(buf: *mut c_uchar, len: u32, ctx: *mut c_void) {
    let _ = ctx;
    // SAFETY: librtlsdr guarantees `buf` points to `len` readable bytes
    // for the duration of this call; we only borrow it.
    let v = std::slice::from_raw_parts(buf, len as usize);
    println!("----- read_async_callback buffer size - {}", len);
    println!("----- {} {} {} {} {} {}",
             v[0],
             v[1],
             v[2],
             v[3],
             v[4],
             v[5]);
}
/// Example driver: enumerates RTL-SDR devices, configures the first one,
/// performs ten synchronous reads, then starts an asynchronous read that
/// a helper thread cancels after five seconds, and closes the device.
fn main() {
    // ---------- Device Check ----------
    let count = rtlsdr::get_device_count();
    if count == 0 {
        println!("No devices found, exiting.");
        return;
    }
    for i in 0..count {
        let (m, p, s, err) = rtlsdr::get_device_usb_strings(i);
        println!("get_device_usb_strings: {:?} - {} {} {}", err, m, p, s);
    }
    let index = 0;
    // FIX: use `index` instead of a hard-coded 0 so these lines stay in
    // sync if the chosen device ever changes; also fixed the "indx" typo.
    println!("===== Device name, index {}: {} =====",
             index,
             rtlsdr::get_device_name(index));
    println!("===== Running tests using device index: 0 =====\n");
    let (dev, mut err) = rtlsdr::open(index);
    match err {
        Error::NoError => println!("open successful"),
        _ => return,
    }
    err = sdr_config(&dev);
    match err {
        Error::NoError => println!("sdr_config successful..."),
        _ => return,
    }
    println!("calling read_sync...");
    for i in 0..10 {
        let (_, read_count, err) = dev.read_sync(rtlsdr::DEFAULT_BUF_LENGTH);
        println!("----- read_sync requested iteration {} -----", i);
        println!("\tread_sync requested - {}", rtlsdr::DEFAULT_BUF_LENGTH);
        println!("\tread_sync received - {}", read_count);
        println!("\tread_sync err msg - {:?}", err);
    }
    dev.reset_buffer();
    // read_async is a blocking call and doesn't return until
    // async_stop is explicitly called, so we spawn a thread
    // that sleeps for a bit while our async callback runs...
    let d = dev.clone();
    thread::spawn(move || {
        println!("async_stop thread sleeping for 5 seconds...");
        thread::sleep(Duration::from_millis(5000));
        println!("async_stop thread awake, canceling read async...");
        d.cancel_async();
    });
    println!("calling read_async...");
    err = dev.read_async(Some(read_async_callback),
                         ptr::null_mut(),
                         rtlsdr::DEFAULT_ASYNC_BUF_NUMBER,
                         rtlsdr::DEFAULT_BUF_LENGTH);
    match err {
        Error::NoError => println!("read_async returned successfully..."),
        _ => println!("read_async return error - {:?}", err),
    }
    err = dev.close();
    match err {
        Error::NoError => println!("device close successful..."),
        _ => println!("dev close error - {:?}", err),
    }
}
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use device::TextureFilter;
use euclid::{TypedPoint2D, UnknownUnit};
use fnv::FnvHasher;
use profiler::BackendProfileCounters;
use std::collections::{HashMap, HashSet};
use std::f32;
use std::hash::BuildHasherDefault;
use std::{i32, usize};
use std::path::PathBuf;
use std::sync::Arc;
use tiling;
use renderer::BlendMode;
use webrender_traits::{ClipId, ColorF, DeviceUintRect, Epoch, ExternalImageData, ExternalImageId};
use webrender_traits::{ImageData, ImageFormat, NativeFontHandle, PipelineId};
/// An ID for a texture that is owned by the texture cache module.
///
/// This can include atlases or standalone textures allocated via the
/// texture cache (e.g. if an image is too large to be added to an
/// atlas). The texture cache manages the allocation and freeing of
/// these IDs, and the rendering thread maintains a map from cache
/// texture ID to native texture.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct CacheTextureId(pub usize);
/// Represents the source for a texture.
///
/// These are passed from throughout the pipeline until they reach the
/// rendering thread, where they are resolved to a native texture ID.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum SourceTexture {
    /// Used when no texture is bound (see `BatchTextures::no_texture`).
    Invalid,
    /// A texture owned by the texture cache module.
    TextureCache(CacheTextureId),
    /// A texture backed by externally-managed image data.
    External(ExternalImageData),
    #[cfg_attr(not(feature = "webgl"), allow(dead_code))]
    /// This is actually a gl::GLuint, with the shared texture id between the
    /// main context and the WebGL context.
    WebGL(u32),
}
/// Scale factor for quantizing a [0.0, 1.0] color channel to a u8
/// (see `PackedColor::from_color`).
const COLOR_FLOAT_TO_FIXED: f32 = 255.0;
// NOTE(review): presumably a 16-bit fixed-point angle scale — confirm at
// the use sites, which are not visible here.
pub const ANGLE_FLOAT_TO_FIXED: f32 = 65535.0;
/// Near/far clip planes used for the orthographic projection.
pub const ORTHO_NEAR_PLANE: f32 = -1000000.0;
pub const ORTHO_FAR_PLANE: f32 = 1000000.0;
/// Named texture sampler slots used by the shaders.
#[derive(Debug, PartialEq, Eq)]
pub enum TextureSampler {
    Color0,
    Color1,
    Color2,
    CacheA8,
    CacheRGBA8,
    Data32,
    ResourceCache,
    Layers,
    RenderTasks,
    Dither,
}
impl TextureSampler {
    /// Maps a color slot index (0..=2) to its sampler.
    ///
    /// Panics for any other index.
    pub fn color(n: usize) -> TextureSampler {
        if n > 2 {
            panic!("There are only 3 color samplers.");
        }
        if n == 0 {
            TextureSampler::Color0
        } else if n == 1 {
            TextureSampler::Color1
        } else {
            TextureSampler::Color2
        }
    }
}
/// Optional textures that can be used as a source in the shaders.
/// Textures that are not used by the batch are equal to TextureId::invalid().
#[derive(Copy, Clone, Debug)]
pub struct BatchTextures {
    /// One source per color sampler slot (Color0..Color2).
    pub colors: [SourceTexture; 3],
}
impl BatchTextures {
    /// A batch that samples no textures: all three slots are `Invalid`.
    pub fn no_texture() -> Self {
        BatchTextures {
            colors: [SourceTexture::Invalid; 3],
        }
    }
}
// In some places we need to temporarily bind a texture to any slot;
// Color0 is an arbitrary but fixed choice for that purpose.
pub const DEFAULT_TEXTURE: TextureSampler = TextureSampler::Color0;
/// Vertex attribute bindings for the primitive shaders.
#[derive(Clone, Copy, Debug)]
pub enum VertexAttribute {
    // vertex-frequency basic attributes
    Position,
    Color,
    ColorTexCoord,
    // instance-frequency primitive attributes
    Data0,
    Data1,
}
/// Vertex attribute bindings, presumably for the blur shader
/// (named after it) — confirm against the shader source.
#[derive(Clone, Copy, Debug)]
pub enum BlurAttribute {
    // vertex frequency
    Position,
    // instance frequency
    RenderTaskIndex,
    SourceTaskIndex,
    Direction,
}
/// Vertex attribute bindings, presumably for the clip shaders
/// (named after them) — confirm against the shader source.
#[derive(Clone, Copy, Debug)]
pub enum ClipAttribute {
    // vertex frequency
    Position,
    // instance frequency
    RenderTaskIndex,
    LayerIndex,
    DataIndex,
    SegmentIndex,
    ResourceAddress,
}
// A packed RGBA8 color ordered for vertex data or similar.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct PackedColor {
pub r: u8,
pub g: u8,
pub b: u8,
pub a: u8,
}
impl PackedColor {
    /// Converts a floating-point `ColorF` (channels nominally 0.0..=1.0)
    /// into a packed RGBA8 color, rounding half-up per channel.
    pub fn from_color(color: &ColorF) -> PackedColor {
        // Scale a unit-interval channel to 0..=255 with round-half-up.
        fn quantize(channel: f32) -> u8 {
            (0.5 + channel * COLOR_FLOAT_TO_FIXED).floor() as u8
        }
        PackedColor {
            r: quantize(color.r),
            g: quantize(color.g),
            b: quantize(color.b),
            a: quantize(color.a),
        }
    }
}
/// A simple position-only vertex.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct PackedVertex {
    pub pos: [f32; 2],
}
/// Vertex for the debug text overlay: position, packed color and
/// (u, v) texture coordinates.
#[derive(Debug)]
#[repr(C)]
pub struct DebugFontVertex {
    pub x: f32,
    pub y: f32,
    pub color: PackedColor,
    pub u: f32,
    pub v: f32,
}
impl DebugFontVertex {
    /// Packs a position, texture coordinates and a color into a debug
    /// font vertex.
    pub fn new(x: f32, y: f32, u: f32, v: f32, color: PackedColor) -> DebugFontVertex {
        DebugFontVertex { x, y, u, v, color }
    }
}
/// Vertex for solid-color debug drawing: position plus packed color.
#[repr(C)]
pub struct DebugColorVertex {
    pub x: f32,
    pub y: f32,
    pub color: PackedColor,
}
impl DebugColorVertex {
    /// Builds a solid-color debug vertex at (`x`, `y`).
    pub fn new(x: f32, y: f32, color: PackedColor) -> DebugColorVertex {
        DebugColorVertex { x, y, color }
    }
}
/// How a texture may be used as a render target.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RenderTargetMode {
    None,
    SimpleRenderTarget,
    LayerRenderTarget(i32), // Number of texture layers
}
/// An operation to apply to a texture owned by the texture cache.
pub enum TextureUpdateOp {
    /// Allocate a new texture, optionally initialized with image data.
    Create {
        width: u32,
        height: u32,
        format: ImageFormat,
        filter: TextureFilter,
        mode: RenderTargetMode,
        data: Option<ImageData>,
    },
    /// Upload `data` into a sub-region of an existing texture.
    Update {
        page_pos_x: u32, // the texture page position which we want to upload
        page_pos_y: u32,
        width: u32,
        height: u32,
        data: Arc<Vec<u8>>,
        stride: Option<u32>,
        offset: u32,
    },
    /// Update a region from an external image identified by `id`.
    UpdateForExternalBuffer {
        rect: DeviceUintRect,
        id: ExternalImageId,
        channel_index: u8,
        stride: Option<u32>,
        offset: u32,
    },
    /// Grow an existing texture to the given size/format.
    Grow {
        width: u32,
        height: u32,
        format: ImageFormat,
        filter: TextureFilter,
        mode: RenderTargetMode,
    },
    /// Release the texture.
    Free,
}
/// A single texture-cache operation paired with the texture it targets.
pub struct TextureUpdate {
    pub id: CacheTextureId,
    pub op: TextureUpdateOp,
}
/// An ordered batch of texture updates.
pub struct TextureUpdateList {
    pub updates: Vec<TextureUpdate>,
}
impl TextureUpdateList {
    /// Creates an empty update list.
    pub fn new() -> TextureUpdateList {
        TextureUpdateList { updates: vec![] }
    }
    /// Appends a single texture update, preserving submission order.
    #[inline]
    pub fn push(&mut self, update: TextureUpdate) {
        self.updates.push(update);
    }
}
/// Mostly wraps a tiling::Frame, adding a bit of extra information.
pub struct RendererFrame {
    /// The last rendered epoch for each pipeline present in the frame.
    /// This information is used to know if a certain transformation on the layout has
    /// been rendered, which is necessary for reftests.
    pub pipeline_epoch_map: HashMap<PipelineId, Epoch, BuildHasherDefault<FnvHasher>>,
    /// The layers that are currently affected by the over-scrolling animation.
    pub layers_bouncing_back: HashSet<ClipId, BuildHasherDefault<FnvHasher>>,
    /// The built frame, if one was produced for this update.
    pub frame: Option<tiling::Frame>,
}
impl RendererFrame {
    /// Bundles per-pipeline epochs, the bouncing-layer set and the
    /// (optional) built frame into a `RendererFrame`.
    pub fn new(pipeline_epoch_map: HashMap<PipelineId, Epoch, BuildHasherDefault<FnvHasher>>,
               layers_bouncing_back: HashSet<ClipId, BuildHasherDefault<FnvHasher>>,
               frame: Option<tiling::Frame>)
               -> RendererFrame {
        RendererFrame {
            pipeline_epoch_map,
            layers_bouncing_back,
            frame,
        }
    }
}
/// Results produced by the backend for the renderer to consume.
pub enum ResultMsg {
    /// Reload the shader at the given path.
    RefreshShader(PathBuf),
    /// A new frame, with its texture updates and backend profile counters.
    NewFrame(RendererFrame, TextureUpdateList, BackendProfileCounters),
}
/// Axis of a 2D operation (e.g. the direction of a `Blur` filter pass).
#[repr(u32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum AxisDirection {
    Horizontal,
    Vertical,
}
/// Index of a stacking context.
#[derive(Debug, Clone, Copy, Eq, Hash, PartialEq)]
pub struct StackingContextIndex(pub usize);
/// The four corner UV coordinates of a rectangle, in unit `U`.
#[derive(Clone, Copy, Debug)]
pub struct RectUv<T, U = UnknownUnit> {
    pub top_left: TypedPoint2D<T, U>,
    pub top_right: TypedPoint2D<T, U>,
    pub bottom_left: TypedPoint2D<T, U>,
    pub bottom_right: TypedPoint2D<T, U>,
}
/// Low-level filter operations; amounts use `Au` fixed point unless noted.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LowLevelFilterOp {
    Blur(Au, AxisDirection),
    Brightness(Au),
    Contrast(Au),
    Grayscale(Au),
    /// Fixed-point in `ANGLE_FLOAT_TO_FIXED` units.
    HueRotate(i32),
    Invert(Au),
    Opacity(Au),
    Saturate(Au),
    Sepia(Au),
}
/// Composite operations that can be performed directly as a blend mode.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum HardwareCompositeOp {
    PremultipliedAlpha,
}
impl HardwareCompositeOp {
    /// Maps this composite op to the blend mode the renderer should use.
    pub fn to_blend_mode(&self) -> BlendMode {
        match self {
            &HardwareCompositeOp::PremultipliedAlpha => BlendMode::PremultipliedAlpha,
        }
    }
}
Auto merge of #1385 - staktrace:unused_import, r=kvark
Remove unused import
<!-- Reviewable:start -->
This change is [<img src="https://reviewable.io/review_button.svg" height="34" align="absmiddle" alt="Reviewable"/>](https://reviewable.io/reviews/servo/webrender/1385)
<!-- Reviewable:end -->
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use device::TextureFilter;
use euclid::{TypedPoint2D, UnknownUnit};
use fnv::FnvHasher;
use profiler::BackendProfileCounters;
use std::collections::{HashMap, HashSet};
use std::f32;
use std::hash::BuildHasherDefault;
use std::{i32, usize};
use std::path::PathBuf;
use std::sync::Arc;
use tiling;
use renderer::BlendMode;
use webrender_traits::{ClipId, ColorF, DeviceUintRect, Epoch, ExternalImageData, ExternalImageId};
use webrender_traits::{ImageData, ImageFormat, PipelineId};
// An ID for a texture that is owned by the
// texture cache module. This can include atlases
// or standalone textures allocated via the
// texture cache (e.g. if an image is too large
// to be added to an atlas). The texture cache
// manages the allocation and freeing of these
// IDs, and the rendering thread maintains a
// map from cache texture ID to native texture.
/// ID of a texture owned by the texture cache; the rendering thread maps
/// these to native textures (see the module comment above).
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub struct CacheTextureId(pub usize);
// Represents the source for a texture.
// These are passed from throughout the
// pipeline until they reach the rendering
// thread, where they are resolved to a
// native texture ID.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum SourceTexture {
    /// No texture bound (see `BatchTextures::no_texture`).
    Invalid,
    /// A texture allocated through the texture cache.
    TextureCache(CacheTextureId),
    /// An external image (see `webrender_traits::ExternalImageData`).
    External(ExternalImageData),
    #[cfg_attr(not(feature = "webgl"), allow(dead_code))]
    /// This is actually a gl::GLuint, with the shared texture id between the
    /// main context and the WebGL context.
    WebGL(u32),
}
// Scale for converting a 0.0..=1.0 color channel to u8 (see PackedColor::from_color).
const COLOR_FLOAT_TO_FIXED: f32 = 255.0;
// Scale for angles stored in fixed point (see LowLevelFilterOp::HueRotate).
pub const ANGLE_FLOAT_TO_FIXED: f32 = 65535.0;
// Depth bounds for an orthographic projection; consumers are outside this file.
pub const ORTHO_NEAR_PLANE: f32 = -1000000.0;
pub const ORTHO_FAR_PLANE: f32 = 1000000.0;
/// The texture units/samplers that shaders can read from.
#[derive(Debug, PartialEq, Eq)]
pub enum TextureSampler {
    Color0,
    Color1,
    Color2,
    CacheA8,
    CacheRGBA8,
    Data32,
    ResourceCache,
    Layers,
    RenderTasks,
    Dither,
}
impl TextureSampler {
    /// Returns the color sampler for slot `n` (only slots 0, 1 and 2 exist).
    ///
    /// # Panics
    /// Panics if `n >= 3`, reporting the offending index to ease debugging.
    pub fn color(n: usize) -> TextureSampler {
        match n {
            0 => TextureSampler::Color0,
            1 => TextureSampler::Color1,
            2 => TextureSampler::Color2,
            // Include the bad index in the message so the caller can be found.
            _ => panic!("There are only 3 color samplers (got index {}).", n),
        }
    }
}
/// Optional textures that can be used as a source in the shaders.
/// Textures that are not used by the batch are equal to TextureId::invalid().
#[derive(Copy, Clone, Debug)]
pub struct BatchTextures {
    // One entry per color sampler slot (Color0/Color1/Color2).
    pub colors: [SourceTexture; 3],
}
impl BatchTextures {
    /// Returns a `BatchTextures` with every color slot set to
    /// `SourceTexture::Invalid`, i.e. no textures bound.
    pub fn no_texture() -> Self {
        let colors = [SourceTexture::Invalid; 3];
        BatchTextures { colors }
    }
}
// In some places we need to temporarily bind a texture to any slot.
pub const DEFAULT_TEXTURE: TextureSampler = TextureSampler::Color0;
/// Vertex attributes for the main primitive shaders.
#[derive(Clone, Copy, Debug)]
pub enum VertexAttribute {
    // vertex-frequency basic attributes
    Position,
    Color,
    ColorTexCoord,
    // instance-frequency primitive attributes
    Data0,
    Data1,
}
/// Vertex attributes for the blur shader.
#[derive(Clone, Copy, Debug)]
pub enum BlurAttribute {
    // vertex frequency
    Position,
    // instance frequency
    RenderTaskIndex,
    SourceTaskIndex,
    Direction,
}
/// Vertex attributes for the clip shaders.
#[derive(Clone, Copy, Debug)]
pub enum ClipAttribute {
    // vertex frequency
    Position,
    // instance frequency
    RenderTaskIndex,
    LayerIndex,
    DataIndex,
    SegmentIndex,
    ResourceAddress,
}
// A packed RGBA8 color ordered for vertex data or similar.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct PackedColor {
    pub r: u8,
    pub g: u8,
    pub b: u8,
    pub a: u8,
}
impl PackedColor {
    /// Converts a floating-point `ColorF` (channels nominally 0.0..=1.0)
    /// into a packed RGBA8 color, rounding half-up per channel.
    pub fn from_color(color: &ColorF) -> PackedColor {
        // Scale a unit-interval channel to 0..=255 with round-half-up.
        fn quantize(channel: f32) -> u8 {
            (0.5 + channel * COLOR_FLOAT_TO_FIXED).floor() as u8
        }
        PackedColor {
            r: quantize(color.r),
            g: quantize(color.g),
            b: quantize(color.b),
            a: quantize(color.a),
        }
    }
}
/// A simple position-only vertex.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct PackedVertex {
    pub pos: [f32; 2],
}
/// Vertex for the debug text overlay: position, packed color and
/// (u, v) texture coordinates.
#[derive(Debug)]
#[repr(C)]
pub struct DebugFontVertex {
    pub x: f32,
    pub y: f32,
    pub color: PackedColor,
    pub u: f32,
    pub v: f32,
}
impl DebugFontVertex {
    /// Packs a position, texture coordinates and a color into a debug
    /// font vertex.
    pub fn new(x: f32, y: f32, u: f32, v: f32, color: PackedColor) -> DebugFontVertex {
        DebugFontVertex { x, y, u, v, color }
    }
}
/// Vertex for solid-color debug drawing: position plus packed color.
#[repr(C)]
pub struct DebugColorVertex {
    pub x: f32,
    pub y: f32,
    pub color: PackedColor,
}
impl DebugColorVertex {
    /// Builds a solid-color debug vertex at (`x`, `y`).
    pub fn new(x: f32, y: f32, color: PackedColor) -> DebugColorVertex {
        DebugColorVertex { x, y, color }
    }
}
/// How a texture may be used as a render target.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RenderTargetMode {
    None,
    SimpleRenderTarget,
    LayerRenderTarget(i32), // Number of texture layers
}
/// An operation to apply to a texture owned by the texture cache.
pub enum TextureUpdateOp {
    /// Allocate a new texture, optionally initialized with image data.
    Create {
        width: u32,
        height: u32,
        format: ImageFormat,
        filter: TextureFilter,
        mode: RenderTargetMode,
        data: Option<ImageData>,
    },
    /// Upload `data` into a sub-region of an existing texture.
    Update {
        page_pos_x: u32, // the texture page position which we want to upload
        page_pos_y: u32,
        width: u32,
        height: u32,
        data: Arc<Vec<u8>>,
        stride: Option<u32>,
        offset: u32,
    },
    /// Update a region from an external image identified by `id`.
    UpdateForExternalBuffer {
        rect: DeviceUintRect,
        id: ExternalImageId,
        channel_index: u8,
        stride: Option<u32>,
        offset: u32,
    },
    /// Grow an existing texture to the given size/format.
    Grow {
        width: u32,
        height: u32,
        format: ImageFormat,
        filter: TextureFilter,
        mode: RenderTargetMode,
    },
    /// Release the texture.
    Free,
}
/// A single texture-cache operation paired with the texture it targets.
pub struct TextureUpdate {
    pub id: CacheTextureId,
    pub op: TextureUpdateOp,
}
/// An ordered batch of texture updates.
pub struct TextureUpdateList {
    pub updates: Vec<TextureUpdate>,
}
impl TextureUpdateList {
    /// Creates an empty update list.
    pub fn new() -> TextureUpdateList {
        TextureUpdateList { updates: vec![] }
    }
    /// Appends a single texture update, preserving submission order.
    #[inline]
    pub fn push(&mut self, update: TextureUpdate) {
        self.updates.push(update);
    }
}
/// Mostly wraps a tiling::Frame, adding a bit of extra information.
pub struct RendererFrame {
    /// The last rendered epoch for each pipeline present in the frame.
    /// This information is used to know if a certain transformation on the layout has
    /// been rendered, which is necessary for reftests.
    pub pipeline_epoch_map: HashMap<PipelineId, Epoch, BuildHasherDefault<FnvHasher>>,
    /// The layers that are currently affected by the over-scrolling animation.
    pub layers_bouncing_back: HashSet<ClipId, BuildHasherDefault<FnvHasher>>,
    /// The built frame, if one was produced for this update.
    pub frame: Option<tiling::Frame>,
}
impl RendererFrame {
    /// Bundles per-pipeline epochs, the bouncing-layer set and the
    /// (optional) built frame into a `RendererFrame`.
    pub fn new(pipeline_epoch_map: HashMap<PipelineId, Epoch, BuildHasherDefault<FnvHasher>>,
               layers_bouncing_back: HashSet<ClipId, BuildHasherDefault<FnvHasher>>,
               frame: Option<tiling::Frame>)
               -> RendererFrame {
        RendererFrame {
            pipeline_epoch_map,
            layers_bouncing_back,
            frame,
        }
    }
}
/// Results produced by the backend for the renderer to consume.
pub enum ResultMsg {
    /// Reload the shader at the given path.
    RefreshShader(PathBuf),
    /// A new frame, with its texture updates and backend profile counters.
    NewFrame(RendererFrame, TextureUpdateList, BackendProfileCounters),
}
/// Axis of a 2D operation (e.g. the direction of a `Blur` filter pass).
#[repr(u32)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum AxisDirection {
    Horizontal,
    Vertical,
}
/// Index of a stacking context.
#[derive(Debug, Clone, Copy, Eq, Hash, PartialEq)]
pub struct StackingContextIndex(pub usize);
/// The four corner UV coordinates of a rectangle, in unit `U`.
#[derive(Clone, Copy, Debug)]
pub struct RectUv<T, U = UnknownUnit> {
    pub top_left: TypedPoint2D<T, U>,
    pub top_right: TypedPoint2D<T, U>,
    pub bottom_left: TypedPoint2D<T, U>,
    pub bottom_right: TypedPoint2D<T, U>,
}
/// Low-level filter operations; amounts use `Au` fixed point unless noted.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LowLevelFilterOp {
    Blur(Au, AxisDirection),
    Brightness(Au),
    Contrast(Au),
    Grayscale(Au),
    /// Fixed-point in `ANGLE_FLOAT_TO_FIXED` units.
    HueRotate(i32),
    Invert(Au),
    Opacity(Au),
    Saturate(Au),
    Sepia(Au),
}
/// Composite operations that can be performed directly as a blend mode.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum HardwareCompositeOp {
    PremultipliedAlpha,
}
impl HardwareCompositeOp {
    /// Maps this composite op to the blend mode the renderer should use.
    pub fn to_blend_mode(&self) -> BlendMode {
        match self {
            &HardwareCompositeOp::PremultipliedAlpha => BlendMode::PremultipliedAlpha,
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.