// This file is part of Substrate.
// Copyright (C) 2018-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! GRANDPA block finality proof generation and check.
//!
//! Finality of block B is proved by providing:
//! 1) the justification for the descendant block F;
//! 2) headers sub-chain (B; F] if B != F;
//! 3) proof of GRANDPA::authorities() if the set changes at block F.
//!
//! Since earliest possible justification is returned, the GRANDPA authorities set
//! at the block F is guaranteed to be the same as in the block B (this is because block
//! that enacts new GRANDPA authorities set always comes with justification). It also
//! means that the `set_id` is the same at blocks B and F.
//!
//! Let U be the last finalized block known to caller. If authorities set has changed several
//! times in the (U; F] interval, multiple finality proof fragments are returned (one for each
//! authority set change) and they must be verified in-order.
//!
//! Finality proof provider can choose how to provide finality proof on its own. The incomplete
//! finality proof (that finalizes some block C that is ancestor of the B and descendant
//! of the U) could be returned.
use log::{trace, warn};
use std::sync::Arc;
use finality_grandpa::BlockNumberOps;
use parity_scale_codec::{Decode, Encode};
use sc_client_api::{
backend::Backend,
light::{FetchChecker, RemoteReadRequest},
ProofProvider, StorageProof, StorageProvider,
};
use sc_telemetry::{telemetry, CONSENSUS_INFO};
use sp_blockchain::{Backend as BlockchainBackend, Error as ClientError, Result as ClientResult};
use sp_core::storage::StorageKey;
use sp_finality_grandpa::{
AuthorityId, AuthorityList, VersionedAuthorityList, GRANDPA_AUTHORITIES_KEY,
};
use sp_runtime::{
generic::BlockId,
traits::{Block as BlockT, Header as HeaderT, NumberFor, One},
Justification,
};
use crate::justification::GrandpaJustification;
use crate::VoterSet;
/// Maximum number of fragments that we want to return in a single prove_finality call.
///
/// Bounds the size of a proof served over the network; the requester can issue a
/// follow-up request starting from the last returned fragment.
const MAX_FRAGMENTS_IN_PROOF: usize = 8;
/// GRANDPA authority set related methods for the finality proof provider.
pub trait AuthoritySetForFinalityProver<Block: BlockT>: Send + Sync {
	/// Read GRANDPA_AUTHORITIES_KEY from storage at given block.
	fn authorities(&self, block: &BlockId<Block>) -> ClientResult<AuthorityList>;
	/// Prove storage read of GRANDPA_AUTHORITIES_KEY at given block.
	fn prove_authorities(&self, block: &BlockId<Block>) -> ClientResult<StorageProof>;
}
/// Trait that combines `StorageProvider` and `ProofProvider`.
///
/// Exists only so that a single `Arc<dyn StorageAndProofProvider<_, _>>` can be
/// passed around where both capabilities are needed (trait objects cannot combine
/// two independent traits).
pub trait StorageAndProofProvider<Block, BE>:
	StorageProvider<Block, BE> + ProofProvider<Block> + Send + Sync
where
	Block: BlockT,
	BE: Backend<Block> + Send + Sync,
{
}

/// Blanket implementation: anything providing both storage and proofs qualifies.
impl<Block, BE, P> StorageAndProofProvider<Block, BE> for P
where
	Block: BlockT,
	BE: Backend<Block> + Send + Sync,
	P: StorageProvider<Block, BE> + ProofProvider<Block> + Send + Sync,
{
}
/// Implementation of AuthoritySetForFinalityProver.
impl<BE, Block: BlockT> AuthoritySetForFinalityProver<Block>
	for Arc<dyn StorageAndProofProvider<Block, BE>>
where
	BE: Backend<Block> + Send + Sync + 'static,
{
	/// Fetch and decode the GRANDPA authority list stored under
	/// `GRANDPA_AUTHORITIES_KEY` at the given block.
	fn authorities(&self, block: &BlockId<Block>) -> ClientResult<AuthorityList> {
		let key = StorageKey(GRANDPA_AUTHORITIES_KEY.to_vec());
		// Both a missing storage entry and a failed decode are reported as an
		// invalid authorities set.
		match self.storage(block, &key)? {
			Some(encoded) => VersionedAuthorityList::decode(&mut encoded.0.as_slice())
				.map(Into::into)
				.map_err(|_| ClientError::InvalidAuthoritiesSet),
			None => Err(ClientError::InvalidAuthoritiesSet),
		}
	}

	/// Generate a storage read proof for `GRANDPA_AUTHORITIES_KEY` at the given block.
	fn prove_authorities(&self, block: &BlockId<Block>) -> ClientResult<StorageProof> {
		let mut keys = std::iter::once(GRANDPA_AUTHORITIES_KEY);
		self.read_proof(block, &mut keys)
	}
}
/// GRANDPA authority set related methods for the finality proof checker.
pub trait AuthoritySetForFinalityChecker<Block: BlockT>: Send + Sync {
	/// Check storage read proof of GRANDPA_AUTHORITIES_KEY at given block.
	///
	/// Returns the authority list extracted from the proof on success.
	fn check_authorities_proof(
		&self,
		hash: Block::Hash,
		header: Block::Header,
		proof: StorageProof,
	) -> ClientResult<AuthorityList>;
}
/// FetchChecker-based implementation of AuthoritySetForFinalityChecker.
impl<Block: BlockT> AuthoritySetForFinalityChecker<Block> for Arc<dyn FetchChecker<Block>> {
	fn check_authorities_proof(
		&self,
		hash: Block::Hash,
		header: Block::Header,
		proof: StorageProof,
	) -> ClientResult<AuthorityList> {
		// Reuse the light-client read-proof machinery: verify `proof` against the
		// header's state root for the single authorities key.
		let storage_key = GRANDPA_AUTHORITIES_KEY.to_vec();
		let request = RemoteReadRequest {
			block: hash,
			header,
			keys: vec![storage_key.clone()],
			retry_count: None,
		};

		self.check_read_proof(&request, proof).and_then(|results| {
			let maybe_encoded = results.get(&storage_key).expect(
				"storage_key is listed in the request keys; \
				check_read_proof must return a value for each requested key;
				qed",
			);
			// A proven-absent key or an undecodable value both mean the proof does
			// not establish a valid authorities set.
			maybe_encoded
				.as_ref()
				.and_then(|encoded| VersionedAuthorityList::decode(&mut encoded.as_slice()).ok())
				.map(|versioned| versioned.into())
				.ok_or(ClientError::InvalidAuthoritiesSet)
		})
	}
}
/// Finality proof provider for serving network requests.
pub struct FinalityProofProvider<B, Block: BlockT> {
	// Blockchain backend used to look up headers, hashes and justifications.
	backend: Arc<B>,
	// Source of (and prover for) the GRANDPA authority set at arbitrary blocks.
	authority_provider: Arc<dyn AuthoritySetForFinalityProver<Block>>,
}
impl<B, Block: BlockT> FinalityProofProvider<B, Block>
where
	B: Backend<Block> + Send + Sync + 'static,
{
	/// Create new finality proof provider using:
	///
	/// - backend for accessing blockchain data;
	/// - authority_provider for calling and proving runtime methods.
	pub fn new<P>(backend: Arc<B>, authority_provider: P) -> Self
	where
		P: AuthoritySetForFinalityProver<Block> + 'static,
	{
		let authority_provider = Arc::new(authority_provider);
		Self { backend, authority_provider }
	}

	/// Create new finality proof provider for the service using:
	///
	/// - backend for accessing blockchain data;
	/// - storage_and_proof_provider, which is generally a client.
	pub fn new_for_service(
		backend: Arc<B>,
		storage_and_proof_provider: Arc<dyn StorageAndProofProvider<Block, B>>,
	) -> Arc<Self> {
		let provider = Self::new(backend, storage_and_proof_provider);
		Arc::new(provider)
	}
}
impl<B, Block> FinalityProofProvider<B, Block>
where
	Block: BlockT,
	NumberFor<Block>: BlockNumberOps,
	B: Backend<Block> + Send + Sync + 'static,
{
	/// Prove finality for the range (begin; end] hash. Returns None if there are no finalized
	/// blocks unknown in the range.
	///
	/// Thin wrapper around the free function [`prove_finality`] using this provider's
	/// backend and authority provider.
	pub fn prove_finality(
		&self,
		begin: Block::Hash,
		end: Block::Hash,
		authorities_set_id: u64,
	) -> Result<Option<Vec<u8>>, ClientError> {
		prove_finality::<_, _, GrandpaJustification<Block>>(
			&*self.backend.blockchain(),
			&*self.authority_provider,
			authorities_set_id,
			begin,
			end,
		)
	}
}
impl<B, Block> sc_network::config::FinalityProofProvider<Block> for FinalityProofProvider<B, Block>
where
	Block: BlockT,
	NumberFor<Block>: BlockNumberOps,
	B: Backend<Block> + Send + Sync + 'static,
{
	/// Serve a network finality-proof request: decode the request blob and build
	/// a proof for the range (request.last_finalized; for_block].
	fn prove_finality(
		&self,
		for_block: Block::Hash,
		request: &[u8],
	) -> Result<Option<Vec<u8>>, ClientError> {
		// Malformed request blobs from peers are reported as a backend error,
		// not a panic.
		let request: FinalityProofRequest<Block::Hash> = Decode::decode(&mut &request[..])
			.map_err(|e| {
				warn!(target: "afg", "Unable to decode finality proof request: {}", e.what());
				ClientError::Backend("Invalid finality proof request".to_string())
			})?;
		match request {
			FinalityProofRequest::Original(request) =>
				prove_finality::<_, _, GrandpaJustification<Block>>(
					&*self.backend.blockchain(),
					&*self.authority_provider,
					request.authorities_set_id,
					request.last_finalized,
					for_block,
				),
		}
	}
}
/// The effects of block finality.
#[derive(Debug, PartialEq)]
pub struct FinalityEffects<Header: HeaderT> {
	/// The (ordered) set of headers that could be imported.
	pub headers_to_import: Vec<Header>,
	/// The hash of the block that could be finalized.
	pub block: Header::Hash,
	/// The justification for the block.
	pub justification: Vec<u8>,
	/// New authorities set id that should be applied starting from block.
	pub new_set_id: u64,
	/// New authorities set that should be applied starting from block.
	pub new_authorities: AuthorityList,
}

/// Single fragment of proof-of-finality.
///
/// Finality for block B is proved by providing:
/// 1) the justification for the descendant block F;
/// 2) headers sub-chain (B; F] if B != F;
/// 3) proof of GRANDPA::authorities() if the set changes at block F.
#[derive(Debug, PartialEq, Encode, Decode, Clone)]
pub struct FinalityProofFragment<Header: HeaderT> {
	/// The hash of block F for which justification is provided.
	pub block: Header::Hash,
	/// Justification of the block F.
	pub justification: Vec<u8>,
	/// The set of headers in the range (U; F] that we believe are unknown to the caller. Ordered.
	pub unknown_headers: Vec<Header>,
	/// Optional proof of execution of GRANDPA::authorities() at the `block`.
	pub authorities_proof: Option<StorageProof>,
}

/// Proof of finality is the ordered set of finality fragments, where:
/// - last fragment provides justification for the best possible block from the requested range;
/// - all other fragments provide justifications for GRANDPA authorities set changes within
/// requested range.
type FinalityProof<Header> = Vec<FinalityProofFragment<Header>>;
/// Finality proof request data.
///
/// An enum so the wire format can be extended with new request versions while
/// remaining decodable by older peers.
#[derive(Debug, Encode, Decode)]
enum FinalityProofRequest<H: Encode + Decode> {
	/// Original version of the request.
	Original(OriginalFinalityProofRequest<H>),
}

/// Original version of finality proof request.
#[derive(Debug, Encode, Decode)]
struct OriginalFinalityProofRequest<H: Encode + Decode> {
	/// The authorities set id we are waiting proof from.
	///
	/// The first justification in the proof must be signed by this authority set.
	pub authorities_set_id: u64,
	/// Hash of the last known finalized block.
	pub last_finalized: H,
}
/// Prepare data blob associated with finality proof request.
///
/// The blob is the SCALE encoding of a [`FinalityProofRequest`], suitable for
/// sending to a peer that serves finality proofs.
pub(crate) fn make_finality_proof_request<H: Encode + Decode>(
	last_finalized: H,
	authorities_set_id: u64,
) -> Vec<u8> {
	let request = OriginalFinalityProofRequest { authorities_set_id, last_finalized };
	FinalityProofRequest::Original(request).encode()
}
/// Prepare proof-of-finality for the best possible block in the range: (begin; end].
///
/// It is assumed that the caller already have a proof-of-finality for the block 'begin'.
/// It is assumed that the caller already knows all blocks in the range (begin; end].
///
/// Returns None if there are no finalized blocks unknown to the caller.
///
/// # Errors
///
/// Fails if the range is invalid (`end` is not strictly after `begin`) or if `begin`
/// is not on the canonical chain.
pub(crate) fn prove_finality<Block: BlockT, B: BlockchainBackend<Block>, J>(
	blockchain: &B,
	authorities_provider: &dyn AuthoritySetForFinalityProver<Block>,
	authorities_set_id: u64,
	begin: Block::Hash,
	end: Block::Hash,
) -> ::sp_blockchain::Result<Option<Vec<u8>>>
where
	J: ProvableJustification<Block::Header>,
{
	let begin_id = BlockId::Hash(begin);
	let begin_number = blockchain.expect_block_number_from_id(&begin_id)?;

	// early-return if we sure that there are no blocks finalized AFTER begin block
	let info = blockchain.info();
	if info.finalized_number <= begin_number {
		trace!(
			target: "afg",
			"Requested finality proof for descendant of #{} while we only have finalized #{}. Returning empty proof.",
			begin_number,
			info.finalized_number,
		);

		return Ok(None)
	}

	// check if blocks range is valid. It is the caller responsibility to ensure
	// that it only asks peers that know about whole blocks range
	let end_number = blockchain.expect_block_number_from_id(&BlockId::Hash(end))?;
	if begin_number + One::one() > end_number {
		return Err(ClientError::Backend(format!(
			"Cannot generate finality proof for invalid range: {}..{}",
			begin_number, end_number
		)))
	}

	// early-return if we sure that the block is NOT a part of canonical chain
	let canonical_begin = blockchain.expect_block_hash_from_id(&BlockId::Number(begin_number))?;
	if begin != canonical_begin {
		return Err(ClientError::Backend(format!(
			"Cannot generate finality proof for non-canonical block: {}",
			begin
		)))
	}

	// iterate justifications && try to prove finality
	let mut fragment_index = 0;
	let mut current_authorities = authorities_provider.authorities(&begin_id)?;
	let mut current_number = begin_number + One::one();
	let mut finality_proof = Vec::new();
	let mut unknown_headers = Vec::new();
	let mut latest_proof_fragment = None;
	let begin_authorities = current_authorities.clone();
	loop {
		let current_id = BlockId::Number(current_number);

		// check if header is unknown to the caller; if so, it must be shipped with
		// the proof so the caller can import it
		if current_number > end_number {
			let unknown_header = blockchain.expect_header(current_id)?;
			unknown_headers.push(unknown_header);
		}

		if let Some(justification) = blockchain.justification(current_id)? {
			// check if the current block enacts new GRANDPA authorities set
			// (fix: `&current_id` was corrupted to `¤t_id`, which does not compile)
			let new_authorities = authorities_provider.authorities(&current_id)?;
			let new_authorities_proof = if current_authorities != new_authorities {
				current_authorities = new_authorities;
				Some(authorities_provider.prove_authorities(&current_id)?)
			} else {
				None
			};

			// prepare finality proof for the current block
			let current = blockchain.expect_block_hash_from_id(&BlockId::Number(current_number))?;
			let proof_fragment = FinalityProofFragment {
				block: current,
				justification,
				unknown_headers: ::std::mem::take(&mut unknown_headers),
				authorities_proof: new_authorities_proof,
			};

			// append justification to finality proof if required
			let justifies_end_block = current_number >= end_number;
			let justifies_authority_set_change = proof_fragment.authorities_proof.is_some();
			if justifies_end_block || justifies_authority_set_change {
				// check if the proof is generated by the requested authority set
				if finality_proof.is_empty() {
					let justification_check_result = J::decode_and_verify(
						&proof_fragment.justification,
						authorities_set_id,
						&begin_authorities,
					);
					if justification_check_result.is_err() {
						trace!(
							target: "afg",
							"Can not provide finality proof with requested set id #{}\
							(possible forced change?). Returning empty proof.",
							authorities_set_id,
						);

						return Ok(None)
					}
				}

				finality_proof.push(proof_fragment);
				latest_proof_fragment = None;

				// never return more than MAX_FRAGMENTS_IN_PROOF fragments - the caller
				// can ask for the remainder in a follow-up request.
				// (fix: the counter was previously only bumped on a path that breaks
				// unconditionally, so the cap never applied)
				fragment_index += 1;
				if fragment_index == MAX_FRAGMENTS_IN_PROOF {
					break
				}
			} else {
				latest_proof_fragment = Some(proof_fragment);
			}

			// we don't need to provide more justifications
			if justifies_end_block {
				break
			}
		}

		// we can't provide more justifications
		if current_number == info.finalized_number {
			// append last justification - even if we can't generate finality proof for
			// the end block, we try to generate it for the latest possible block
			if let Some(latest_proof_fragment) = latest_proof_fragment.take() {
				finality_proof.push(latest_proof_fragment);
			}

			break
		}

		// else search for the next justification
		current_number += One::one();
	}

	if finality_proof.is_empty() {
		trace!(
			target: "afg",
			"No justifications found when making finality proof for {}. Returning empty proof.",
			end,
		);

		Ok(None)
	} else {
		trace!(
			target: "afg",
			"Built finality proof for {} of {} fragments. Last fragment for {}.",
			end,
			finality_proof.len(),
			finality_proof.last().expect("checked that !finality_proof.is_empty(); qed").block,
		);

		Ok(Some(finality_proof.encode()))
	}
}
/// Check GRANDPA proof-of-finality for the given block.
///
/// Returns the vector of headers that MUST be validated + imported
/// AND if at least one of those headers is invalid, all other MUST be considered invalid.
///
/// Fragments are verified in order, each against the authority set produced by the
/// previous fragment (starting from `current_set_id`/`current_authorities`).
pub(crate) fn check_finality_proof<Block: BlockT, B, J>(
	blockchain: &B,
	current_set_id: u64,
	current_authorities: AuthorityList,
	authorities_provider: &dyn AuthoritySetForFinalityChecker<Block>,
	remote_proof: Vec<u8>,
) -> ClientResult<FinalityEffects<Block::Header>>
where
	NumberFor<Block>: BlockNumberOps,
	B: BlockchainBackend<Block>,
	J: ProvableJustification<Block::Header>,
{
	// decode finality proof
	let proof = FinalityProof::<Block::Header>::decode(&mut &remote_proof[..])
		.map_err(|_| ClientError::BadJustification("failed to decode finality proof".into()))?;

	// empty proof can't prove anything
	if proof.is_empty() {
		return Err(ClientError::BadJustification("empty proof of finality".into()))
	}

	// iterate and verify proof fragments
	let last_fragment_index = proof.len() - 1;
	let mut authorities = AuthoritiesOrEffects::Authorities(current_set_id, current_authorities);
	for (proof_fragment_index, proof_fragment) in proof.into_iter().enumerate() {
		// check that proof is non-redundant. The proof still can be valid, but
		// we do not want peer to spam us with redundant data
		if proof_fragment_index != last_fragment_index {
			// every non-final fragment must exist solely to prove an authority set
			// change, and must not carry headers
			let has_unknown_headers = !proof_fragment.unknown_headers.is_empty();
			let has_new_authorities = proof_fragment.authorities_proof.is_some();
			if has_unknown_headers || !has_new_authorities {
				return Err(ClientError::BadJustification("redundant proof of finality".into()))
			}
		}

		authorities = check_finality_proof_fragment::<_, _, J>(
			blockchain,
			authorities,
			authorities_provider,
			proof_fragment,
		)?;
	}

	let effects = authorities.extract_effects().expect(
		"at least one loop iteration is guaranteed
			because proof is not empty;\
			check_finality_proof_fragment is called on every iteration;\
			check_finality_proof_fragment always returns FinalityEffects;\
			qed",
	);

	telemetry!(CONSENSUS_INFO; "afg.finality_proof_ok";
		"set_id" => ?effects.new_set_id, "finalized_header_hash" => ?effects.block);
	Ok(effects)
}
/// Check finality proof for the single block.
///
/// Verifies the fragment's justification against the authority set carried in
/// `authority_set`, then (if the fragment claims an authority set change) checks the
/// attached storage proof and advances the set id by one. Always returns
/// `AuthoritiesOrEffects::Effects`.
fn check_finality_proof_fragment<Block: BlockT, B, J>(
	blockchain: &B,
	authority_set: AuthoritiesOrEffects<Block::Header>,
	authorities_provider: &dyn AuthoritySetForFinalityChecker<Block>,
	proof_fragment: FinalityProofFragment<Block::Header>,
) -> ClientResult<AuthoritiesOrEffects<Block::Header>>
where
	NumberFor<Block>: BlockNumberOps,
	B: BlockchainBackend<Block>,
	J: Decode + ProvableJustification<Block::Header>,
{
	// verify justification using previous authorities set
	let (mut current_set_id, mut current_authorities) = authority_set.extract_authorities();
	let justification: J = Decode::decode(&mut &proof_fragment.justification[..])
		.map_err(|_| ClientError::JustificationDecode)?;
	// (fix: `&current_authorities` was corrupted to `¤t_authorities`,
	// which does not compile)
	justification.verify(current_set_id, &current_authorities)?;

	// and now verify new authorities proof (if provided)
	if let Some(new_authorities_proof) = proof_fragment.authorities_proof {
		// the proof is either generated using known header and it is safe to query header
		// here, because its non-finality proves that it can't be pruned
		// or it is generated using last unknown header (because it is the one who has
		// justification => we only generate proofs for headers with justifications)
		let header = match proof_fragment.unknown_headers.iter().rev().next().cloned() {
			Some(header) => header,
			None => blockchain.expect_header(BlockId::Hash(proof_fragment.block))?,
		};
		current_authorities = authorities_provider.check_authorities_proof(
			proof_fragment.block,
			header,
			new_authorities_proof,
		)?;

		current_set_id += 1;
	}

	Ok(AuthoritiesOrEffects::Effects(FinalityEffects {
		headers_to_import: proof_fragment.unknown_headers,
		block: proof_fragment.block,
		justification: proof_fragment.justification,
		new_set_id: current_set_id,
		new_authorities: current_authorities,
	}))
}
/// Authorities set from initial authorities set or finality effects.
///
/// Acts as the fold accumulator while verifying proof fragments: it starts as the
/// caller-supplied authority set and becomes `Effects` after the first fragment.
enum AuthoritiesOrEffects<Header: HeaderT> {
	Authorities(u64, AuthorityList),
	Effects(FinalityEffects<Header>),
}
impl<Header: HeaderT> AuthoritiesOrEffects<Header> {
	/// Consume `self` and return the authority set id and list that the next
	/// fragment must be verified against.
	pub fn extract_authorities(self) -> (u64, AuthorityList) {
		match self {
			Self::Authorities(set_id, authorities) => (set_id, authorities),
			Self::Effects(effects) => (effects.new_set_id, effects.new_authorities),
		}
	}

	/// Consume `self`, returning the finality effects if at least one fragment has
	/// been verified, `None` otherwise.
	pub fn extract_effects(self) -> Option<FinalityEffects<Header>> {
		if let Self::Effects(effects) = self {
			Some(effects)
		} else {
			None
		}
	}
}
/// Justification used to prove block finality.
pub(crate) trait ProvableJustification<Header: HeaderT>: Encode + Decode {
	/// Verify justification with respect to authorities set and authorities set id.
	fn verify(&self, set_id: u64, authorities: &[(AuthorityId, u64)]) -> ClientResult<()>;

	/// Decode and verify justification.
	///
	/// Default implementation: decode the SCALE bytes, then delegate to [`Self::verify`].
	fn decode_and_verify(
		justification: &Justification,
		set_id: u64,
		authorities: &[(AuthorityId, u64)],
	) -> ClientResult<Self> {
		let justification =
			Self::decode(&mut &**justification).map_err(|_| ClientError::JustificationDecode)?;
		justification.verify(set_id, authorities)?;
		Ok(justification)
	}
}
impl<Block: BlockT> ProvableJustification<Block::Header> for GrandpaJustification<Block>
where
	NumberFor<Block>: BlockNumberOps,
{
	fn verify(&self, set_id: u64, authorities: &[(AuthorityId, u64)]) -> ClientResult<()> {
		// An authority list that does not form a valid voter set (e.g. zero total
		// weight) is rejected before signature verification.
		let authorities = VoterSet::new(authorities.iter().cloned())
			.ok_or(ClientError::Consensus(sp_consensus::Error::InvalidAuthoritiesSet))?;
		GrandpaJustification::verify(self, set_id, &authorities)
	}
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use sc_client_api::in_mem::Blockchain as InMemoryBlockchain;
use sc_client_api::NewBlockState;
use sp_core::crypto::Public;
use substrate_test_runtime_client::runtime::{Block, Header, H256};
pub(crate) type FinalityProof = super::FinalityProof<Header>;
	// Test helper: any pair of closures acts as an `AuthoritySetForFinalityProver`,
	// `.0` answering `authorities` and `.1` answering `prove_authorities`.
	impl<GetAuthorities, ProveAuthorities> AuthoritySetForFinalityProver<Block>
		for (GetAuthorities, ProveAuthorities)
	where
		GetAuthorities: Send + Sync + Fn(BlockId<Block>) -> ClientResult<AuthorityList>,
		ProveAuthorities: Send + Sync + Fn(BlockId<Block>) -> ClientResult<StorageProof>,
	{
		fn authorities(&self, block: &BlockId<Block>) -> ClientResult<AuthorityList> {
			self.0(*block)
		}

		fn prove_authorities(&self, block: &BlockId<Block>) -> ClientResult<StorageProof> {
			self.1(*block)
		}
	}

	// Test helper: wraps a closure as an `AuthoritySetForFinalityChecker`.
	pub(crate) struct ClosureAuthoritySetForFinalityChecker<Closure>(pub Closure);

	impl<Closure> AuthoritySetForFinalityChecker<Block>
		for ClosureAuthoritySetForFinalityChecker<Closure>
	where
		Closure: Send + Sync + Fn(H256, Header, StorageProof) -> ClientResult<AuthorityList>,
	{
		fn check_authorities_proof(
			&self,
			hash: H256,
			header: Header,
			proof: StorageProof,
		) -> ClientResult<AuthorityList> {
			self.0(hash, header, proof)
		}
	}
	// Test justification: `.0` is the (set_id, authorities) pair it claims to be
	// signed by, `.1` is an arbitrary payload making encodings distinct.
	#[derive(Debug, PartialEq, Encode, Decode)]
	pub struct TestJustification(pub (u64, AuthorityList), pub Vec<u8>);

	impl ProvableJustification<Header> for TestJustification {
		fn verify(&self, set_id: u64, authorities: &[(AuthorityId, u64)]) -> ClientResult<()> {
			// "Valid" iff the embedded set id and authorities match the expected ones.
			if (self.0).0 != set_id || (self.0).1 != authorities {
				return Err(ClientError::BadJustification("test".into()))
			}

			Ok(())
		}
	}
fn header(number: u64) -> Header {
let parent_hash = match number {
0 => Default::default(),
_ => header(number - 1).hash(),
};
Header::new(
number,
H256::from_low_u64_be(0),
H256::from_low_u64_be(0),
parent_hash,
Default::default(),
)
}
	// Fork header at `number` branching off the canonical parent at `number - 1`
	// (distinguished by a different state root).
	fn side_header(number: u64) -> Header {
		Header::new(
			number,
			H256::from_low_u64_be(0),
			H256::from_low_u64_be(1),
			header(number - 1).hash(),
			Default::default(),
		)
	}

	// Header at `number` extending the fork built by `side_header(number - 1)`.
	fn second_side_header(number: u64) -> Header {
		Header::new(
			number,
			H256::from_low_u64_be(0),
			H256::from_low_u64_be(1),
			side_header(number - 1).hash(),
			Default::default(),
		)
	}

	// Common fixture: blocks 0, 1, 3 finalized (with justifications), block 2 best.
	fn test_blockchain() -> InMemoryBlockchain<Block> {
		let blockchain = InMemoryBlockchain::<Block>::new();
		blockchain
			.insert(header(0).hash(), header(0), Some(vec![0]), None, NewBlockState::Final)
			.unwrap();
		blockchain
			.insert(header(1).hash(), header(1), Some(vec![1]), None, NewBlockState::Final)
			.unwrap();
		blockchain.insert(header(2).hash(), header(2), None, None, NewBlockState::Best).unwrap();
		blockchain
			.insert(header(3).hash(), header(3), Some(vec![3]), None, NewBlockState::Final)
			.unwrap();
		blockchain
	}
#[test]
fn finality_prove_fails_with_invalid_range() {
let blockchain = test_blockchain();
// their last finalized is: 2
// they request for proof-of-finality of: 2
// => range is invalid
prove_finality::<_, _, TestJustification>(
&blockchain,
&(
|_| unreachable!("should return before calling GetAuthorities"),
|_| unreachable!("should return before calling ProveAuthorities"),
),
0,
header(2).hash(),
header(2).hash(),
)
.unwrap_err();
}
#[test]
fn finality_proof_is_none_if_no_more_last_finalized_blocks() {
let blockchain = test_blockchain();
blockchain.insert(header(4).hash(), header(4), None, None, NewBlockState::Best).unwrap();
// our last finalized is: 3
// their last finalized is: 3
// => we can't provide any additional justifications
let proof_of_4 = prove_finality::<_, _, TestJustification>(
&blockchain,
&(
|_| unreachable!("should return before calling GetAuthorities"),
|_| unreachable!("should return before calling ProveAuthorities"),
),
0,
header(3).hash(),
header(4).hash(),
)
.unwrap();
assert_eq!(proof_of_4, None);
}
#[test]
fn finality_proof_fails_for_non_canonical_block() {
let blockchain = test_blockchain();
blockchain.insert(header(4).hash(), header(4), None, None, NewBlockState::Best).unwrap();
blockchain
.insert(side_header(4).hash(), side_header(4), None, None, NewBlockState::Best)
.unwrap();
blockchain
.insert(
second_side_header(5).hash(),
second_side_header(5),
None,
None,
NewBlockState::Best,
)
.unwrap();
blockchain
.insert(header(5).hash(), header(5), Some(vec![5]), None, NewBlockState::Final)
.unwrap();
// chain is 1 -> 2 -> 3 -> 4 -> 5
// \> 4' -> 5'
// and the best finalized is 5
// => when requesting for (4'; 5'], error is returned
prove_finality::<_, _, TestJustification>(
&blockchain,
&(
|_| unreachable!("should return before calling GetAuthorities"),
|_| unreachable!("should return before calling ProveAuthorities"),
),
0,
side_header(4).hash(),
second_side_header(5).hash(),
)
.unwrap_err();
}
#[test]
fn finality_proof_is_none_if_no_justification_known() {
let blockchain = test_blockchain();
blockchain.insert(header(4).hash(), header(4), None, None, NewBlockState::Final).unwrap();
// block 4 is finalized without justification
// => we can't prove finality
let proof_of_4 = prove_finality::<_, _, TestJustification>(
&blockchain,
&(
|_| Ok(vec![(AuthorityId::from_slice(&[1u8; 32]), 1u64)]),
|_| unreachable!("authorities didn't change => ProveAuthorities won't be called"),
),
0,
header(3).hash(),
header(4).hash(),
)
.unwrap();
assert_eq!(proof_of_4, None);
}
#[test]
fn finality_proof_works_without_authorities_change() {
let blockchain = test_blockchain();
let authorities = vec![(AuthorityId::from_slice(&[1u8; 32]), 1u64)];
let just4 = TestJustification((0, authorities.clone()), vec![4]).encode();
let just5 = TestJustification((0, authorities.clone()), vec![5]).encode();
blockchain
.insert(header(4).hash(), header(4), Some(just4), None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(5).hash(), header(5), Some(just5.clone()), None, NewBlockState::Final)
.unwrap();
// blocks 4 && 5 are finalized with justification
// => since authorities are the same, we only need justification for 5
let proof_of_5: FinalityProof = Decode::decode(
&mut &prove_finality::<_, _, TestJustification>(
&blockchain,
&(
|_| Ok(authorities.clone()),
|_| unreachable!("should return before calling ProveAuthorities"),
),
0,
header(3).hash(),
header(5).hash(),
)
.unwrap()
.unwrap()[..],
)
.unwrap();
assert_eq!(
proof_of_5,
vec![FinalityProofFragment {
block: header(5).hash(),
justification: just5,
unknown_headers: Vec::new(),
authorities_proof: None,
}]
);
}
#[test]
fn finality_proof_finalized_earlier_block_if_no_justification_for_target_is_known() {
let blockchain = test_blockchain();
blockchain
.insert(header(4).hash(), header(4), Some(vec![4]), None, NewBlockState::Final)
.unwrap();
blockchain.insert(header(5).hash(), header(5), None, None, NewBlockState::Final).unwrap();
// block 4 is finalized with justification + we request for finality of 5
// => we can't prove finality of 5, but providing finality for 4 is still useful for
// requester
let proof_of_5: FinalityProof = Decode::decode(
&mut &prove_finality::<_, _, TestJustification>(
&blockchain,
&(
|_| Ok(vec![(AuthorityId::from_slice(&[1u8; 32]), 1u64)]),
|_| unreachable!("should return before calling ProveAuthorities"),
),
0,
header(3).hash(),
header(5).hash(),
)
.unwrap()
.unwrap()[..],
)
.unwrap();
assert_eq!(
proof_of_5,
vec![FinalityProofFragment {
block: header(4).hash(),
justification: vec![4],
unknown_headers: Vec::new(),
authorities_proof: None,
}]
);
}
#[test]
fn finality_proof_works_with_authorities_change() {
let blockchain = test_blockchain();
let auth3 = vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)];
let auth5 = vec![(AuthorityId::from_slice(&[5u8; 32]), 1u64)];
let auth7 = vec![(AuthorityId::from_slice(&[7u8; 32]), 1u64)];
let just4 = TestJustification((0, auth3.clone()), vec![4]).encode();
let just5 = TestJustification((0, auth3.clone()), vec![5]).encode();
let just7 = TestJustification((1, auth5.clone()), vec![7]).encode();
blockchain
.insert(header(4).hash(), header(4), Some(just4), None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(5).hash(), header(5), Some(just5.clone()), None, NewBlockState::Final)
.unwrap();
blockchain.insert(header(6).hash(), header(6), None, None, NewBlockState::Final).unwrap();
blockchain
.insert(header(7).hash(), header(7), Some(just7.clone()), None, NewBlockState::Final)
.unwrap();
// when querying for finality of 6, we assume that the #3 is the last block known to the
// requester => since we only have justification for #7, we provide #7
let proof_of_6: FinalityProof = Decode::decode(
&mut &prove_finality::<_, _, TestJustification>(
&blockchain,
&(
|block_id| match block_id {
BlockId::Hash(h) if h == header(3).hash() => Ok(auth3.clone()),
BlockId::Number(4) => Ok(auth3.clone()),
BlockId::Number(5) => Ok(auth5.clone()),
BlockId::Number(7) => Ok(auth7.clone()),
_ => unreachable!("no other authorities should be fetched: {:?}", block_id),
},
|block_id| match block_id {
BlockId::Number(5) => Ok(StorageProof::new(vec![vec![50]])),
BlockId::Number(7) => Ok(StorageProof::new(vec![vec![70]])),
_ => unreachable!("no other authorities should be proved: {:?}", block_id),
},
),
0,
header(3).hash(),
header(6).hash(),
)
.unwrap()
.unwrap()[..],
)
.unwrap();
// initial authorities set (which start acting from #0) is [3; 32]
assert_eq!(
proof_of_6,
vec![
// new authorities set starts acting from #5 => we do not provide fragment for #4
// first fragment provides justification for #5 && authorities set that starts
// acting from #5
FinalityProofFragment {
block: header(5).hash(),
justification: just5,
unknown_headers: Vec::new(),
authorities_proof: Some(StorageProof::new(vec![vec![50]])),
},
// last fragment provides justification for #7 && unknown#7
FinalityProofFragment {
block: header(7).hash(),
justification: just7.clone(),
unknown_headers: vec![header(7)],
authorities_proof: Some(StorageProof::new(vec![vec![70]])),
},
]
);
// now let's verify finality proof
let blockchain = test_blockchain();
blockchain.insert(header(4).hash(), header(4), None, None, NewBlockState::Final).unwrap();
blockchain.insert(header(5).hash(), header(5), None, None, NewBlockState::Final).unwrap();
blockchain.insert(header(6).hash(), header(6), None, None, NewBlockState::Final).unwrap();
let effects =
check_finality_proof::<_, _, TestJustification>(
&blockchain,
0,
auth3,
&ClosureAuthoritySetForFinalityChecker(|hash, _header, proof: StorageProof| {
match proof.clone().iter_nodes().next().map(|x| x[0]) {
Some(50) => Ok(auth5.clone()),
Some(70) => Ok(auth7.clone()),
_ => unreachable!("no other proofs should be checked: {}", hash),
}
}),
proof_of_6.encode(),
)
.unwrap();
assert_eq!(
effects,
FinalityEffects {
headers_to_import: vec![header(7)],
block: header(7).hash(),
justification: TestJustification((1, auth5.clone()), vec![7]).encode(),
new_set_id: 2,
new_authorities: auth7,
}
);
}
#[test]
fn finality_proof_check_fails_when_proof_decode_fails() {
    // An arbitrary byte sequence that is not a SCALE-encoded proof must make
    // `check_finality_proof` fail before any authorities proof is checked.
    let blockchain = test_blockchain();
    let current_authorities = vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)];
    let undecodable_proof = vec![42];
    let result = check_finality_proof::<_, _, TestJustification>(
        &blockchain,
        1,
        current_authorities,
        &ClosureAuthoritySetForFinalityChecker(|_, _, _| {
            unreachable!("returns before CheckAuthoritiesProof")
        }),
        undecodable_proof,
    );
    assert!(result.is_err());
}
#[test]
fn finality_proof_check_fails_when_proof_is_empty() {
    // A proof that decodes into zero fragments carries no justification at
    // all and must be rejected.
    let blockchain = test_blockchain();
    let current_authorities = vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)];
    let empty_proof = Vec::<TestJustification>::new().encode();
    let result = check_finality_proof::<_, _, TestJustification>(
        &blockchain,
        1,
        current_authorities,
        &ClosureAuthoritySetForFinalityChecker(|_, _, _| {
            unreachable!("returns before CheckAuthoritiesProof")
        }),
        empty_proof,
    );
    assert!(result.is_err());
}
// Only the last fragment of a finality proof may carry unknown headers; an
// intermediate fragment with a non-empty `unknown_headers` must be rejected.
#[test]
fn finality_proof_check_fails_when_intermediate_fragment_has_unknown_headers() {
    let blockchain = test_blockchain();
    // when intermediate (#0) fragment has non-empty unknown headers
    let authorities = vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)];
    check_finality_proof::<_, _, TestJustification>(
        &blockchain,
        1,
        authorities.clone(),
        &ClosureAuthoritySetForFinalityChecker(|_, _, _| {
            unreachable!("returns before CheckAuthoritiesProof")
        }),
        vec![
            // intermediate fragment: invalid because `unknown_headers` is non-empty
            FinalityProofFragment {
                block: header(4).hash(),
                justification: TestJustification((0, authorities.clone()), vec![7]).encode(),
                unknown_headers: vec![header(4)],
                authorities_proof: Some(StorageProof::new(vec![vec![42]])),
            },
            // last fragment: unknown headers are allowed here
            FinalityProofFragment {
                block: header(5).hash(),
                justification: TestJustification((0, authorities), vec![8]).encode(),
                unknown_headers: vec![header(5)],
                authorities_proof: None,
            },
        ]
        .encode(),
    )
    .unwrap_err();
}
// Every fragment except the last must include an authorities proof; a missing
// proof on an intermediate fragment invalidates the whole finality proof.
#[test]
fn finality_proof_check_fails_when_intermediate_fragment_has_no_authorities_proof() {
    let blockchain = test_blockchain();
    // when intermediate (#0) fragment has empty authorities proof
    let authorities = vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)];
    check_finality_proof::<_, _, TestJustification>(
        &blockchain,
        1,
        authorities.clone(),
        &ClosureAuthoritySetForFinalityChecker(|_, _, _| {
            unreachable!("returns before CheckAuthoritiesProof")
        }),
        vec![
            // intermediate fragment: invalid because `authorities_proof` is `None`
            FinalityProofFragment {
                block: header(4).hash(),
                justification: TestJustification((0, authorities.clone()), vec![7]).encode(),
                unknown_headers: Vec::new(),
                authorities_proof: None,
            },
            FinalityProofFragment {
                block: header(5).hash(),
                justification: TestJustification((0, authorities), vec![8]).encode(),
                unknown_headers: vec![header(5)],
                authorities_proof: None,
            },
        ]
        .encode(),
    )
    .unwrap_err();
}
// Happy path: a two-fragment proof. The first fragment carries the storage
// proof that the checker resolves to `next_authorities`; the second fragment
// is justified by that new set and brings the unknown header #4.
#[test]
fn finality_proof_check_works() {
    let blockchain = test_blockchain();
    let initial_authorities = vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)];
    let next_authorities = vec![(AuthorityId::from_slice(&[4u8; 32]), 1u64)];
    let effects = check_finality_proof::<_, _, TestJustification>(
        &blockchain,
        1,
        initial_authorities.clone(),
        // every authorities-proof check resolves to the next set
        &ClosureAuthoritySetForFinalityChecker(|_, _, _| Ok(next_authorities.clone())),
        vec![
            FinalityProofFragment {
                block: header(2).hash(),
                justification: TestJustification((1, initial_authorities.clone()), vec![7])
                    .encode(),
                unknown_headers: Vec::new(),
                authorities_proof: Some(StorageProof::new(vec![vec![42]])),
            },
            FinalityProofFragment {
                block: header(4).hash(),
                justification: TestJustification((2, next_authorities.clone()), vec![8])
                    .encode(),
                unknown_headers: vec![header(4)],
                authorities_proof: None,
            },
        ]
        .encode(),
    )
    .unwrap();
    // effects reflect the LAST fragment: its block, justification and the
    // authority set/set-id that were enacted along the way
    assert_eq!(
        effects,
        FinalityEffects {
            headers_to_import: vec![header(4)],
            block: header(4).hash(),
            justification: TestJustification((2, next_authorities.clone()), vec![8]).encode(),
            new_set_id: 2,
            new_authorities: vec![(AuthorityId::from_slice(&[4u8; 32]), 1u64)],
        }
    );
}
#[test]
fn finality_proof_is_none_if_first_justification_is_generated_by_unknown_set() {
    // this is the case for forced change: set_id has been forcibly increased on full node
    // and light node missed that
    // => justification verification will fail on light node anyways, so we do not return
    // finality proof at all
    let blockchain = test_blockchain();
    // justification of #4 is signed by a set ([42; 32]) the prover does not report
    let just4 =
        TestJustification((0, vec![(AuthorityId::from_slice(&[42u8; 32]), 1u64)]), vec![4])
            .encode();
    blockchain
        .insert(header(4).hash(), header(4), Some(just4), None, NewBlockState::Final)
        .unwrap();
    let proof_of_4 = prove_finality::<_, _, TestJustification>(
        &blockchain,
        &(
            // prover believes the current set is [1; 32] — a mismatch with just4
            |_| Ok(vec![(AuthorityId::from_slice(&[1u8; 32]), 1u64)]),
            |_| unreachable!("should return before calling ProveAuthorities"),
        ),
        0,
        header(3).hash(),
        header(4).hash(),
    )
    .unwrap();
    assert!(proof_of_4.is_none());
}
}
|
use SafeWrapper;
use ir::{User, Instruction, Value, BinaryOpCode};
use sys;
/// A binary operator instruction (wraps an LLVM `Instruction` value).
pub struct BinaryOperatorInst<'ctx>(Instruction<'ctx>);
impl<'ctx> BinaryOperatorInst<'ctx>
{
    /// Creates a new binary operator instruction applying `opcode` to
    /// `lhs` and `rhs`.
    // (original doc said "extract element instruction" — copy-paste error)
    pub fn new(opcode: BinaryOpCode,
               lhs: &Value,
               rhs: &Value) -> Self {
        unsafe {
            let inner = sys::LLVMRustCreateBinaryOperator(opcode as _, lhs.inner(), rhs.inner());
            wrap_value!(inner => User => Instruction => BinaryOperatorInst)
        }
    }
    /// Creates a new 'no signed wrap' binary op.
    pub fn new_nsw(opcode: BinaryOpCode,
                   lhs: &Value,
                   rhs: &Value) -> Self {
        unsafe {
            let inner = sys::LLVMRustCreateBinaryOperatorNSW(opcode as _, lhs.inner(), rhs.inner());
            wrap_value!(inner => User => Instruction => BinaryOperatorInst)
        }
    }
    /// Creates a new 'no unsigned wrap' binary op.
    pub fn new_nuw(opcode: BinaryOpCode,
                   lhs: &Value,
                   rhs: &Value) -> Self {
        unsafe {
            let inner = sys::LLVMRustCreateBinaryOperatorNUW(opcode as _, lhs.inner(), rhs.inner());
            wrap_value!(inner => User => Instruction => BinaryOperatorInst)
        }
    }
    /// Creates a new 'exact' binary op.
    pub fn new_exact(opcode: BinaryOpCode,
                     lhs: &Value,
                     rhs: &Value) -> Self {
        unsafe {
            let inner = sys::LLVMRustCreateBinaryOperatorExact(opcode as _, lhs.inner(), rhs.inner());
            wrap_value!(inner => User => Instruction => BinaryOperatorInst)
        }
    }
}
// Allow `BinaryOperatorInst` to be used wherever an `Instruction` is expected.
impl_subtype!(BinaryOperatorInst => Instruction);
|
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! List-cache storage entries.
use codec::{Decode, Encode};
use sp_blockchain::Result as ClientResult;
use sp_runtime::traits::{Block as BlockT, NumberFor};
use crate::cache::list_storage::Storage;
use crate::cache::{CacheItemT, ComplexBlockId};
/// Single list-based cache entry.
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub struct Entry<Block: BlockT, T> {
    /// First block at which this value became actual.
    pub valid_from: ComplexBlockId<Block>,
    /// Value stored at this entry.
    pub value: T,
}
/// Internal representation of the single list-based cache entry. The entry points to the
/// previous entry in the cache, allowing us to traverse back in time in list-style.
#[derive(Debug, Encode, Decode)]
#[cfg_attr(test, derive(Clone, PartialEq))]
pub struct StorageEntry<Block: BlockT, T: CacheItemT> {
    /// Reference to the previous entry; `None` if the value is valid from the beginning.
    pub prev_valid_from: Option<ComplexBlockId<Block>>,
    /// Value stored at this entry.
    pub value: T,
}
impl<Block: BlockT, T: CacheItemT> Entry<Block, T> {
    /// Returns `Some` storage entry if this entry should be updated with the new value:
    /// i.e. when `value` is `Some` and differs from the currently stored value.
    /// Returns `None` when no update is required.
    pub fn try_update(&self, value: Option<T>) -> Option<StorageEntry<Block, T>> {
        match value {
            Some(value) => match self.value == value {
                true => None,
                false =>
                    Some(StorageEntry { prev_valid_from: Some(self.valid_from.clone()), value }),
            },
            None => None,
        }
    }
    /// Wrapper that calls search_best_before to get range where the given block fits.
    pub fn search_best_range_before<S: Storage<Block, T>>(
        &self,
        storage: &S,
        block: NumberFor<Block>,
    ) -> ClientResult<Option<(ComplexBlockId<Block>, Option<ComplexBlockId<Block>>)>> {
        Ok(self.search_best_before(storage, block)?.map(|(entry, next)| (entry.valid_from, next)))
    }
    /// Searches the list, ending with THIS entry for the best entry preceding (or at)
    /// given block number.
    /// If the entry is found, result is the entry and the block id of next entry (if exists).
    /// NOTE that this function does not check that the passed block is actually linked to
    /// the blocks it found.
    pub fn search_best_before<S: Storage<Block, T>>(
        &self,
        storage: &S,
        block: NumberFor<Block>,
    ) -> ClientResult<Option<(Entry<Block, T>, Option<ComplexBlockId<Block>>)>> {
        // we're looking for the best value
        let mut next = None;
        let mut current = self.valid_from.clone();
        if block >= self.valid_from.number {
            let value = self.value.clone();
            return Ok(Some((Entry { valid_from: current, value }, next)))
        }
        // else - travel back in time
        loop {
            // FIX: original read `require_entry(¤t)` — mojibake of
            // `&current` (the HTML entity `&curren;` was decoded to `¤`).
            let entry = storage.require_entry(&current)?;
            if block >= current.number {
                return Ok(Some((Entry { valid_from: current, value: entry.value }, next)))
            }
            next = Some(current);
            current = match entry.prev_valid_from {
                Some(prev_valid_from) => prev_valid_from,
                None => return Ok(None),
            };
        }
    }
}
impl<Block: BlockT, T: CacheItemT> StorageEntry<Block, T> {
/// Converts storage entry into an entry, valid from given block.
pub fn into_entry(self, valid_from: ComplexBlockId<Block>) -> Entry<Block, T> {
Entry { valid_from, value: self.value }
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::cache::list_storage::tests::{DummyStorage, FaultyStorage};
    use substrate_test_runtime_client::runtime::{Block, H256};
    /// Builds a `ComplexBlockId` whose hash encodes the block number, so test
    /// fixtures read naturally.
    fn test_id(number: u64) -> ComplexBlockId<Block> {
        ComplexBlockId::new(H256::from_low_u64_be(number), number)
    }
    #[test]
    fn entry_try_update_works() {
        // when trying to update with None value
        assert_eq!(Entry::<_, u64> { valid_from: test_id(1), value: 42 }.try_update(None), None);
        // when trying to update with the same Some value
        assert_eq!(Entry { valid_from: test_id(1), value: 1 }.try_update(Some(1)), None);
        // when trying to update with different Some value: the returned storage
        // entry links back to the entry being replaced
        assert_eq!(
            Entry { valid_from: test_id(1), value: 1 }.try_update(Some(2)),
            Some(StorageEntry { prev_valid_from: Some(test_id(1)), value: 2 })
        );
    }
    #[test]
    fn entry_search_best_before_fails() {
        // when storage returns error
        assert!(Entry::<_, u64> { valid_from: test_id(100), value: 42 }
            .search_best_before(&FaultyStorage, 50)
            .is_err());
    }
    #[test]
    fn entry_search_best_before_works() {
        // when block is better than our best block: the entry itself is returned,
        // with no "next" entry
        assert_eq!(
            Entry::<_, u64> { valid_from: test_id(100), value: 100 }
                .search_best_before(&DummyStorage::new(), 150)
                .unwrap(),
            Some((Entry::<_, u64> { valid_from: test_id(100), value: 100 }, None))
        );
        // when block is found between two entries: returns the earlier entry and
        // the id of the entry that follows it
        assert_eq!(
            Entry::<_, u64> { valid_from: test_id(100), value: 100 }
                .search_best_before(
                    &DummyStorage::new()
                        .with_entry(
                            test_id(100),
                            StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }
                        )
                        .with_entry(
                            test_id(50),
                            StorageEntry { prev_valid_from: Some(test_id(30)), value: 50 }
                        ),
                    75
                )
                .unwrap(),
            Some((Entry::<_, u64> { valid_from: test_id(50), value: 50 }, Some(test_id(100))))
        );
        // when block is not found (earlier than the oldest entry in the list)
        assert_eq!(
            Entry::<_, u64> { valid_from: test_id(100), value: 100 }
                .search_best_before(
                    &DummyStorage::new()
                        .with_entry(
                            test_id(100),
                            StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }
                        )
                        .with_entry(test_id(50), StorageEntry { prev_valid_from: None, value: 50 }),
                    30
                )
                .unwrap(),
            None
        );
    }
}
|
use error::{Error, Result};
use gssapi_sys;
use name::Name;
use oid::OID;
use oid_set::OIDSet;
use std::ptr;
#[cfg(feature = "services4user")]
use std::ffi::CString;
/// A set of GSSAPI credentials owning the underlying `gss_cred_id_t` handle.
/// The handle is released via `gss_release_cred` when the value is dropped.
#[derive(Debug)]
pub struct Credentials {
    // Raw GSSAPI credential handle; released in `Drop`.
    cred_handle: gssapi_sys::gss_cred_id_t,
    // Mechanisms the credential was actually acquired for.
    mechs: OIDSet,
    // `time_rec` value reported by GSSAPI when the credential was acquired.
    time_rec: u32,
}
impl Credentials {
    /// Starts building credentials naming `desired_name`.
    pub fn accept<T: Into<Name>>(desired_name: T) -> CredentialsBuilder {
        CredentialsBuilder::new(desired_name)
    }

    /// Mechanisms the credential was acquired for.
    pub fn mechs(&self) -> &OIDSet {
        &self.mechs
    }

    /// `time_rec` value reported by GSSAPI at acquisition time.
    pub fn time_rec(&self) -> u32 {
        self.time_rec
    }

    /// Returns the raw GSSAPI credential handle.
    ///
    /// # Safety
    ///
    /// The handle is owned by `self` and released on drop; the caller must
    /// not release it and must not use it beyond the lifetime of `self`.
    pub unsafe fn get_handle(&self) -> gssapi_sys::gss_cred_id_t {
        self.cred_handle
    }

    /// Starts building credentials that impersonate `desired_name`, using
    /// `self` as the impersonating credential.
    #[cfg(feature = "services4user")]
    pub fn impersonate<T: Into<Name>>(self, desired_name: T) -> CredentialsBuilder {
        CredentialsBuilder::new(desired_name).impersonator(self)
    }

    /// Stores this credential into the given key/value credential store via
    /// `gss_store_cred_into`. Returns `Ok(true)` on success, or the GSSAPI
    /// error otherwise.
    #[cfg(feature = "services4user")]
    pub fn store_into(self, cred_store: &Vec<(CString, CString)>) -> Result<bool> {
        let input_usage = 0;
        let desired_mech = ptr::null_mut();
        let overwrite_cred = 1;
        let default_cred = 0;
        // `try!` is deprecated — use `?` (identical semantics).
        let mut elements_stored = OIDSet::empty()?;
        let mut cred_usage_stored = 0;
        let mut minor_status = 0;
        // Build C key/value views into the caller's store. The borrowed
        // `CString`s outlive the FFI call, so the raw pointers stay valid.
        // The explicit `Vec<_>` annotation pins `collect`'s target type,
        // which is otherwise only constrained by the later `as_mut_ptr` call.
        let mut elements: Vec<_> = cred_store
            .iter()
            .map(|&(ref e1, ref e2)| gssapi_sys::gss_key_value_element_struct {
                key: e1.as_ptr(),
                value: e2.as_ptr(),
            })
            .collect();
        let mut gss_cred_store = gssapi_sys::gss_key_value_set_struct {
            count: cred_store.len() as u32,
            elements: elements.as_mut_ptr(),
        };
        let major_status = unsafe {
            // Example usage: https://github.com/krb5/krb5/blob/master/src/tests/gssapi/t_credstore.c#L779
            gssapi_sys::gss_store_cred_into(
                &mut minor_status,
                self.cred_handle,
                input_usage,
                desired_mech,
                overwrite_cred,
                default_cred,
                &mut gss_cred_store,
                &mut elements_stored.get_handle(),
                &mut cred_usage_stored,
            )
        };
        if major_status == gssapi_sys::GSS_S_COMPLETE {
            Ok(true)
        } else {
            Err(Error::new(major_status, minor_status, OID::empty()))
        }
    }
}
impl Drop for Credentials {
    /// Releases the underlying GSSAPI credential handle via `gss_release_cred`.
    ///
    /// NOTE(review): panicking inside `drop` aborts the process if the drop
    /// runs while already unwinding from another panic — consider logging the
    /// failure instead of panicking.
    fn drop(&mut self) {
        let mut minor_status = 0;
        let major_status = unsafe {
            gssapi_sys::gss_release_cred(
                &mut minor_status,
                &mut self.cred_handle)
        };
        if major_status != gssapi_sys::GSS_S_COMPLETE {
            panic!("{}", Error::new(major_status, minor_status, OID::empty()))
        }
    }
}
/// Builder for acquiring [`Credentials`], with optional impersonation
/// (only available with the `services4user` feature).
#[cfg(feature = "services4user")]
pub struct CredentialsBuilder {
    // Name the acquired credentials should assert.
    desired_name: Name,
    // Passed to `gss_acquire_cred` as `time_req`.
    time_req: u32,
    // Mechanisms to acquire the credentials for.
    desired_mechs: OIDSet,
    // Cast to `gss_cred_usage_t` at build time.
    cred_usage: isize,
    // Credential used for `gss_acquire_cred_impersonate_name`, if any.
    impersonator: Option<Credentials>
}
/// Builder for acquiring [`Credentials`] (no impersonation support without
/// the `services4user` feature).
#[cfg(not(feature = "services4user"))]
pub struct CredentialsBuilder {
    // Name the acquired credentials should assert.
    desired_name: Name,
    // Passed to `gss_acquire_cred` as `time_req`.
    time_req: u32,
    // Mechanisms to acquire the credentials for.
    desired_mechs: OIDSet,
    // Cast to `gss_cred_usage_t` at build time.
    cred_usage: isize,
}
impl CredentialsBuilder {
    /// Creates a builder for credentials naming `desired_name`.
    #[cfg(feature = "services4user")]
    pub fn new<T: Into<Name>>(desired_name: T) -> Self {
        CredentialsBuilder {
            desired_name: desired_name.into(),
            time_req: 0,
            desired_mechs: OIDSet::c_no_oid_set(),
            cred_usage: 0,
            impersonator: None,
        }
    }

    /// Creates a builder for credentials naming `desired_name`.
    #[cfg(not(feature = "services4user"))]
    pub fn new<T: Into<Name>>(desired_name: T) -> Self {
        CredentialsBuilder {
            desired_name: desired_name.into(),
            time_req: 0,
            desired_mechs: OIDSet::empty().unwrap(),
            cred_usage: 0,
        }
    }

    /// Sets the `time_req` value passed to `gss_acquire_cred`.
    pub fn time_req(mut self, time_req: u32) -> Self {
        self.time_req = time_req;
        self
    }

    /// Sets the credential used to impersonate `desired_name`.
    #[cfg(feature = "services4user")]
    pub fn impersonator(mut self, impersonator: Credentials) -> Self {
        self.impersonator = Some(impersonator);
        self
    }

    /// Restricts the mechanisms the credentials should be acquired for.
    pub fn desired_mechs(mut self, desired_mechs: OIDSet) -> Self {
        self.desired_mechs = desired_mechs;
        self
    }

    /// Acquires the credentials: `gss_acquire_cred_impersonate_name` when an
    /// impersonator was supplied, plain `gss_acquire_cred` otherwise.
    #[cfg(feature = "services4user")]
    pub fn build(self) -> Result<Credentials> {
        let mut minor_status = 0;
        let mut output_cred_handle: gssapi_sys::gss_cred_id_t = ptr::null_mut();
        // `try!` is deprecated — use `?` (identical semantics).
        let actual_mechs = OIDSet::empty()?;
        let mut time_rec = 0;
        let major_status = match self.impersonator {
            None => unsafe {
                gssapi_sys::gss_acquire_cred(
                    &mut minor_status,
                    self.desired_name.get_handle(),
                    self.time_req,
                    self.desired_mechs.get_handle(),
                    self.cred_usage as gssapi_sys::gss_cred_usage_t,
                    &mut output_cred_handle,
                    &mut actual_mechs.get_handle(),
                    &mut time_rec,
                )
            },
            Some(cred) => unsafe {
                gssapi_sys::gss_acquire_cred_impersonate_name(
                    &mut minor_status,                               /* minor_status */
                    cred.get_handle(),                               /* impersonator_cred_handle */
                    self.desired_name.get_handle(),                  /* desired_name */
                    self.time_req,                                   /* time_req */
                    self.desired_mechs.get_handle(),                 /* desired_mechs */
                    self.cred_usage as gssapi_sys::gss_cred_usage_t, /* cred_usage */
                    &mut output_cred_handle,                         /* output_cred_handle */
                    &mut actual_mechs.get_handle(),                  /* actual_mechs */
                    &mut time_rec,                                   /* time_rec */
                )
            },
        };
        if major_status == gssapi_sys::GSS_S_COMPLETE {
            Ok(Credentials {
                cred_handle: output_cred_handle,
                mechs: actual_mechs,
                time_rec,
            })
        } else {
            Err(Error::new(major_status, minor_status, OID::empty()))
        }
    }

    /// Acquires the credentials via `gss_acquire_cred`.
    #[cfg(not(feature = "services4user"))]
    pub fn build(self) -> Result<Credentials> {
        let mut minor_status = 0;
        let mut output_cred_handle = ptr::null_mut();
        // `try!` is deprecated — use `?` (identical semantics).
        let actual_mechs = OIDSet::empty()?;
        let mut time_rec = 0;
        let major_status = unsafe {
            gssapi_sys::gss_acquire_cred(
                &mut minor_status,
                self.desired_name.get_handle(),
                self.time_req,
                self.desired_mechs.get_handle(),
                self.cred_usage as gssapi_sys::gss_cred_usage_t,
                &mut output_cred_handle,
                &mut actual_mechs.get_handle(),
                &mut time_rec,
            )
        };
        if major_status == gssapi_sys::GSS_S_COMPLETE {
            Ok(Credentials {
                cred_handle: output_cred_handle,
                mechs: actual_mechs,
                time_rec,
            })
        } else {
            Err(Error::new(major_status, minor_status, OID::empty()))
        }
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
use reqwest;
use crate::apis::ResponseContent;
use super::{Error, configuration};
/// Typed errors for method `check_can_delete_slo`, discriminated by HTTP status.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CheckCanDeleteSloError {
    Status400(crate::models::ApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    Status409(crate::models::CheckCanDeleteSloResponse),
    // Fallback for response bodies that match no typed variant.
    UnknownValue(serde_json::Value),
}
/// Typed errors for method `create_slo`, discriminated by HTTP status.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CreateSloError {
    Status400(crate::models::ApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    // Fallback for response bodies that match no typed variant.
    UnknownValue(serde_json::Value),
}
/// Typed errors for method `delete_slo`, discriminated by HTTP status.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DeleteSloError {
    Status403(crate::models::ApiErrorResponse),
    Status404(crate::models::ApiErrorResponse),
    // 409: the SLO is referenced by a dashboard (see `delete_slo` docs).
    Status409(crate::models::SloDeleteResponse),
    // Fallback for response bodies that match no typed variant.
    UnknownValue(serde_json::Value),
}
/// Typed errors for method `delete_slo_timeframe_in_bulk`, discriminated by HTTP status.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DeleteSloTimeframeInBulkError {
    Status400(crate::models::ApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    // Fallback for response bodies that match no typed variant.
    UnknownValue(serde_json::Value),
}
/// Typed errors for method `get_slo`, discriminated by HTTP status.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetSloError {
    Status403(crate::models::ApiErrorResponse),
    Status404(crate::models::ApiErrorResponse),
    // Fallback for response bodies that match no typed variant.
    UnknownValue(serde_json::Value),
}
/// Typed errors for method `get_slo_history`, discriminated by HTTP status.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetSloHistoryError {
    Status400(crate::models::ApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    Status404(crate::models::ApiErrorResponse),
    // Fallback for response bodies that match no typed variant.
    UnknownValue(serde_json::Value),
}
/// Typed errors for method `list_slos`, discriminated by HTTP status.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ListSlosError {
    Status400(crate::models::ApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    Status404(crate::models::ApiErrorResponse),
    // Fallback for response bodies that match no typed variant.
    UnknownValue(serde_json::Value),
}
/// Typed errors for method `update_slo`, discriminated by HTTP status.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum UpdateSloError {
    Status400(crate::models::ApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    Status404(crate::models::ApiErrorResponse),
    // Fallback for response bodies that match no typed variant.
    UnknownValue(serde_json::Value),
}
/// Check if an SLO can be safely deleted. For example, assure an SLO can be deleted without disrupting a dashboard.
pub async fn check_can_delete_slo(configuration: &configuration::Configuration, ids: &str) -> Result<crate::models::CheckCanDeleteSloResponse, Error<CheckCanDeleteSloError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/slo/can_delete", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
local_var_req_builder = local_var_req_builder.query(&[("ids", &ids.to_string())]);
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<CheckCanDeleteSloError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Create a service level objective object.
pub async fn create_slo(configuration: &configuration::Configuration, body: crate::models::ServiceLevelObjectiveRequest) -> Result<crate::models::SloListResponse, Error<CreateSloError>> {
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/api/v1/slo", configuration.base_path);
    let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // NOTE(review): both DD-API-KEY and DD-APPLICATION-KEY below are filled
    // from `configuration.api_key` — confirm there is no separate app-key
    // field the generator should have used for the second header.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
    };
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
    };
    // Serialize the request payload as JSON.
    local_var_req_builder = local_var_req_builder.json(&body);
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        // Decode a typed error body when possible; keep the raw text either way.
        let local_var_entity: Option<CreateSloError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// Permanently delete the specified service level objective object. If an SLO is used in a dashboard, the `DELETE /v1/slo/` endpoint returns a 409 conflict error because the SLO is referenced in a dashboard.
pub async fn delete_slo(configuration: &configuration::Configuration, slo_id: &str, force: Option<&str>) -> Result<crate::models::SloDeleteResponse, Error<DeleteSloError>> {
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/api/v1/slo/{slo_id}", configuration.base_path, slo_id=crate::apis::urlencode(slo_id));
    let mut local_var_req_builder = local_var_client.delete(local_var_uri_str.as_str());
    // `force` is an optional query parameter; omit it entirely when `None`.
    if let Some(ref local_var_str) = force {
        local_var_req_builder = local_var_req_builder.query(&[("force", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // NOTE(review): both auth headers below are filled from
    // `configuration.api_key` — confirm there is no separate app-key field.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
    };
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
    };
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        // Decode a typed error body when possible; keep the raw text either way.
        let local_var_entity: Option<DeleteSloError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// Delete (or partially delete) multiple service level objective objects. This endpoint facilitates deletion of one or more thresholds for one or more service level objective objects. If all thresholds are deleted, the service level objective object is deleted as well.
pub async fn delete_slo_timeframe_in_bulk(configuration: &configuration::Configuration, body: ::std::collections::HashMap<String, Vec<crate::models::SloTimeframe>>) -> Result<crate::models::SloBulkDeleteResponse, Error<DeleteSloTimeframeInBulkError>> {
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/api/v1/slo/bulk_delete", configuration.base_path);
    let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // NOTE(review): both auth headers below are filled from
    // `configuration.api_key` — confirm there is no separate app-key field.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
    };
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
    };
    // Serialize the slo-id -> timeframes map as the JSON request body.
    local_var_req_builder = local_var_req_builder.json(&body);
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        // Decode a typed error body when possible; keep the raw text either way.
        let local_var_entity: Option<DeleteSloTimeframeInBulkError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// Get a service level objective object.
pub async fn get_slo(configuration: &configuration::Configuration, slo_id: &str) -> Result<crate::models::SloResponse, Error<GetSloError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/slo/{slo_id}", configuration.base_path, slo_id=crate::apis::urlencode(slo_id));
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetSloError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Get a specific SLO’s history, regardless of its SLO type. The detailed history data is structured according to the source data type. For example, metric data is included for event SLOs that use the metric source, and monitor SLO types include the monitor transition history. **Note:** There are different response formats for event based and time based SLOs. Examples of both are shown.
pub async fn get_slo_history(configuration: &configuration::Configuration, slo_id: &str, from_ts: i64, to_ts: i64, target: Option<f64>) -> Result<crate::models::SloHistoryResponse, Error<GetSloHistoryError>> {
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/api/v1/slo/{slo_id}/history", configuration.base_path, slo_id=crate::apis::urlencode(slo_id));
    let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
    // `from_ts` and `to_ts` are required query parameters.
    local_var_req_builder = local_var_req_builder.query(&[("from_ts", &from_ts.to_string())]);
    local_var_req_builder = local_var_req_builder.query(&[("to_ts", &to_ts.to_string())]);
    // `target` is optional; omit the parameter when `None`.
    if let Some(ref local_var_str) = target {
        local_var_req_builder = local_var_req_builder.query(&[("target", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // NOTE(review): both auth headers below are filled from
    // `configuration.api_key` — confirm there is no separate app-key field.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
    };
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
    };
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        // Decode a typed error body when possible; keep the raw text either way.
        let local_var_entity: Option<GetSloHistoryError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// Get a list of service level objective objects for your organization.
pub async fn list_slos(configuration: &configuration::Configuration, ids: Option<&str>, query: Option<&str>, tags_query: Option<&str>, metrics_query: Option<&str>) -> Result<crate::models::SloListResponse, Error<ListSlosError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/slo", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = ids {
local_var_req_builder = local_var_req_builder.query(&[("ids", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = query {
local_var_req_builder = local_var_req_builder.query(&[("query", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = tags_query {
local_var_req_builder = local_var_req_builder.query(&[("tags_query", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = metrics_query {
local_var_req_builder = local_var_req_builder.query(&[("metrics_query", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<ListSlosError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Update the specified service level objective object.
pub async fn update_slo(configuration: &configuration::Configuration, slo_id: &str, body: crate::models::ServiceLevelObjective) -> Result<crate::models::SloListResponse, Error<UpdateSloError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/slo/{slo_id}", configuration.base_path, slo_id=crate::apis::urlencode(slo_id));
let mut local_var_req_builder = local_var_client.put(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&body);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<UpdateSloError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
|
pub mod envpath;
pub mod hash;
pub mod name;
|
// svd2rust-style reader/writer plumbing for the RCC_MC_RSTSCLRR register:
// each reset-status flag gets a `*_R` single-bit reader alias and a `*_W`
// single-bit writer alias parameterized by the register spec and bit offset `O`.
#[doc = "Register `RCC_MC_RSTSCLRR` reader"]
pub type R = crate::R<RCC_MC_RSTSCLRR_SPEC>;
#[doc = "Register `RCC_MC_RSTSCLRR` writer"]
pub type W = crate::W<RCC_MC_RSTSCLRR_SPEC>;
#[doc = "Field `PORRSTF` reader - PORRSTF"]
pub type PORRSTF_R = crate::BitReader;
#[doc = "Field `PORRSTF` writer - PORRSTF"]
pub type PORRSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `BORRSTF` reader - BORRSTF"]
pub type BORRSTF_R = crate::BitReader;
#[doc = "Field `BORRSTF` writer - BORRSTF"]
pub type BORRSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PADRSTF` reader - PADRSTF"]
pub type PADRSTF_R = crate::BitReader;
#[doc = "Field `PADRSTF` writer - PADRSTF"]
pub type PADRSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `HCSSRSTF` reader - HCSSRSTF"]
pub type HCSSRSTF_R = crate::BitReader;
#[doc = "Field `HCSSRSTF` writer - HCSSRSTF"]
pub type HCSSRSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `VCORERSTF` reader - VCORERSTF"]
pub type VCORERSTF_R = crate::BitReader;
#[doc = "Field `VCORERSTF` writer - VCORERSTF"]
pub type VCORERSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MCURSTF` reader - MCURSTF"]
pub type MCURSTF_R = crate::BitReader;
#[doc = "Field `MCURSTF` writer - MCURSTF"]
pub type MCURSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MPSYSRSTF` reader - MPSYSRSTF"]
pub type MPSYSRSTF_R = crate::BitReader;
#[doc = "Field `MPSYSRSTF` writer - MPSYSRSTF"]
pub type MPSYSRSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MCSYSRSTF` reader - MCSYSRSTF"]
pub type MCSYSRSTF_R = crate::BitReader;
#[doc = "Field `MCSYSRSTF` writer - MCSYSRSTF"]
pub type MCSYSRSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IWDG1RSTF` reader - IWDG1RSTF"]
pub type IWDG1RSTF_R = crate::BitReader;
#[doc = "Field `IWDG1RSTF` writer - IWDG1RSTF"]
pub type IWDG1RSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `IWDG2RSTF` reader - IWDG2RSTF"]
pub type IWDG2RSTF_R = crate::BitReader;
#[doc = "Field `IWDG2RSTF` writer - IWDG2RSTF"]
pub type IWDG2RSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `WWDG1RSTF` reader - WWDG1RSTF"]
pub type WWDG1RSTF_R = crate::BitReader;
#[doc = "Field `WWDG1RSTF` writer - WWDG1RSTF"]
pub type WWDG1RSTF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-side accessors: each method extracts one reset-status flag from the
// captured register value (bits 0..=10, one flag per bit).
impl R {
#[doc = "Bit 0 - PORRSTF"]
#[inline(always)]
pub fn porrstf(&self) -> PORRSTF_R {
PORRSTF_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 1 - BORRSTF"]
#[inline(always)]
pub fn borrstf(&self) -> BORRSTF_R {
BORRSTF_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bit 2 - PADRSTF"]
#[inline(always)]
pub fn padrstf(&self) -> PADRSTF_R {
PADRSTF_R::new(((self.bits >> 2) & 1) != 0)
}
#[doc = "Bit 3 - HCSSRSTF"]
#[inline(always)]
pub fn hcssrstf(&self) -> HCSSRSTF_R {
HCSSRSTF_R::new(((self.bits >> 3) & 1) != 0)
}
#[doc = "Bit 4 - VCORERSTF"]
#[inline(always)]
pub fn vcorerstf(&self) -> VCORERSTF_R {
VCORERSTF_R::new(((self.bits >> 4) & 1) != 0)
}
#[doc = "Bit 5 - MCURSTF"]
#[inline(always)]
pub fn mcurstf(&self) -> MCURSTF_R {
MCURSTF_R::new(((self.bits >> 5) & 1) != 0)
}
#[doc = "Bit 6 - MPSYSRSTF"]
#[inline(always)]
pub fn mpsysrstf(&self) -> MPSYSRSTF_R {
MPSYSRSTF_R::new(((self.bits >> 6) & 1) != 0)
}
#[doc = "Bit 7 - MCSYSRSTF"]
#[inline(always)]
pub fn mcsysrstf(&self) -> MCSYSRSTF_R {
MCSYSRSTF_R::new(((self.bits >> 7) & 1) != 0)
}
#[doc = "Bit 8 - IWDG1RSTF"]
#[inline(always)]
pub fn iwdg1rstf(&self) -> IWDG1RSTF_R {
IWDG1RSTF_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - IWDG2RSTF"]
#[inline(always)]
pub fn iwdg2rstf(&self) -> IWDG2RSTF_R {
IWDG2RSTF_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 10 - WWDG1RSTF"]
#[inline(always)]
pub fn wwdg1rstf(&self) -> WWDG1RSTF_R {
WWDG1RSTF_R::new(((self.bits >> 10) & 1) != 0)
}
}
// Write-side accessors: each method returns a bit-writer proxy bound to the
// flag's bit offset; the const generic must match the bit documented above.
impl W {
#[doc = "Bit 0 - PORRSTF"]
#[inline(always)]
#[must_use]
pub fn porrstf(&mut self) -> PORRSTF_W<RCC_MC_RSTSCLRR_SPEC, 0> {
PORRSTF_W::new(self)
}
#[doc = "Bit 1 - BORRSTF"]
#[inline(always)]
#[must_use]
pub fn borrstf(&mut self) -> BORRSTF_W<RCC_MC_RSTSCLRR_SPEC, 1> {
BORRSTF_W::new(self)
}
#[doc = "Bit 2 - PADRSTF"]
#[inline(always)]
#[must_use]
pub fn padrstf(&mut self) -> PADRSTF_W<RCC_MC_RSTSCLRR_SPEC, 2> {
PADRSTF_W::new(self)
}
#[doc = "Bit 3 - HCSSRSTF"]
#[inline(always)]
#[must_use]
pub fn hcssrstf(&mut self) -> HCSSRSTF_W<RCC_MC_RSTSCLRR_SPEC, 3> {
HCSSRSTF_W::new(self)
}
#[doc = "Bit 4 - VCORERSTF"]
#[inline(always)]
#[must_use]
pub fn vcorerstf(&mut self) -> VCORERSTF_W<RCC_MC_RSTSCLRR_SPEC, 4> {
VCORERSTF_W::new(self)
}
#[doc = "Bit 5 - MCURSTF"]
#[inline(always)]
#[must_use]
pub fn mcurstf(&mut self) -> MCURSTF_W<RCC_MC_RSTSCLRR_SPEC, 5> {
MCURSTF_W::new(self)
}
#[doc = "Bit 6 - MPSYSRSTF"]
#[inline(always)]
#[must_use]
pub fn mpsysrstf(&mut self) -> MPSYSRSTF_W<RCC_MC_RSTSCLRR_SPEC, 6> {
MPSYSRSTF_W::new(self)
}
#[doc = "Bit 7 - MCSYSRSTF"]
#[inline(always)]
#[must_use]
pub fn mcsysrstf(&mut self) -> MCSYSRSTF_W<RCC_MC_RSTSCLRR_SPEC, 7> {
MCSYSRSTF_W::new(self)
}
#[doc = "Bit 8 - IWDG1RSTF"]
#[inline(always)]
#[must_use]
pub fn iwdg1rstf(&mut self) -> IWDG1RSTF_W<RCC_MC_RSTSCLRR_SPEC, 8> {
IWDG1RSTF_W::new(self)
}
#[doc = "Bit 9 - IWDG2RSTF"]
#[inline(always)]
#[must_use]
pub fn iwdg2rstf(&mut self) -> IWDG2RSTF_W<RCC_MC_RSTSCLRR_SPEC, 9> {
IWDG2RSTF_W::new(self)
}
#[doc = "Bit 10 - WWDG1RSTF"]
#[inline(always)]
#[must_use]
pub fn wwdg1rstf(&mut self) -> WWDG1RSTF_W<RCC_MC_RSTSCLRR_SPEC, 10> {
WWDG1RSTF_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
// Unsafe: bypasses the per-field proxies; the caller is responsible for
// writing a bit pattern that is valid for this register.
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "This register is used by the MCU to check the reset source.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_mc_rstsclrr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_mc_rstsclrr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RCC_MC_RSTSCLRR_SPEC;
// 32-bit register.
impl crate::RegisterSpec for RCC_MC_RSTSCLRR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_mc_rstsclrr::R`](R) reader structure"]
impl crate::Readable for RCC_MC_RSTSCLRR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_mc_rstsclrr::W`](W) writer structure"]
impl crate::Writable for RCC_MC_RSTSCLRR_SPEC {
// Both bitmaps are zero, so `modify` is a plain read-modify-write with no
// fields forced to 0 or 1 (svd2rust convention).
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_MC_RSTSCLRR to value 0x15"]
impl crate::Resettable for RCC_MC_RSTSCLRR_SPEC {
// Reset value taken from the device SVD description.
const RESET_VALUE: Self::Ux = 0x15;
}
|
use std::collections::HashSet;
use std::convert::TryFrom;
// Puzzle input, baked into the binary at compile time.
const INPUT: &str = include_str!("../input.txt");
/// Sums, over every blank-line-separated group, the number of distinct
/// questions that anyone in the group answered "yes" to.
fn part1() -> u64 {
    let mut total = 0u64;
    for group in INPUT.split("\n\n") {
        // Union of all answers in the group: collecting chars into a set
        // de-duplicates across group members.
        let distinct: HashSet<char> = group.lines().flat_map(|line| line.chars()).collect();
        total += u64::try_from(distinct.len()).unwrap();
    }
    total
}
/// Sums, over every blank-line-separated group, the number of questions that
/// *every* member of the group answered "yes" to.
fn part2() -> u64 {
    INPUT
        .split("\n\n")
        .map(|group| {
            let mut per_person = group
                .lines()
                .map(|line| line.chars().collect::<HashSet<char>>());
            // A group in valid input always has at least one line; an empty
            // group would panic here.
            let first = per_person.next().unwrap();
            // `&a & &b` is set intersection, cloning the surviving elements.
            let common = per_person.fold(first, |acc, person| &acc & &person);
            u64::try_from(common.len()).unwrap()
        })
        .sum()
}
/// Entry point: computes and prints the answers for both puzzle parts.
fn main() {
    let part1_answer = part1();
    let part2_answer = part2();
    println!("part 1: {}", part1_answer);
    println!("part 2: {}", part2_answer);
}
#[cfg(test)]
mod tests {
use super::{part1, part2};
// NOTE: the expected values below are specific to the puzzle input bundled
// as ../input.txt; they will not hold for a different input file.
#[test]
fn test_part1() {
assert_eq!(part1(), 6763);
}
#[test]
fn test_part2() {
assert_eq!(part2(), 3512);
}
}
|
use crate::connection::{BrokersPool, SelectionStategy, StaticPool};
use crate::query::response::GroupByResponse;
use crate::query::response::MetadataResponse;
use crate::query::response::ScanResponse;
use crate::query::response::SearchResponse;
use crate::query::response::SegmentMetadataResponse;
use crate::query::response::TimeBoundaryResponse;
use crate::query::response::{TimeseriesResponse, TopNResponse};
use crate::query::timeseries::Timeseries;
use crate::query::{
group_by::GroupBy, scan::Scan, search::Search, segment_metadata::SegmentMetadata,
time_boundary::TimeBoundary, top_n::TopN, DataSource,
};
use crate::query::{DataSourceMetadata, Query};
use reqwest::Client;
use serde::de::DeserializeOwned;
use serde::Serialize;
use std::collections::HashMap;
use thiserror::Error;
/// Errors produced by [`DruidClient`] operations.
#[derive(Error, Debug)]
#[non_exhaustive]
pub enum DruidClientError {
/// Transport-level failure while talking to a broker.
#[error("http connection error")]
HttpConnection { source: reqwest::Error },
/// The data for the given key is not available.
#[error("the data for key `{0}` is not available")]
Redaction(String),
/// A response header did not match what was expected.
#[error("invalid header (expected {expected:?}, found {found:?})")]
InvalidHeader { expected: String, found: String },
/// The outgoing query could not be serialized to JSON.
#[error("couldn't serialize object to json")]
ParsingError { source: serde_json::Error },
/// The response body could not be deserialized into the expected type.
#[error("couldn't deserialize json to object")]
ParsingResponseError { source: serde_json::Error }, // todo: original json but with manageable size
/// The broker answered with a JSON body containing an `error` field.
#[error("Server responded with an error")]
ServerError { response: String },
/// Catch-all for unclassified failures.
#[error("unknown data store error")]
Unknown,
}
/// Convenience alias: every client operation returns this result type.
type ClientResult<T> = Result<T, DruidClientError>;
/// Asynchronous Apache Druid client. Broker selection for each request is
/// delegated to the configured pool (see `BrokersPool`).
pub struct DruidClient {
http_client: Client,
brokers_pool: Box<dyn BrokersPool>,
}
impl DruidClient {
    /// Creates a client over a static pool built from `nodes`, using the
    /// pool's default selection strategy.
    pub fn new(nodes: Vec<String>) -> Self {
        let strategy = SelectionStategy::default_for(&nodes);
        DruidClient {
            http_client: Client::new(),
            brokers_pool: Box::new(StaticPool::new(nodes, strategy)),
        }
    }
    /// Builds the query endpoint URL for the broker chosen by the pool.
    fn url(&self) -> String {
        format!("http://{}/druid/v2/?pretty", self.brokers_pool.broker())
    }
    /// POSTs a JSON query body to a broker and returns the raw response text.
    ///
    /// A body that parses as JSON and contains a top-level `"error"` key is
    /// reported as [`DruidClientError::ServerError`] even when the HTTP
    /// exchange itself succeeded.
    async fn http_query(&self, request: &str) -> Result<String, DruidClientError> {
        let response_str = self
            .http_client
            .post(self.url())
            .body(request.to_string())
            .header(reqwest::header::CONTENT_TYPE, "application/json")
            .send()
            .await
            .map_err(|source| DruidClientError::HttpConnection { source })?
            .text()
            .await
            .map_err(|source| DruidClientError::HttpConnection { source })?;
        let json_value: serde_json::Value = serde_json::from_str(&response_str)
            .map_err(|source| DruidClientError::ParsingError { source })?;
        if json_value.get("error").is_some() {
            return Err(DruidClientError::ServerError {
                response: response_str,
            });
        }
        Ok(response_str)
    }
    /// Runs a generic [`Query`] and deserializes the response rows into `T`.
    pub async fn query<'a, T: DeserializeOwned + std::fmt::Debug + Serialize>(
        &self,
        query: &Query,
    ) -> ClientResult<Vec<T>> {
        self._query(query).await
    }
    /// Runs a topN query.
    pub async fn top_n<'a, T: DeserializeOwned + std::fmt::Debug + Serialize>(
        &self,
        query: &TopN,
    ) -> ClientResult<Vec<TopNResponse<T>>> {
        self._query(query).await
    }
    /// Runs a search query. (`T` is unused here but kept so existing
    /// turbofish call sites stay valid.)
    pub async fn search<'a, T: DeserializeOwned + std::fmt::Debug + Serialize>(
        &self,
        query: &Search,
    ) -> ClientResult<Vec<SearchResponse>> {
        self._query(query).await
    }
    /// Runs a groupBy query.
    pub async fn group_by<'a, T: DeserializeOwned + std::fmt::Debug + Serialize>(
        &self,
        query: &GroupBy,
    ) -> ClientResult<Vec<GroupByResponse<T>>> {
        self._query(query).await
    }
    /// Runs a scan query.
    pub async fn scan<'a, T: DeserializeOwned + std::fmt::Debug + Serialize>(
        &self,
        query: &Scan,
    ) -> ClientResult<Vec<ScanResponse<T>>> {
        self._query(query).await
    }
    /// Runs a timeBoundary query. (`T` is unused here but kept so existing
    /// turbofish call sites stay valid.)
    pub async fn time_boundary<'a, T: DeserializeOwned + std::fmt::Debug + Serialize>(
        &self,
        query: &TimeBoundary,
    ) -> ClientResult<Vec<TimeBoundaryResponse>> {
        self._query(query).await
    }
    /// Runs a segmentMetadata query.
    pub async fn segment_metadata(
        &self,
        query: &SegmentMetadata,
    ) -> ClientResult<Vec<SegmentMetadataResponse>> {
        self._query(query).await
    }
    /// Runs a timeseries query.
    pub async fn timeseries<'a, T: DeserializeOwned + std::fmt::Debug + Serialize>(
        &self,
        query: &Timeseries,
    ) -> ClientResult<Vec<TimeseriesResponse<T>>> {
        self._query(query).await
    }
    /// Serializes `query`, sends it, and deserializes the response.
    async fn _query<Req, Resp>(&self, query: &Req) -> ClientResult<Resp>
    where
        Req: Serialize,
        Resp: DeserializeOwned,
    {
        let request = serde_json::to_string(query)
            .map_err(|source| DruidClientError::ParsingError { source })?;
        let response = self.http_query(&request).await?;
        serde_json::from_str::<Resp>(&response)
            .map_err(|source| DruidClientError::ParsingResponseError { source })
    }
    /// Runs a dataSourceMetadata query.
    ///
    /// Takes `&self` (previously `self`) so the client is not consumed by a
    /// single metadata call; method-call syntax is unaffected.
    pub async fn datasource_metadata(
        &self,
        data_source: DataSource,
    ) -> ClientResult<Vec<MetadataResponse<HashMap<String, String>>>> {
        let query = DataSourceMetadata {
            data_source,
            context: Default::default(),
        };
        self._query(&query).await
    }
}
|
extern crate libc;
extern crate hoedown;
use std::ffi::{CString,CStr};
use hoedown::{Markdown,Html,Render};
/// Converts a NUL-terminated UTF-8 Markdown string into rendered HTML.
///
/// Returns a heap-allocated C string that the caller must release with
/// `libi18nrs_free`. Returns NULL on a NULL input, invalid UTF-8, or HTML
/// output containing an interior NUL, instead of panicking across the FFI
/// boundary. Declared `extern "C"` so C callers get the C ABI (a bare
/// `#[no_mangle] pub fn` would export the unstable Rust ABI).
#[no_mangle]
pub extern "C" fn libi18nrs_convert(input: *const libc::c_char) -> *const libc::c_char {
    if input.is_null() {
        return std::ptr::null();
    }
    // SAFETY: caller guarantees `input` points at a valid NUL-terminated string.
    let slice = unsafe { CStr::from_ptr(input) };
    let ins = match std::str::from_utf8(slice.to_bytes()) {
        Ok(s) => s,
        Err(_) => return std::ptr::null(),
    };
    let doc = Markdown::new(ins);
    let mut renderer = Html::new(hoedown::renderer::html::Flags::empty(), 0);
    let rendered = renderer.render(&doc);
    let html = match rendered.to_str() {
        Ok(s) => s,
        Err(_) => return std::ptr::null(),
    };
    match CString::new(html) {
        // into_raw transfers ownership to the caller (reclaimed in libi18nrs_free).
        Ok(s) => s.into_raw(),
        Err(_) => std::ptr::null(),
    }
}
/// Releases a string previously returned by `libi18nrs_convert`.
///
/// NULL is accepted and ignored. Declared `extern "C"` so C callers get the
/// C ABI.
#[no_mangle]
pub extern "C" fn libi18nrs_free(ptr: *mut libc::c_char) {
    if ptr.is_null() {
        return;
    }
    // SAFETY: caller guarantees `ptr` was produced by `libi18nrs_convert` and
    // has not been freed already; reconstructing the CString drops the buffer.
    drop(unsafe { CString::from_raw(ptr) });
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use crate::{
lamport::signature::PublicKey,
utils::rescue::{Hash, Rescue128},
};
use winterfell::{
crypto::MerkleTree,
math::{fields::f128::BaseElement, FieldElement},
};
// AGGREGATED PUBLIC KEY
// ================================================================================================
/// Many individual Lamport public keys aggregated into a single Merkle-tree
/// commitment; the tree root serves as the aggregated public key.
pub struct AggPublicKey {
keys: Vec<PublicKey>, // individual keys, sorted ascending by `new()`
tree: MerkleTree<Rescue128>, // leaves are Rescue hashes of the keys, zero-padded
}
impl AggPublicKey {
/// Builds an aggregated key: sorts the keys, hashes each with Rescue, pads
/// the leaf list with hashes of zero keys up to a power of two strictly
/// greater than the key count, and commits to the leaves in a Merkle tree.
pub fn new(mut keys: Vec<PublicKey>) -> Self {
// sort keys in ascending order
keys.sort();
// convert keys to arrays of bytes; each key is hashed using Rescue hash function; the
// initial hashing makes the AIR design a little simpler
let mut leaves: Vec<Hash> = Vec::new();
for key in keys.iter() {
leaves.push(Rescue128::digest(&key.to_elements()));
}
// pad the list of keys with zero keys to make sure the number of leaves is greater than
// the number of keys and is a power of two
let num_leaves = if leaves.len().is_power_of_two() {
// already a power of two: bump to the next one so padding leaves exist
(leaves.len() + 1).next_power_of_two()
} else {
leaves.len().next_power_of_two()
};
let zero_hash = Rescue128::digest(&[BaseElement::ZERO, BaseElement::ZERO]);
for _ in leaves.len()..num_leaves {
leaves.push(zero_hash);
}
// build a Merkle tree of all leaves
let tree = MerkleTree::new(leaves).unwrap();
AggPublicKey { keys, tree }
}
/// Returns a 32-byte representation of the aggregated public key.
pub fn root(&self) -> Hash {
*self.tree.root()
}
/// Returns the number of individual keys aggregated into this key.
pub fn num_keys(&self) -> usize {
self.keys.len()
}
/// Returns number of leaves in the aggregated public key; this will always be greater
/// than the number of individual keys.
pub fn num_leaves(&self) -> usize {
self.tree.leaves().len()
}
/// Returns an individual key at the specified index, if one exists.
pub fn get_key(&self, index: usize) -> Option<PublicKey> {
if index < self.keys.len() {
Some(self.keys[index])
} else {
None
}
}
/// Returns a Merkle path to the specified leaf.
pub fn get_leaf_path(&self, index: usize) -> Vec<Hash> {
self.tree.prove(index).unwrap()
}
}
|
use std::collections::LinkedList;
use std::convert::TryFrom;
use std::result::Result;
use bytes::{Buf, BufMut, Bytes, BytesMut};
use super::mime::MimeType;
use crate::error::RSocketError;
use crate::utils::{u24, Writeable};
// Maximum length of a non-well-known MIME type string: the wire format stores
// (length - 1) in 7 bits, so at most 0x7F + 1 bytes.
const MAX_MIME_LEN: usize = 0x7F + 1;
/// An ordered collection of RSocket composite-metadata entries.
#[derive(Debug, Clone, Eq, PartialEq, Default)]
pub struct CompositeMetadata {
metadatas: LinkedList<CompositeMetadataEntry>,
}
/// A single composite-metadata entry: a MIME type plus its raw payload.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct CompositeMetadataEntry {
mime_type: MimeType,
metadata: Bytes,
}
/// Fluent builder for [`CompositeMetadata`]; obtain via `CompositeMetadata::builder()`.
pub struct CompositeMetadataBuilder {
inner: CompositeMetadata,
}
impl CompositeMetadataBuilder {
    /// Appends an entry built from a MIME type and a raw payload slice.
    pub fn push<A>(mut self, mime_type: MimeType, payload: A) -> Self
    where
        A: AsRef<[u8]>,
    {
        let raw = payload.as_ref();
        let mut buf = BytesMut::with_capacity(raw.len());
        buf.put_slice(raw);
        let entry = CompositeMetadataEntry::new(mime_type, buf.freeze());
        self.inner.push(entry);
        self
    }
    /// Appends an already-constructed entry.
    pub fn push_entry(mut self, metadata: CompositeMetadataEntry) -> Self {
        self.inner.push(metadata);
        self
    }
    /// Finalizes the builder, yielding the accumulated metadata.
    pub fn build(self) -> CompositeMetadata {
        self.inner
    }
}
// Conversions out of `CompositeMetadata` are expressed as `From` impls rather
// than manual `Into` impls (Clippy `from_over_into`): the standard blanket
// impl then provides `Into` for free, so callers using `.into()` are
// unaffected.
impl From<CompositeMetadata> for Vec<u8> {
    /// Serializes the metadata into a plain byte vector.
    fn from(metadata: CompositeMetadata) -> Self {
        let mut buf = BytesMut::new();
        metadata.write_to(&mut buf);
        buf.to_vec()
    }
}
impl From<CompositeMetadata> for Bytes {
    /// Serializes the metadata into an immutable `Bytes` buffer.
    fn from(metadata: CompositeMetadata) -> Self {
        let mut buf = BytesMut::new();
        metadata.write_to(&mut buf);
        buf.freeze()
    }
}
impl From<CompositeMetadata> for BytesMut {
    /// Serializes the metadata into a mutable byte buffer.
    fn from(metadata: CompositeMetadata) -> Self {
        let mut buf = BytesMut::new();
        metadata.write_to(&mut buf);
        buf
    }
}
impl Writeable for CompositeMetadata {
    /// Serializes every entry, in order, into `bf`.
    fn write_to(&self, bf: &mut BytesMut) {
        self.iter().for_each(|entry| entry.write_to(bf));
    }
    /// Total encoded length: the sum of each entry's encoded length.
    fn len(&self) -> usize {
        self.iter().map(|entry| entry.len()).sum()
    }
}
impl CompositeMetadata {
/// Returns a fresh builder for assembling composite metadata.
pub fn builder() -> CompositeMetadataBuilder {
CompositeMetadataBuilder {
inner: CompositeMetadata::default(),
}
}
/// Decodes entries from `b` until the buffer is exhausted, failing on the
/// first malformed entry.
pub fn decode(b: &mut BytesMut) -> crate::Result<CompositeMetadata> {
let mut metadatas = LinkedList::new();
loop {
match Self::decode_once(b) {
Ok(Some(v)) => metadatas.push_back(v),
Ok(None) => break,
Err(e) => return Err(e),
}
}
Ok(CompositeMetadata { metadatas })
}
/// Iterates over the entries in insertion order.
pub fn iter(&self) -> impl Iterator<Item = &CompositeMetadataEntry> {
self.metadatas.iter()
}
// Decodes a single entry, advancing `bs` past its bytes.
// Wire layout: one MIME byte (high bit set => well-known id in the low 7
// bits; otherwise the low 7 bits store the MIME string length minus one,
// followed by that many MIME bytes), then a 3-byte u24 payload length,
// then the payload itself. Returns Ok(None) on an empty buffer.
#[inline]
fn decode_once(bs: &mut BytesMut) -> crate::Result<Option<CompositeMetadataEntry>> {
if bs.is_empty() {
return Ok(None);
}
let first: u8 = bs.get_u8();
let mime_type = if 0x80 & first != 0 {
// Well
let n = first & 0x7F;
MimeType::WellKnown(n)
} else {
// Bad
// Stored value is (length - 1), so add 1 back.
let mime_len = (first as usize) + 1;
if bs.len() < mime_len {
// NOTE(review): this also fires on a truncated (not just empty) MIME
// type, so the message is slightly misleading.
return Err(RSocketError::WithDescription(
"broken composite metadata: empty MIME type!".into(),
)
.into());
}
let front = bs.split_to(mime_len);
MimeType::Normal(String::from_utf8(front.to_vec())?)
};
// The u24 length field needs exactly 3 bytes.
if bs.len() < 3 {
return Err(RSocketError::WithDescription(
"broken composite metadata: not enough bytes!".into(),
)
.into());
}
let payload_size = u24::read_advance(bs).into();
if bs.len() < payload_size {
let desc = format!("broken composite metadata: require {} bytes!", payload_size);
return Err(RSocketError::WithDescription(desc).into());
}
let metadata = bs.split_to(payload_size).freeze();
Ok(Some(CompositeMetadataEntry::new(mime_type, metadata)))
}
/// Appends an entry to the end of the collection.
pub fn push(&mut self, metadata: CompositeMetadataEntry) {
self.metadatas.push_back(metadata)
}
}
impl CompositeMetadataEntry {
    /// Creates a new entry.
    ///
    /// # Panics
    /// Panics when `metadata` is longer than the maximum length encodable in
    /// the u24 wire-format length field.
    pub fn new(mime_type: MimeType, metadata: Bytes) -> CompositeMetadataEntry {
        assert!(metadata.len() <= (u24::MAX as usize));
        CompositeMetadataEntry { mime_type, metadata }
    }
    /// Borrows this entry's MIME type.
    pub fn get_mime_type(&self) -> &MimeType {
        &self.mime_type
    }
    /// Borrows this entry's raw payload.
    pub fn get_metadata(&self) -> &Bytes {
        &self.metadata
    }
    /// Returns the payload as `&str` when it is valid UTF-8, `None` otherwise.
    pub fn get_metadata_utf8(&self) -> Option<&str> {
        std::str::from_utf8(self.metadata.as_ref()).ok()
    }
}
impl Writeable for CompositeMetadataEntry {
// Encodes this entry: one MIME byte (0x80 | id for well-known types, or
// the string length minus one for normal types followed by the string
// bytes), then the payload length as u24, then the payload.
fn write_to(&self, bf: &mut BytesMut) {
match &self.mime_type {
MimeType::WellKnown(n) => {
// WellKnown
bf.put_u8(0x80 | n);
}
MimeType::Normal(s) => {
// NotWellKnown
// The length byte stores (len - 1); the assert guards the
// underflow an empty MIME string would cause.
let mime_type_len = s.len() as u8;
assert!(mime_type_len > 0, "invalid length of MimeType!");
bf.put_u8(mime_type_len - 1);
bf.extend_from_slice(s.as_ref());
}
};
let metadata_len = self.metadata.len();
u24::from(metadata_len).write_to(bf);
if metadata_len > 0 {
bf.extend_from_slice(&self.metadata);
}
}
fn len(&self) -> usize {
// 1byte(MIME) + 3bytes(length of payload in u24)
let mut amount = 4;
if let MimeType::Normal(s) = &self.mime_type {
// normal MIME types additionally carry the MIME string itself
amount += s.len();
}
amount += self.metadata.len();
amount
}
}
|
use crate::ui::components::{
app::{AppMessage, AppSharedProps},
inventory::InventoryMessage,
};
use raui_core::prelude::*;
use raui_material::prelude::*;
use serde::{Deserialize, Serialize};
/// Visual properties of a single inventory item cell.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ItemCellProps {
#[serde(default)]
pub image: String, // image asset id; empty string means "no icon"
#[serde(default)]
pub thin: bool, // narrow variant used for the prev/next pager cells
}
implement_props_data!(ItemCellProps, "ItemCellProps");
/// Identifies which inventory slot a cell represents.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ItemData {
pub index: usize,
}
implement_props_data!(ItemData, "ItemData");
/// The full list of cells to display.
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct ItemCellsProps {
pub items: Vec<ItemCellProps>,
}
implement_props_data!(ItemCellsProps, "ItemCellsProps");
/// Widget id of the inventory that owns these cells (message recipient).
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct OwningInventoryProps(pub WidgetId);
implement_props_data!(OwningInventoryProps, "OwningInventoryProps");
// Behavior hook for item cells: on a button trigger, routes a message based on
// the sender widget's key — "prev"/"next" page the owning inventory, anything
// else (a regular item cell) asks the app to show the popup for that item.
widget_hook! {
use_item_cell(life_cycle) {
life_cycle.change(|context| {
for msg in context.messenger.messages {
if let Some(msg) = msg.downcast_ref::<ButtonMessage>() {
if msg.action == ButtonAction::TriggerStart {
match msg.sender.key() {
"prev" => {
let id = context
.shared_props
.read_cloned_or_default::<OwningInventoryProps>()
.0;
context.messenger.write(id, InventoryMessage::Prev);
}
"next" => {
let id = context
.shared_props
.read_cloned_or_default::<OwningInventoryProps>()
.0;
context.messenger.write(id, InventoryMessage::Next);
}
_ => {
// Regular cell: needs ItemData props to know which item to show.
if let Ok(data) = context.props.read::<ItemData>() {
let id = context
.shared_props
.read_cloned_or_default::<AppSharedProps>()
.0;
context.messenger.write(id, AppMessage::ShowPopup(data.index));
}
}
}
}
}
}
});
}
}
// Renders one inventory cell: a button wrapping a panel, optionally with an
// icon image. Thin cells are narrower with negative side margins (pager
// arrows) and use a plain content box instead of a paper panel.
widget_component! {
pub item_cell(id, key, props) [use_item_cell] {
let ItemCellProps { image, thin } = props.read_cloned_or_default();
let button_props = props.clone().with(SizeBoxProps {
width: SizeBoxSizeValue::Exact(if thin { 18.0 } else { 24.0 }),
height: SizeBoxSizeValue::Exact(24.0),
margin: Rect {
left: if thin { -4.0 } else { 1.0 },
right: if thin { -4.0 } else { 1.0 },
top: 2.0,
bottom: 2.0,
},
}).with(ButtonSettingsProps {
// Notify this widget itself so use_item_cell receives the ButtonMessage.
notify: Some(id.to_owned()),
..Default::default()
});
let panel_props = props.clone().with(PaperProps {
variant: "cell".to_owned(),
frame: None,
});
let component = if thin { content_box } else { paper };
if image.is_empty() {
// No icon: just the button with an empty panel.
widget! {
(#{key} button: {button_props} {
content = (#{"panel"} component: {panel_props})
})
}
} else {
let image_props = Props::new(ImageBoxProps {
// Center the icon and preserve its aspect ratio inside the cell.
content_keep_aspect_ratio: Some(ImageBoxAspectRatio {
horizontal_alignment: 0.5,
vertical_alignment: 0.5,
}),
material: ImageBoxMaterial::Image(ImageBoxImage {
id: image,
..Default::default()
}),
..Default::default()
}).with(ContentBoxItemLayout {
margin: Rect {
left: 4.0,
right: 4.0,
top: 4.0,
bottom: 4.0,
},
..Default::default()
});
widget! {
(#{key} button: {button_props} {
content = (#{"panel"} component: {panel_props} [
(#{"icon"} image_box: {image_props})
])
})
}
}
}
}
|
pub mod cmd;
pub mod server;
pub mod tokenizer;
pub mod util;
|
use super::{field, op, stm, suffix::ASTExpSuffixList, var::Var};
/// A heap-allocated expression node; the AST always passes expressions boxed.
pub type ASTExp = Box<ASTExp_>;
/// A sequence of expression nodes (e.g. call arguments, array elements).
pub type ASTExpList = Vec<ASTExp>;
/// An expression together with its source position.
#[derive(Clone, Debug, PartialEq)]
pub struct ASTExp_ {
pub pos: (usize, usize), // source position — presumably (line, column); confirm against the lexer
pub data: ASTExpData,
}
/// The payload of an expression node. Numeric literals keep their original
/// spelling as strings.
#[derive(Clone, Debug, PartialEq)]
pub enum ASTExpData {
/// Integer literal (source text).
Int(String),
/// Short-integer literal (source text).
Short(String),
/// Real/floating-point literal (source text).
Real(String),
/// Character literal.
Char(char),
/// String literal.
String(String),
/// Boolean literal.
Bool(bool),
/// Variable reference.
Var(Var),
/// Function call: callee expression and argument list.
Call(ASTExp, ASTExpList),
/// Binary operator chain: operator list applied over operand list.
BinOp(op::OperList, ASTExpList),
/// Unary operator application.
UnaryOp(op::OperList, ASTExp),
/// Function literal: parameters, results, and body statement.
Func(field::FieldList, field::FieldList, stm::Stm),
/// Member access: object expression and member name.
Field(ASTExp, String),
/// Array literal.
Array(ASTExpList),
/// `sizeof` applied to an expression.
SizeOf(ASTExp),
/// Parenthesized expression (kept to preserve source structure).
Paren(ASTExp),
/// An expression followed by suffixes (indexing, etc.).
Suffix(ASTExp, ASTExpSuffixList),
/// Absent/placeholder expression.
None,
}
impl ASTExp_ {
pub fn none_exp(pos: (usize, usize)) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::None,
})
}
pub fn int_exp(pos: (usize, usize), data: String) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Int(data),
})
}
pub fn real_exp(pos: (usize, usize), data: String) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Real(data),
})
}
pub fn char_exp(pos: (usize, usize), data: char) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Char(data),
})
}
pub fn string_exp(pos: (usize, usize), data: String) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::String(data),
})
}
pub fn bool_exp(pos: (usize, usize), data: bool) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Bool(data),
})
}
pub fn var_exp(pos: (usize, usize), data: Var) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Var(data),
})
}
pub fn call_exp(pos: (usize, usize), func: ASTExp, args: ASTExpList) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Call(func, args),
})
}
pub fn binop_exp(pos: (usize, usize), oplist: op::OperList, explist: ASTExpList) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::BinOp(oplist, explist),
})
}
pub fn unaryop_exp(pos: (usize, usize), oplist: op::OperList, exp: ASTExp) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::UnaryOp(oplist, exp),
})
}
pub fn func_exp(
pos: (usize, usize),
params: field::FieldList,
result: field::FieldList,
stm: stm::Stm,
) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Func(params, result, stm),
})
}
pub fn field_exp(pos: (usize, usize), field: ASTExp, member: String) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Field(field, member),
})
}
pub fn array_exp(pos: (usize, usize), explist: ASTExpList) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Array(explist),
})
}
pub fn sizeof_exp(pos: (usize, usize), var: ASTExp) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::SizeOf(var),
})
}
pub fn paren_exp(pos: (usize, usize), exp: ASTExp) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Paren(exp),
})
}
pub fn suffix_exp(pos: (usize, usize), exp: ASTExp, suffix: ASTExpSuffixList) -> ASTExp {
Box::new(ASTExp_ {
pos,
data: ASTExpData::Suffix(exp, suffix),
})
}
}
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use Error;
use glib::translate::*;
use std::ptr;
use vte_sys;
// Generated glib wrapper: `Regex` is a shared, reference-counted handle around
// `vte_sys::VteRegex`, with ref/unref/get_type wired to the C functions.
glib_wrapper! {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Regex(Shared<vte_sys::VteRegex>);
match fn {
ref => |ptr| vte_sys::vte_regex_ref(ptr),
unref => |ptr| vte_sys::vte_regex_unref(ptr),
get_type => || vte_sys::vte_regex_get_type(),
}
}
impl Regex {
/// Compiles `pattern` for use with terminal match operations.
/// `flags` is passed straight through to the C API (presumably PCRE2
/// compile flags — confirm against the VTE docs); the pattern length is
/// given in bytes. Errors are surfaced from the C-side GError.
pub fn new_for_match(pattern: &str, flags: u32) -> Result<Regex, Error> {
assert_initialized_main_thread!();
let pattern_length = pattern.len() as isize;
unsafe {
let mut error = ptr::null_mut();
let ret = vte_sys::vte_regex_new_for_match(pattern.to_glib_none().0, pattern_length, flags, &mut error);
if error.is_null() { Ok(from_glib_full(ret)) } else { Err(from_glib_full(error)) }
}
}
/// Compiles `pattern` for use with terminal search operations; otherwise
/// identical in shape to `new_for_match`.
pub fn new_for_search(pattern: &str, flags: u32) -> Result<Regex, Error> {
assert_initialized_main_thread!();
let pattern_length = pattern.len() as isize;
unsafe {
let mut error = ptr::null_mut();
let ret = vte_sys::vte_regex_new_for_search(pattern.to_glib_none().0, pattern_length, flags, &mut error);
if error.is_null() { Ok(from_glib_full(ret)) } else { Err(from_glib_full(error)) }
}
}
/// Requests JIT compilation of the regex with the given flags. The C
/// call's return value is discarded; only the GError determines success.
pub fn jit(&self, flags: u32) -> Result<(), Error> {
unsafe {
let mut error = ptr::null_mut();
let _ = vte_sys::vte_regex_jit(self.to_glib_none().0, flags, &mut error);
if error.is_null() { Ok(()) } else { Err(from_glib_full(error)) }
}
}
}
|
use std::fmt::{Display, Formatter, Result};
/// A 24-hour wall clock. Values are normalized at construction time, so
/// `hours` is always in `0..24` and `minutes` in `0..60`.
#[derive(Debug, PartialEq, Eq)]
pub struct Clock {
    hours: i32,
    minutes: i32,
}
impl Display for Clock {
    /// Renders the clock as zero-padded `HH:MM`.
    fn fmt(&self, f: &mut Formatter) -> Result {
        write!(f, "{:02}:{:02}", self.hours, self.minutes)
    }
}
impl Clock {
    /// Creates a clock from an arbitrary (possibly negative or overflowing)
    /// hour/minute pair, wrapping into a single day.
    pub fn new(hours: i32, minutes: i32) -> Self {
        const MINUTES_PER_DAY: i32 = 24 * 60;
        // `rem_euclid` yields a non-negative remainder, so negative inputs
        // wrap backwards correctly (e.g. -10 min before midnight -> 23:50).
        let total = (hours * 60 + minutes).rem_euclid(MINUTES_PER_DAY);
        Self {
            hours: total / 60,
            minutes: total % 60,
        }
    }
    /// Returns a new clock shifted by `minutes` (may be negative), reusing
    /// the normalization in [`Clock::new`].
    pub fn add_minutes(&self, minutes: i32) -> Self {
        Self::new(self.hours, self.minutes + minutes)
    }
}
|
/// Values that indicate specific hardware feature sets that are available.
/// Each GPU family provides a different set of features and hardware limits.
/// For more information, see [Metal Programming Guide](
/// https://developer.apple.com/library/prerelease/ios/documentation/
/// Miscellaneous/Conceptual/MetalProgrammingGuide/Introduction/
/// Introduction.html#//apple_ref/doc/uid/TP40014221
/// ).
#[cfg(target_os = "ios")]
#[repr(usize)]
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub enum MTLFeatureSet {
    /// The baseline feature set supported by the first generation of iOS GPUs that support Metal.
    /// This feature set is supported by the A7 GPU.
    MTLFeatureSet_iOS_GPUFamily1_v1 = 0,
    /// The baseline feature set supported by the second generation of iOS GPUs that support Metal.
    /// This feature set is supported by the A8 GPU.
    MTLFeatureSet_iOS_GPUFamily2_v1 = 1,
    /// The extended feature set supported by the first generation of iOS GPUs that support Metal.
    /// This feature set is supported by the A7 GPU.
    MTLFeatureSet_iOS_GPUFamily1_v2 = 2,
    /// The extended feature set supported by the second generation of iOS GPUs that support Metal.
    /// This feature set is supported by the A8 GPU.
    MTLFeatureSet_iOS_GPUFamily2_v2 = 3,
    /// The feature set supported by the third generation of iOS GPUs that support Metal.
    /// This feature set is supported by the A9 GPU.
    MTLFeatureSet_iOS_GPUFamily3_v1 = 4
}
/// Values that indicate specific hardware feature sets that are available.
/// Each GPU family provides a different set of features and hardware limits.
/// For more information, see [Metal Programming Guide](
/// https://developer.apple.com/library/prerelease/mac/documentation/
/// Miscellaneous/Conceptual/MetalProgrammingGuide/Introduction/
/// Introduction.html#//apple_ref/doc/uid/TP40014221
/// ).
#[cfg(target_os = "macos")]
#[repr(usize)]
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub enum MTLFeatureSet {
    /// The feature set supported by all OS X GPUs that support Metal.
    MTLFeatureSet_OSX_GPUFamily1_v1 = 10000,
    // NOTE(review): hidden filler variant, presumably so this enum is not
    // single-variant (keeping it structurally parallel to the iOS
    // definition) — confirm before removing.
    #[doc(hidden)]
    _non_unary_compile_dummy = 0
}
/// Controls which argument information is made available for reflection by the creation of the
/// pipeline.
/// Values are individual bit flags and may be OR-ed together.
#[repr(usize)]
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub enum MTLPipelineOption {
    /// No reflection information requested.
    MTLPipelineOptionNone = 0,
    MTLPipelineOptionArgumentInfo = 1 << 0,
    MTLPipelineOptionBufferTypeInfo = 1 << 1
}
/// Comparison functions (e.g. for depth/stencil tests), mirroring Metal's
/// `MTLCompareFunction`; raw values match the Objective-C enum.
#[repr(usize)]
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub enum MTLCompareFunction {
    MTLCompareFunctionNever = 0,
    MTLCompareFunctionLess = 1,
    MTLCompareFunctionEqual = 2,
    MTLCompareFunctionLessEqual = 3,
    MTLCompareFunctionGreater = 4,
    MTLCompareFunctionNotEqual = 5,
    MTLCompareFunctionGreaterEqual = 6,
    MTLCompareFunctionAlways = 7
}
/// Texture pixel formats, mirroring Metal's `MTLPixelFormat`; raw values
/// match the Objective-C enum (note the deliberate numeric gaps).
#[repr(usize)]
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub enum MTLPixelFormat {
    MTLPixelFormatInvalid = 0,
    MTLPixelFormatA8Unorm = 1,
    MTLPixelFormatR8Unorm = 10,
    MTLPixelFormatR8Snorm = 12,
    MTLPixelFormatR8Uint = 13,
    MTLPixelFormatR8Sint = 14,
    MTLPixelFormatR16Unorm = 20,
    MTLPixelFormatR16Snorm = 22,
    MTLPixelFormatR16Uint = 23,
    MTLPixelFormatR16Sint = 24,
    MTLPixelFormatR16Float = 25,
    MTLPixelFormatRG8Unorm = 30,
    MTLPixelFormatRG8Snorm = 32,
    MTLPixelFormatRG8Uint = 33,
    MTLPixelFormatRG8Sint = 34,
    MTLPixelFormatR32Uint = 53,
    MTLPixelFormatR32Sint = 54,
    MTLPixelFormatR32Float = 55,
    MTLPixelFormatRG16Unorm = 60,
    MTLPixelFormatRG16Snorm = 62,
    MTLPixelFormatRG16Uint = 63,
    MTLPixelFormatRG16Sint = 64,
    MTLPixelFormatRG16Float = 65,
    MTLPixelFormatRGBA8Unorm = 70,
    MTLPixelFormatRGBA8Unorm_sRGB = 71,
    MTLPixelFormatRGBA8Snorm = 72,
    MTLPixelFormatRGBA8Uint = 73,
    MTLPixelFormatRGBA8Sint = 74,
    MTLPixelFormatBGRA8Unorm = 80,
    MTLPixelFormatBGRA8Unorm_sRGB = 81,
    MTLPixelFormatRGB10A2Unorm = 90,
    MTLPixelFormatRGB10A2Uint = 91,
    MTLPixelFormatRG11B10Float = 92,
    MTLPixelFormatRGB9E5Float = 93,
    MTLPixelFormatRG32Uint = 103,
    MTLPixelFormatRG32Sint = 104,
    MTLPixelFormatRG32Float = 105,
    MTLPixelFormatRGBA16Unorm = 110,
    MTLPixelFormatRGBA16Snorm = 112,
    MTLPixelFormatRGBA16Uint = 113,
    MTLPixelFormatRGBA16Sint = 114,
    MTLPixelFormatRGBA16Float = 115,
    MTLPixelFormatRGBA32Uint = 123,
    MTLPixelFormatRGBA32Sint = 124,
    MTLPixelFormatRGBA32Float = 125,
    MTLPixelFormatBC1_RGBA = 130,
    MTLPixelFormatBC1_RGBA_sRGB = 131,
    MTLPixelFormatBC2_RGBA = 132,
    MTLPixelFormatBC2_RGBA_sRGB = 133,
    MTLPixelFormatBC3_RGBA = 134,
    MTLPixelFormatBC3_RGBA_sRGB = 135,
    MTLPixelFormatBC4_RUnorm = 140,
    MTLPixelFormatBC4_RSnorm = 141,
    MTLPixelFormatBC5_RGUnorm = 142,
    MTLPixelFormatBC5_RGSnorm = 143,
    MTLPixelFormatBC6H_RGBFloat = 150,
    MTLPixelFormatBC6H_RGBUfloat = 151,
    MTLPixelFormatBC7_RGBAUnorm = 152,
    MTLPixelFormatBC7_RGBAUnorm_sRGB = 153,
    MTLPixelFormatGBGR422 = 240,
    MTLPixelFormatBGRG422 = 241,
    MTLPixelFormatDepth32Float = 252,
    MTLPixelFormatStencil8 = 253,
    MTLPixelFormatDepth24Unorm_Stencil8 = 255,
    MTLPixelFormatDepth32Float_Stencil8 = 260
}
/// The current stage in the lifetime of the command buffer, as it proceeds from enqueued to
/// committed to scheduled to completed.
#[repr(usize)]
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub enum MTLCommandBufferStatus {
    /// The command buffer is not enqueued yet. This is the starting value of the status property
    /// before the enqueue method is called.
    ///
    /// Available in iOS 8.0 and later.
    MTLCommandBufferStatusNotEnqueued = 0,
    /// The command buffer is enqueued.
    ///
    /// Available in iOS 8.0 and later.
    MTLCommandBufferStatusEnqueued = 1,
    /// The command buffer is committed for execution.
    ///
    /// Available in iOS 8.0 and later.
    MTLCommandBufferStatusCommitted = 2,
    /// The command buffer is scheduled. A command buffer is considered scheduled when any
    /// dependencies between work tasks submitted by other command buffers or other APIs
    /// in the system are satisfied.
    ///
    /// Available in iOS 8.0 and later.
    MTLCommandBufferStatusScheduled = 3,
    /// The command buffer completed execution successfully.
    ///
    /// Available in iOS 8.0 and later.
    MTLCommandBufferStatusCompleted = 4,
    /// Execution of the command buffer was aborted due to an error during execution. Check
    /// the error property for more information.
    ///
    /// Available in iOS 8.0 and later.
    MTLCommandBufferStatusError = 5
}
|
use crate::access_control;
use fund::{
accounts::Fund,
error::{FundError, FundErrorCode},
};
use serum_common::pack::Pack;
use solana_program::{
account_info::{next_account_info, AccountInfo},
msg,
pubkey::Pubkey,
};
use std::convert::Into;
/// Entrypoint for the `payback_init` instruction: validates the accounts and
/// registers a new payback of `amount` on the fund.
///
/// Expected accounts, in order:
///   0. fund account (mutated via `try_borrow_mut_data`)
///   1. fund owner (must be a signer — enforced in `access_control`)
pub fn handler(
    program_id: &Pubkey,
    accounts: &[AccountInfo],
    amount: u64,
) -> Result<(), FundError> {
    msg!("Handler: payback_init");
    let acc_infos = &mut accounts.iter();
    let fund_acc_info = next_account_info(acc_infos)?;
    let owner_acc_info = next_account_info(acc_infos)?;
    // Authorization and account validation before any state change.
    access_control(AccessControlRequest {
        program_id,
        fund_acc_info,
        owner_acc_info,
    })?;
    // Deserialize the fund, apply the state transition, and re-serialize it
    // back into the account's data buffer.
    Fund::unpack_mut(
        &mut fund_acc_info.try_borrow_mut_data()?,
        &mut |fund_acc: &mut Fund| {
            state_transistion(StateTransistionRequest { fund_acc, amount }).map_err(Into::into)
        },
    )?;
    Ok(())
}
/// Validates the accounts for `payback_init`: the owner must have signed the
/// transaction, the fund account must validate against this program, and the
/// withdraw access-control check must pass.
fn access_control(req: AccessControlRequest) -> Result<(), FundError> {
    let AccessControlRequest {
        program_id,
        fund_acc_info,
        owner_acc_info,
    } = req;
    // The fund owner must have signed this transaction.
    if !owner_acc_info.is_signer {
        return Err(FundErrorCode::Unauthorized.into());
    }
    let _ = access_control::fund(fund_acc_info, program_id)?;
    // FIX: this Result was previously discarded (`let _ = ...;` with no `?`),
    // so a failing withdraw check did not block the instruction. Propagate
    // the error so the check is actually enforced.
    let _ = access_control::withdraw(program_id, fund_acc_info, owner_acc_info)?;
    Ok(())
}
/// Applies the payback to the fund: computes the per-share payout and records
/// the new payback entry on the fund account.
fn state_transistion(req: StateTransistionRequest) -> Result<(), FundError> {
    let StateTransistionRequest { fund_acc, amount } = req;
    msg!("State-Transistion: Initialize Register Payback");
    // NOTE(review): `checked_div` returns `None` when `amount == 0`, so this
    // `unwrap` panics (aborting the transaction) on a zero amount — confirm
    // zero is rejected upstream or return a proper error here. Integer
    // division also truncates, so `per_share` rounds down.
    let per_share = fund_acc.shares.checked_div(amount).unwrap();
    fund_acc.add_new_payback(amount, per_share);
    msg!("State-Transistion: Initialize Register Payback Success");
    Ok(())
}
// Bundled arguments for `access_control`.
struct AccessControlRequest<'a, 'b> {
    program_id: &'a Pubkey,
    fund_acc_info: &'a AccountInfo<'b>,
    owner_acc_info: &'a AccountInfo<'b>,
}
// Bundled arguments for `state_transistion`.
struct StateTransistionRequest<'a> {
    fund_acc: &'a mut Fund,
    amount: u64,
}
|
/// An enum to represent all characters in the Lao block.
///
/// Variants are listed in code-point order (U+0E81..=U+0EDF). The block
/// contains unassigned gaps (e.g. U+0E83), so consecutive variants are not
/// always consecutive code points.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Lao {
    /// \u{e81}: 'ກ'
    LetterKo,
    /// \u{e82}: 'ຂ'
    LetterKhoSung,
    /// \u{e84}: 'ຄ'
    LetterKhoTam,
    /// \u{e86}: 'ຆ'
    LetterPaliGha,
    /// \u{e87}: 'ງ'
    LetterNgo,
    /// \u{e88}: 'ຈ'
    LetterCo,
    /// \u{e89}: 'ຉ'
    LetterPaliCha,
    /// \u{e8a}: 'ຊ'
    LetterSoTam,
    /// \u{e8c}: 'ຌ'
    LetterPaliJha,
    /// \u{e8d}: 'ຍ'
    LetterNyo,
    /// \u{e8e}: 'ຎ'
    LetterPaliNya,
    /// \u{e8f}: 'ຏ'
    LetterPaliTta,
    /// \u{e90}: 'ຐ'
    LetterPaliTtha,
    /// \u{e91}: 'ຑ'
    LetterPaliDda,
    /// \u{e92}: 'ຒ'
    LetterPaliDdha,
    /// \u{e93}: 'ຓ'
    LetterPaliNna,
    /// \u{e94}: 'ດ'
    LetterDo,
    /// \u{e95}: 'ຕ'
    LetterTo,
    /// \u{e96}: 'ຖ'
    LetterThoSung,
    /// \u{e97}: 'ທ'
    LetterThoTam,
    /// \u{e98}: 'ຘ'
    LetterPaliDha,
    /// \u{e99}: 'ນ'
    LetterNo,
    /// \u{e9a}: 'ບ'
    LetterBo,
    /// \u{e9b}: 'ປ'
    LetterPo,
    /// \u{e9c}: 'ຜ'
    LetterPhoSung,
    /// \u{e9d}: 'ຝ'
    LetterFoTam,
    /// \u{e9e}: 'ພ'
    LetterPhoTam,
    /// \u{e9f}: 'ຟ'
    LetterFoSung,
    /// \u{ea0}: 'ຠ'
    LetterPaliBha,
    /// \u{ea1}: 'ມ'
    LetterMo,
    /// \u{ea2}: 'ຢ'
    LetterYo,
    /// \u{ea3}: 'ຣ'
    LetterLoLing,
    /// \u{ea5}: 'ລ'
    LetterLoLoot,
    /// \u{ea7}: 'ວ'
    LetterWo,
    /// \u{ea8}: 'ຨ'
    LetterSanskritSha,
    /// \u{ea9}: 'ຩ'
    LetterSanskritSsa,
    /// \u{eaa}: 'ສ'
    LetterSoSung,
    /// \u{eab}: 'ຫ'
    LetterHoSung,
    /// \u{eac}: 'ຬ'
    LetterPaliLla,
    /// \u{ead}: 'ອ'
    LetterO,
    /// \u{eae}: 'ຮ'
    LetterHoTam,
    /// \u{eaf}: 'ຯ'
    Ellipsis,
    /// \u{eb0}: 'ະ'
    VowelSignA,
    /// \u{eb1}: 'ັ'
    VowelSignMaiKan,
    /// \u{eb2}: 'າ'
    VowelSignAa,
    /// \u{eb3}: 'ຳ'
    VowelSignAm,
    /// \u{eb4}: 'ິ'
    VowelSignI,
    /// \u{eb5}: 'ີ'
    VowelSignIi,
    /// \u{eb6}: 'ຶ'
    VowelSignY,
    /// \u{eb7}: 'ື'
    VowelSignYy,
    /// \u{eb8}: 'ຸ'
    VowelSignU,
    /// \u{eb9}: 'ູ'
    VowelSignUu,
    /// \u{eba}: '຺'
    SignPaliVirama,
    /// \u{ebb}: 'ົ'
    VowelSignMaiKon,
    /// \u{ebc}: 'ຼ'
    SemivowelSignLo,
    /// \u{ebd}: 'ຽ'
    SemivowelSignNyo,
    /// \u{ec0}: 'ເ'
    VowelSignE,
    /// \u{ec1}: 'ແ'
    VowelSignEi,
    /// \u{ec2}: 'ໂ'
    VowelSignO,
    /// \u{ec3}: 'ໃ'
    VowelSignAy,
    /// \u{ec4}: 'ໄ'
    VowelSignAi,
    /// \u{ec6}: 'ໆ'
    KoLa,
    /// \u{ec8}: '່'
    ToneMaiEk,
    /// \u{ec9}: '້'
    ToneMaiTho,
    /// \u{eca}: '໊'
    ToneMaiTi,
    /// \u{ecb}: '໋'
    ToneMaiCatawa,
    /// \u{ecc}: '໌'
    CancellationMark,
    /// \u{ecd}: 'ໍ'
    Niggahita,
    /// \u{ed0}: '໐'
    DigitZero,
    /// \u{ed1}: '໑'
    DigitOne,
    /// \u{ed2}: '໒'
    DigitTwo,
    /// \u{ed3}: '໓'
    DigitThree,
    /// \u{ed4}: '໔'
    DigitFour,
    /// \u{ed5}: '໕'
    DigitFive,
    /// \u{ed6}: '໖'
    DigitSix,
    /// \u{ed7}: '໗'
    DigitSeven,
    /// \u{ed8}: '໘'
    DigitEight,
    /// \u{ed9}: '໙'
    DigitNine,
    /// \u{edc}: 'ໜ'
    HoNo,
    /// \u{edd}: 'ໝ'
    HoMo,
    /// \u{ede}: 'ໞ'
    LetterKhmuGo,
    /// \u{edf}: 'ໟ'
    LetterKhmuNyo,
}
/// Converts a `Lao` variant into the Unicode character it represents.
///
/// Implemented as `From` rather than a hand-written `Into` (Clippy
/// `from_over_into`): `impl From<Lao> for char` provides `Lao: Into<char>`
/// automatically via the standard library's blanket impl, so all existing
/// `.into()` call sites keep working.
impl From<Lao> for char {
    fn from(lao: Lao) -> char {
        match lao {
            Lao::LetterKo => 'ກ',
            Lao::LetterKhoSung => 'ຂ',
            Lao::LetterKhoTam => 'ຄ',
            Lao::LetterPaliGha => 'ຆ',
            Lao::LetterNgo => 'ງ',
            Lao::LetterCo => 'ຈ',
            Lao::LetterPaliCha => 'ຉ',
            Lao::LetterSoTam => 'ຊ',
            Lao::LetterPaliJha => 'ຌ',
            Lao::LetterNyo => 'ຍ',
            Lao::LetterPaliNya => 'ຎ',
            Lao::LetterPaliTta => 'ຏ',
            Lao::LetterPaliTtha => 'ຐ',
            Lao::LetterPaliDda => 'ຑ',
            Lao::LetterPaliDdha => 'ຒ',
            Lao::LetterPaliNna => 'ຓ',
            Lao::LetterDo => 'ດ',
            Lao::LetterTo => 'ຕ',
            Lao::LetterThoSung => 'ຖ',
            Lao::LetterThoTam => 'ທ',
            Lao::LetterPaliDha => 'ຘ',
            Lao::LetterNo => 'ນ',
            Lao::LetterBo => 'ບ',
            Lao::LetterPo => 'ປ',
            Lao::LetterPhoSung => 'ຜ',
            Lao::LetterFoTam => 'ຝ',
            Lao::LetterPhoTam => 'ພ',
            Lao::LetterFoSung => 'ຟ',
            Lao::LetterPaliBha => 'ຠ',
            Lao::LetterMo => 'ມ',
            Lao::LetterYo => 'ຢ',
            Lao::LetterLoLing => 'ຣ',
            Lao::LetterLoLoot => 'ລ',
            Lao::LetterWo => 'ວ',
            Lao::LetterSanskritSha => 'ຨ',
            Lao::LetterSanskritSsa => 'ຩ',
            Lao::LetterSoSung => 'ສ',
            Lao::LetterHoSung => 'ຫ',
            Lao::LetterPaliLla => 'ຬ',
            Lao::LetterO => 'ອ',
            Lao::LetterHoTam => 'ຮ',
            Lao::Ellipsis => 'ຯ',
            Lao::VowelSignA => 'ະ',
            Lao::VowelSignMaiKan => 'ັ',
            Lao::VowelSignAa => 'າ',
            Lao::VowelSignAm => 'ຳ',
            Lao::VowelSignI => 'ິ',
            Lao::VowelSignIi => 'ີ',
            Lao::VowelSignY => 'ຶ',
            Lao::VowelSignYy => 'ື',
            Lao::VowelSignU => 'ຸ',
            Lao::VowelSignUu => 'ູ',
            Lao::SignPaliVirama => '຺',
            Lao::VowelSignMaiKon => 'ົ',
            Lao::SemivowelSignLo => 'ຼ',
            Lao::SemivowelSignNyo => 'ຽ',
            Lao::VowelSignE => 'ເ',
            Lao::VowelSignEi => 'ແ',
            Lao::VowelSignO => 'ໂ',
            Lao::VowelSignAy => 'ໃ',
            Lao::VowelSignAi => 'ໄ',
            Lao::KoLa => 'ໆ',
            Lao::ToneMaiEk => '່',
            Lao::ToneMaiTho => '້',
            Lao::ToneMaiTi => '໊',
            Lao::ToneMaiCatawa => '໋',
            Lao::CancellationMark => '໌',
            Lao::Niggahita => 'ໍ',
            Lao::DigitZero => '໐',
            Lao::DigitOne => '໑',
            Lao::DigitTwo => '໒',
            Lao::DigitThree => '໓',
            Lao::DigitFour => '໔',
            Lao::DigitFive => '໕',
            Lao::DigitSix => '໖',
            Lao::DigitSeven => '໗',
            Lao::DigitEight => '໘',
            Lao::DigitNine => '໙',
            Lao::HoNo => 'ໜ',
            Lao::HoMo => 'ໝ',
            Lao::LetterKhmuGo => 'ໞ',
            Lao::LetterKhmuNyo => 'ໟ',
        }
    }
}
impl std::convert::TryFrom<char> for Lao {
    type Error = ();
    /// Maps a `char` back to its `Lao` variant; returns `Err(())` for any
    /// character outside the Lao block or in one of its unassigned gaps.
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            'ກ' => Ok(Lao::LetterKo),
            'ຂ' => Ok(Lao::LetterKhoSung),
            'ຄ' => Ok(Lao::LetterKhoTam),
            'ຆ' => Ok(Lao::LetterPaliGha),
            'ງ' => Ok(Lao::LetterNgo),
            'ຈ' => Ok(Lao::LetterCo),
            'ຉ' => Ok(Lao::LetterPaliCha),
            'ຊ' => Ok(Lao::LetterSoTam),
            'ຌ' => Ok(Lao::LetterPaliJha),
            'ຍ' => Ok(Lao::LetterNyo),
            'ຎ' => Ok(Lao::LetterPaliNya),
            'ຏ' => Ok(Lao::LetterPaliTta),
            'ຐ' => Ok(Lao::LetterPaliTtha),
            'ຑ' => Ok(Lao::LetterPaliDda),
            'ຒ' => Ok(Lao::LetterPaliDdha),
            'ຓ' => Ok(Lao::LetterPaliNna),
            'ດ' => Ok(Lao::LetterDo),
            'ຕ' => Ok(Lao::LetterTo),
            'ຖ' => Ok(Lao::LetterThoSung),
            'ທ' => Ok(Lao::LetterThoTam),
            'ຘ' => Ok(Lao::LetterPaliDha),
            'ນ' => Ok(Lao::LetterNo),
            'ບ' => Ok(Lao::LetterBo),
            'ປ' => Ok(Lao::LetterPo),
            'ຜ' => Ok(Lao::LetterPhoSung),
            'ຝ' => Ok(Lao::LetterFoTam),
            'ພ' => Ok(Lao::LetterPhoTam),
            'ຟ' => Ok(Lao::LetterFoSung),
            'ຠ' => Ok(Lao::LetterPaliBha),
            'ມ' => Ok(Lao::LetterMo),
            'ຢ' => Ok(Lao::LetterYo),
            'ຣ' => Ok(Lao::LetterLoLing),
            'ລ' => Ok(Lao::LetterLoLoot),
            'ວ' => Ok(Lao::LetterWo),
            'ຨ' => Ok(Lao::LetterSanskritSha),
            'ຩ' => Ok(Lao::LetterSanskritSsa),
            'ສ' => Ok(Lao::LetterSoSung),
            'ຫ' => Ok(Lao::LetterHoSung),
            'ຬ' => Ok(Lao::LetterPaliLla),
            'ອ' => Ok(Lao::LetterO),
            'ຮ' => Ok(Lao::LetterHoTam),
            'ຯ' => Ok(Lao::Ellipsis),
            'ະ' => Ok(Lao::VowelSignA),
            'ັ' => Ok(Lao::VowelSignMaiKan),
            'າ' => Ok(Lao::VowelSignAa),
            'ຳ' => Ok(Lao::VowelSignAm),
            'ິ' => Ok(Lao::VowelSignI),
            'ີ' => Ok(Lao::VowelSignIi),
            'ຶ' => Ok(Lao::VowelSignY),
            'ື' => Ok(Lao::VowelSignYy),
            'ຸ' => Ok(Lao::VowelSignU),
            'ູ' => Ok(Lao::VowelSignUu),
            '຺' => Ok(Lao::SignPaliVirama),
            'ົ' => Ok(Lao::VowelSignMaiKon),
            'ຼ' => Ok(Lao::SemivowelSignLo),
            'ຽ' => Ok(Lao::SemivowelSignNyo),
            'ເ' => Ok(Lao::VowelSignE),
            'ແ' => Ok(Lao::VowelSignEi),
            'ໂ' => Ok(Lao::VowelSignO),
            'ໃ' => Ok(Lao::VowelSignAy),
            'ໄ' => Ok(Lao::VowelSignAi),
            'ໆ' => Ok(Lao::KoLa),
            '່' => Ok(Lao::ToneMaiEk),
            '້' => Ok(Lao::ToneMaiTho),
            '໊' => Ok(Lao::ToneMaiTi),
            '໋' => Ok(Lao::ToneMaiCatawa),
            '໌' => Ok(Lao::CancellationMark),
            'ໍ' => Ok(Lao::Niggahita),
            '໐' => Ok(Lao::DigitZero),
            '໑' => Ok(Lao::DigitOne),
            '໒' => Ok(Lao::DigitTwo),
            '໓' => Ok(Lao::DigitThree),
            '໔' => Ok(Lao::DigitFour),
            '໕' => Ok(Lao::DigitFive),
            '໖' => Ok(Lao::DigitSix),
            '໗' => Ok(Lao::DigitSeven),
            '໘' => Ok(Lao::DigitEight),
            '໙' => Ok(Lao::DigitNine),
            'ໜ' => Ok(Lao::HoNo),
            'ໝ' => Ok(Lao::HoMo),
            'ໞ' => Ok(Lao::LetterKhmuGo),
            'ໟ' => Ok(Lao::LetterKhmuNyo),
            _ => Err(()),
        }
    }
}
/// Converts a `Lao` variant to its Unicode code point.
impl Into<u32> for Lao {
    fn into(self) -> u32 {
        // A `char`'s scalar value IS its code point: `c as u32` is exact.
        // The old implementation round-tripped through the `escape_unicode`
        // string ("\u{e81}" -> "e81" -> parse hex), allocating two Strings
        // per call for the same result.
        let c: char = self.into();
        c as u32
    }
}
impl std::convert::TryFrom<u32> for Lao {
    type Error = ();
    /// Attempts to interpret `u` as a Unicode scalar value inside the Lao
    /// block; any invalid scalar or out-of-block code point yields `Err(())`.
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
    }
}
/// Iterates through the Lao block in code-point order, starting after `self`.
impl Iterator for Lao {
    type Item = Self;
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let mut index: u32 = (*self).into();
        // FIX 1: the block contains unassigned gaps (e.g. U+0E83); the old
        // code returned None at the first gap, truncating iteration. Skip
        // gaps until the end of the block (U+0EDF, `LetterKhmuNyo`).
        // FIX 2: the old code never updated `*self`, so every call yielded
        // the same successor — a `for` loop over a `Lao` never terminated.
        loop {
            index += 1;
            if index > 0xedf {
                return None;
            }
            if let Ok(succ) = Self::try_from(index) {
                *self = succ;
                return Some(succ);
            }
        }
    }
}
impl Lao {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        Lao::LetterKo
    }
    /// The character's name, in sentence case
    pub fn name(&self) -> String {
        // Builds e.g. "LaoLetterKo" from the Debug representation, then lets
        // `string_morph` split/case it into a sentence-cased name.
        let s = std::format!("Lao{:#?}", self);
        string_morph::to_sentence_case(&s)
    }
}
|
use bevy::prelude::*;
use crate::position::Position;
use crate::{ARENA_HEIGHT, ARENA_WIDTH};
use crate::size::Size;
/// System: converts each entity's grid `Position` into a world-space
/// `Transform` translation, scaled to the primary window's dimensions.
/// Panics if there is no primary window (`unwrap`).
pub fn position_translation(windows: Res<Windows>, mut q: Query<(&Position, &mut Transform)>) {
    let window = windows.get_primary().unwrap();
    for (pos, mut transform) in q.iter_mut() {
        transform.translation = pos.convert(window.width(), window.height()).into();
    }
}
/// System: scales each entity's sprite so that one arena cell spans
/// `window / ARENA_*` pixels, i.e. sprite size is expressed in grid cells.
/// Panics if there is no primary window (`unwrap`).
pub fn size_scaling(windows: Res<Windows>, mut q: Query<(&Size, &mut Sprite)>) {
    let window = windows.get_primary().unwrap();
    for (sprite_size, mut sprite) in q.iter_mut() {
        sprite.size = Vec2::new(
            sprite_size.width / ARENA_WIDTH as f32 * window.width() as f32,
            sprite_size.height / ARENA_HEIGHT as f32 * window.height() as f32,
        );
    }
}
|
use std::env;
use std::io::{self, BufRead};
use std::process::{self, Command};
use regex::Regex;
/// Parses the raw argv: prints usage and exits when help is requested or too
/// few arguments are given; otherwise returns `(regex_string, command)` where
/// `command` is the remaining arguments joined with spaces.
fn process_arguments(mut args: Vec<String>) -> (String, String) {
    let wants_help = args
        .iter()
        .any(|a| matches!(a.as_str(), "help" | "-h" | "--help"));
    if wants_help || args.len() < 3 {
        println!("usage: regexargs RegularExpression ExecuteCommand");
        println!("ex: echo hello.txt | regexargs \"(.*)\\\\.(.*)\" echo {{0}} {{1}} {{2}}");
        println!("Output: hello.txt hello txt");
        process::exit(1);
    }
    // Drop the program name, take the pattern, join the rest into one command.
    args.remove(0);
    let regex_string = args.remove(0);
    let command = args.join(" ");
    (regex_string, command)
}
/// Expands `{N}` placeholders in `user_command` using the capture groups of
/// `user_regex` matched against `line`: `{0}` is the whole input line, `{N}`
/// (N >= 1) is capture group N. Panics when a placeholder index exceeds the
/// number of groups in the pattern.
fn make_command(user_regex: &Regex, user_command: &str, line: String) -> String {
    let placeholder_regex = Regex::new(r"(\{\d+\})").unwrap();
    // Collect the capture groups; group 0 (the whole match) is skipped so
    // `matches[i]` corresponds to placeholder `{i+1}`.
    let matches: Vec<String> = match user_regex.captures(&line) {
        Some(caps) => caps
            .iter()
            .skip(1)
            // FIX: a group that did not participate in the match is None;
            // the old `unwrap` panicked on optional groups. Substitute "".
            .map(|m| m.map_or_else(String::new, |m| m.as_str().to_string()))
            .collect(),
        None => vec![],
    };
    let mut exec_command = user_command.to_string();
    for target in placeholder_regex.captures_iter(user_command) {
        let holder = target.get(1).unwrap();
        let text = holder.as_str();
        let braces: &[_] = &['{', '}'];
        let index: usize = text.trim_matches(braces).parse().unwrap();
        if index > matches.len() {
            println!("{:?}", matches);
            // FIX: `panic!(format!(..))` is rejected by the 2021 edition;
            // pass the format string directly (message grammar also fixed).
            panic!("{}th group does not exist", index);
        }
        if index == 0 {
            exec_command = exec_command.replace(text, &line);
        } else {
            exec_command = exec_command.replace(text, &matches[index - 1]);
        }
    }
    exec_command
}
/// Splits a command line on single spaces into the program name and its
/// argument list. Takes `&str` instead of the old `&String` (idiomatic, and
/// existing `&String` call sites still work via deref coercion).
fn split_commandline(command: &str) -> (String, Vec<String>) {
    let mut tokens: Vec<String> = command.split(' ').map(|s| s.to_string()).collect();
    // `split` always yields at least one element, so `remove(0)` cannot panic.
    let process = tokens.remove(0);
    (process, tokens)
}
/// Entry point: compiles the user pattern, then for every stdin line expands
/// the command template, runs the resulting command, and prints its stdout.
/// Note: only the child's stdout is forwarded; its stderr is discarded.
fn main() {
    let (user_regex_string, user_command) = process_arguments(env::args().collect());
    let user_regex = match Regex::new(&user_regex_string){
        Ok(x) => x,
        Err(e) => {
            println!("{}", e);
            process::exit(1)
        }
    };
    let stdin = io::stdin();
    for line in stdin.lock().lines(){
        let this_line = line.unwrap();
        let exec_command = make_command(&user_regex, &user_command, this_line);
        let (process, args) = split_commandline(&exec_command);
        // Spawn synchronously and capture output; panics if the command
        // cannot be spawned or its output is not valid UTF-8 (`unwrap`s).
        let mut cmd = Command::new(process);
        cmd.args(args);
        let output = cmd.output().unwrap().stdout;
        print!("{}", String::from_utf8(output).unwrap());
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // argv[0] ("process") is dropped; arg 1 becomes the pattern, the rest
    // are joined into the command string.
    #[test]
    fn test_process_arguments() {
        let input:Vec<String> = vec!["process", "def", "ghi", "jkl"].into_iter().map(String::from).collect();
        assert_eq!(process_arguments(input), (String::from("def"), String::from("ghi jkl")));
    }
    // "{1}" expands to the first capture group of the match.
    #[test]
    fn test_make_command() {
        let user_regex = Regex::new(r"(\d+)").unwrap();
        let result = make_command(&user_regex, &String::from("good {1} bye"), String::from("hello 123 world"));
        assert_eq!(result, String::from("good 123 bye"));
    }
    // First token is the program, remaining tokens are its arguments.
    #[test]
    fn test_split_commandline() {
        let (command, args) = split_commandline(&String::from("hello good bye"));
        assert_eq!(command, String::from("hello"));
        let v:Vec<String> = vec!["good", "bye"].into_iter().map(String::from).collect();
        assert_eq!(args, v);
    }
}
use std::{io::SeekFrom, sync::Mutex};
use crate::{chunk_map::ChunkMap, oid, superblock::Superblock, tree::Tree, DiskKey, DiskKeyType};
const FIRST_CHUNK_TREE_OBJECTID: u64 = 256;
pub fn read_node_phys<D: fal::Device>(
device: &mut D,
superblock: &Superblock,
offset: u64,
) -> Box<[u8]> {
let mut bytes = vec![0u8; superblock.node_size as usize];
device.seek(SeekFrom::Start(offset)).unwrap();
device.read_exact(&mut bytes).unwrap();
bytes.into_boxed_slice()
}
pub fn read_node<D: fal::Device>(
device: &mut D,
superblock: &Superblock,
chunk_map: &ChunkMap,
offset: u64,
) -> Box<[u8]> {
read_node_phys(
device,
superblock,
chunk_map.get(superblock, offset).unwrap(),
)
}
/// A mounted filesystem: the device handle plus the superblock, the
/// logical-to-physical chunk map, and every tree loaded at mount time.
/// The quota and free-space trees are optional (see the notes in `mount`).
#[derive(Debug)]
pub struct Filesystem<D: fal::Device> {
    pub device: Mutex<D>,
    pub superblock: Superblock,
    pub chunk_map: ChunkMap,
    pub root_tree: Tree,
    pub chunk_tree: Tree,
    pub extent_tree: Tree,
    pub dev_tree: Tree,
    pub fs_tree: Tree,
    pub csum_tree: Tree,
    pub quota_tree: Option<Tree>,
    pub uuid_tree: Tree,
    pub free_space_tree: Option<Tree>,
}
impl<D: fal::Device> Filesystem<D> {
    /// Mounts the filesystem: loads the superblock, bootstraps the chunk map
    /// from the sys chunk array, loads the chunk and root trees, then
    /// resolves every other tree through the root tree. Panics (`unwrap`)
    /// if a mandatory tree is missing.
    pub fn mount(mut device: D) -> Self {
        let superblock = Superblock::load(&mut device);
        // Bootstrap: the sys chunk array gives just enough mappings to read
        // the chunk tree, which then fills in the full chunk map.
        let mut chunk_map = ChunkMap::read_sys_chunk_array(&superblock);
        let chunk_tree = Tree::load(&mut device, &superblock, &chunk_map, superblock.chunk_root);
        chunk_map.read_chunk_tree(&mut device, &superblock, &chunk_tree);
        let root_tree = Tree::load(&mut device, &superblock, &chunk_map, superblock.root);
        let extent_tree = Self::load_tree(
            &mut device,
            &superblock,
            &chunk_map,
            &root_tree,
            oid::EXTENT_TREE,
        )
        .unwrap();
        let dev_tree = Self::load_tree(
            &mut device,
            &superblock,
            &chunk_map,
            &root_tree,
            oid::DEV_TREE,
        )
        .unwrap();
        let fs_tree = Self::load_tree(
            &mut device,
            &superblock,
            &chunk_map,
            &root_tree,
            oid::FS_TREE,
        )
        .unwrap();
        let csum_tree = Self::load_tree(
            &mut device,
            &superblock,
            &chunk_map,
            &root_tree,
            oid::CSUM_TREE,
        )
        .unwrap();
        // It seems like the quota tree may not necessarily exist. Or, the quota tree is simply
        // stored somewhere else.
        let quota_tree = Self::load_tree(
            &mut device,
            &superblock,
            &chunk_map,
            &root_tree,
            oid::QUOTA_TREE,
        );
        let uuid_tree = Self::load_tree(
            &mut device,
            &superblock,
            &chunk_map,
            &root_tree,
            oid::UUID_TREE,
        )
        .unwrap();
        // The existence of the free space tree also seems optional. It's possibly located in the
        // fs tree, right?
        let free_space_tree = Self::load_tree(
            &mut device,
            &superblock,
            &chunk_map,
            &root_tree,
            oid::FREE_SPACE_TREE,
        );
        Self {
            device: Mutex::new(device),
            superblock,
            chunk_map,
            root_tree,
            chunk_tree,
            extent_tree,
            dev_tree,
            fs_tree,
            csum_tree,
            quota_tree,
            uuid_tree,
            free_space_tree,
        }
    }
    /// Looks up the `RootItem` for `oid` in `tree` and loads the tree it
    /// points at; `None` when no such root item exists.
    fn load_tree(
        device: &mut D,
        superblock: &Superblock,
        chunk_map: &ChunkMap,
        tree: &Tree,
        oid: u64,
    ) -> Option<Tree> {
        // `?` replaces the old `match { Some(v) => v, None => return None }`.
        let value = tree.get(
            device,
            superblock,
            chunk_map,
            &DiskKey {
                oid,
                ty: DiskKeyType::RootItem,
                offset: 0,
            },
        )?;
        // NOTE(review): panics if the value under a RootItem key is not a
        // root item — assumed impossible for a well-formed tree; confirm.
        let root_item = value.into_root_item().unwrap();
        Some(Tree::load(device, superblock, chunk_map, root_item.addr))
    }
}
|
use alloc::vec::Vec;
use keccak::KeccakF1600;
use util::Hash;
/// SHA3-256, built on the Keccak-f[1600] permutation.
pub struct Sha3_256(KeccakF1600);
impl Sha3_256 {
    /// Convenience constructor; equivalent to `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }
}
impl Default for Sha3_256 {
    fn default() -> Self {
        // Arguments are presumably (rate, capacity, output length): 1088-bit
        // rate + 512-bit capacity = 1600-bit state, with a 256/8 = 32-byte
        // digest — the SHA3-256 sponge parameters. TODO confirm the argument
        // order against `KeccakF1600::new`.
        Self(KeccakF1600::new(1088, 512, 256 / 8))
    }
}
impl Hash for Sha3_256 {
    fn hash_to_bytes(&mut self, message: &[u8]) -> Vec<u8> {
        // 0x06 is the SHA-3 domain-separation/padding suffix (FIPS 202),
        // distinguishing SHA3 from the SHAKE XOFs (0x1f).
        self.0.keccak(message, 0x06)
    }
}
|
#[macro_use]
extern crate nom;
#[macro_use]
extern crate failure;
extern crate bitvec;
mod errors;
mod huffman;
mod parser;
pub use parser::decode;
|
extern crate wordcrab;
use wordcrab::*;
mod analysis_options;
use analysis_options::*;
#[test]
fn empty_string() {
let stats = analyse_string("", ANALYSIS_OPTIONS_LWC);
assert_eq!(stats.lines, Some(0));
assert_eq!(stats.words, Some(0));
assert_eq!(stats.chars, Some(0));
}
#[test]
fn empty_line() {
let stats = analyse_string("\n", ANALYSIS_OPTIONS_LWC);
assert_eq!(stats.lines, Some(1));
assert_eq!(stats.words, Some(0));
assert_eq!(stats.chars, Some(1));
}
#[test]
fn short_ascii_line() {
let stats = analyse_string(
"the quick brown fox jumps over the lazy dog",
ANALYSIS_OPTIONS_LWC,
);
assert_eq!(stats.lines, Some(1));
assert_eq!(stats.words, Some(9));
assert_eq!(stats.chars, Some(43));
}
#[test]
fn short_ascii_paragraph() {
let stats = analyse_string(
"lorem ipsum,\ndolor sit amet,\nconsectetur,\nadipiscing elit",
ANALYSIS_OPTIONS_LWC,
);
assert_eq!(stats.lines, Some(4));
assert_eq!(stats.words, Some(8));
assert_eq!(stats.chars, Some(57));
}
|
// Copyright (c) Meta Platforms, Inc. and affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
pub mod naming_table;
#[cfg(test)]
mod test_naming_table;
use std::path::PathBuf;
use std::sync::Arc;
use anyhow::Result;
use datastore::ChangesStore;
use datastore::Store;
use decl_parser::DeclParser;
use file_provider::DiskProvider;
use file_provider::FileProvider;
use folded_decl_provider::FoldedDeclProvider;
use folded_decl_provider::LazyFoldedDeclProvider;
use naming_provider::NamingProvider;
use naming_table::NamingTable;
use ocamlrep::ptr::UnsafeOcamlPtr;
use ocamlrep::FromOcamlRep;
use ocamlrep::ToOcamlRep;
use oxidized::global_options::GlobalOptions;
use pos::ConstName;
use pos::FunName;
use pos::MethodName;
use pos::ModuleName;
use pos::PropName;
use pos::RelativePath;
use pos::RelativePathCtx;
use pos::TypeName;
use shallow_decl_provider::LazyShallowDeclProvider;
use shallow_decl_provider::ShallowDeclProvider;
use shallow_decl_provider::ShallowDeclStore;
use shm_store::OcamlShmStore;
use shm_store::ShmStore;
use ty::decl;
use ty::decl::folded::FoldedClass;
use ty::reason::BReason as BR;
/// Rust backend for hh_server's providers: wires together the file store,
/// naming table, shallow-decl store/provider, and folded-decl provider, all
/// backed by shared-memory stores with push/pop-able local-change overlays.
pub struct HhServerProviderBackend {
    path_ctx: Arc<RelativePathCtx>,
    opts: GlobalOptions,
    decl_parser: DeclParser<BR>,
    // File contents with a local-changes overlay (see push/pop_local_changes).
    file_store: Arc<ChangesStore<RelativePath, FileType>>,
    file_provider: Arc<FileProviderWithChanges>,
    naming_table: Arc<NamingTable>,
    /// Collection of Arcs pointing to the backing stores for the
    /// ShallowDeclStore below, allowing us to invoke push/pop_local_changes.
    shallow_decl_changes_store: Arc<ShallowStoreWithChanges>,
    shallow_decl_store: Arc<ShallowDeclStore<BR>>,
    lazy_shallow_decl_provider: Arc<LazyShallowDeclProvider<BR>>,
    // Folded classes: the raw shm store plus its local-changes wrapper.
    folded_classes_shm: Arc<OcamlShmStore<TypeName, Arc<FoldedClass<BR>>>>,
    folded_classes_store: Arc<ChangesStore<TypeName, Arc<FoldedClass<BR>>>>,
    folded_decl_provider: Arc<LazyFoldedDeclProvider<BR>>,
}
/// Serializable configuration from which a `HhServerProviderBackend` can be
/// (re)constructed; see `HhServerProviderBackend::config`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct Config {
    pub path_ctx: RelativePathCtx,
    pub opts: GlobalOptions,
    // Optional on-disk naming table, handed to `NamingTable::new`.
    pub db_path: Option<PathBuf>,
}
impl HhServerProviderBackend {
    /// Builds the full provider stack from `config`: file store/provider,
    /// naming table, shallow decl store + lazy provider, and the folded decl
    /// provider layered on top. Fails if the naming table cannot be opened.
    pub fn new(config: Config) -> Result<Self> {
        let Config {
            path_ctx,
            opts,
            db_path,
        } = config;
        let path_ctx = Arc::new(path_ctx);
        // File contents live in non-evictable shared memory, wrapped in a
        // ChangesStore so local edits can be pushed/popped.
        let file_store = Arc::new(ChangesStore::new(Arc::new(ShmStore::new(
            "File",
            shm_store::Evictability::NonEvictable,
            shm_store::Compression::default(),
        ))));
        // Reads overlay the in-memory change store on top of the disk provider.
        let file_provider = Arc::new(FileProviderWithChanges {
            delta_and_changes: Arc::clone(&file_store),
            disk: DiskProvider::new(Arc::clone(&path_ctx), None),
        });
        let decl_parser = DeclParser::with_options(Arc::clone(&file_provider) as _, opts.clone());
        // No dependency tracking in this backend.
        let dependency_graph = Arc::new(depgraph_api::NoDepGraph::new());
        let naming_table = Arc::new(NamingTable::new(db_path)?);
        let shallow_decl_changes_store =
            Arc::new(ShallowStoreWithChanges::new(opts.tco_populate_member_heaps));
        let shallow_decl_store = shallow_decl_changes_store.as_shallow_decl_store();
        // Lazily decl-parses files (via the naming table) on cache miss.
        let lazy_shallow_decl_provider = Arc::new(LazyShallowDeclProvider::new(
            Arc::clone(&shallow_decl_store),
            Arc::clone(&naming_table) as _,
            decl_parser.clone(),
        ));
        // Folded classes are evictable: they can always be re-folded from
        // shallow decls.
        let folded_classes_shm = Arc::new(OcamlShmStore::new(
            "FoldedClasses",
            shm_store::Evictability::Evictable,
            shm_store::Compression::default(),
        ));
        let folded_classes_store =
            Arc::new(ChangesStore::new(Arc::clone(&folded_classes_shm) as _));
        let folded_decl_provider = Arc::new(LazyFoldedDeclProvider::new(
            Arc::new(opts.clone()),
            Arc::clone(&folded_classes_store) as _,
            Arc::clone(&lazy_shallow_decl_provider) as _,
            dependency_graph,
        ));
        Ok(Self {
            path_ctx,
            opts,
            file_store,
            file_provider,
            decl_parser,
            folded_decl_provider,
            naming_table,
            shallow_decl_changes_store,
            shallow_decl_store,
            lazy_shallow_decl_provider,
            folded_classes_shm,
            folded_classes_store,
        })
    }
    /// Reconstructs a `Config` equivalent to the one this backend was built
    /// from (round-trips through `Self::new`).
    pub fn config(&self) -> Config {
        Config {
            path_ctx: (*self.path_ctx).clone(),
            db_path: self.naming_table.db_path(),
            opts: self.opts.clone(),
        }
    }
    /// The global options this backend was created with.
    pub fn opts(&self) -> &GlobalOptions {
        &self.opts
    }
    /// The backing naming table.
    pub fn naming_table(&self) -> &NamingTable {
        &self.naming_table
    }
    /// The file store, type-erased to the `Store` trait.
    pub fn file_store(&self) -> &dyn Store<RelativePath, FileType> {
        &*self.file_store
    }
    /// The shallow decl provider, type-erased to the provider trait.
    pub fn shallow_decl_provider(&self) -> &dyn ShallowDeclProvider<BR> {
        &*self.lazy_shallow_decl_provider
    }
    /// Decl-parse the given file, dedup duplicate definitions of the same
    /// symbol (within the file, as well as removing losers of naming conflicts
    /// with other files), and add the parsed decls to the shallow decl store.
    pub fn parse_and_cache_decls<'a>(
        &self,
        path: RelativePath,
        text: &'a [u8],
        arena: &'a bumpalo::Bump,
    ) -> Result<oxidized_by_ref::direct_decl_parser::ParsedFileWithHashes<'a>> {
        let mut parsed_file = self.decl_parser.parse_impl(path, text, arena);
        // Dedup + conflict resolution happens in the provider before caching.
        self.lazy_shallow_decl_provider
            .dedup_and_add_decls(path, parsed_file.decls.iter().map(Into::into))?;
        parsed_file.decls.rev(arena); // To match OCaml behavior
        Ok(parsed_file.into())
    }
    /// Directly add the given decls to the shallow decl store (without removing
    /// php_stdlib decls, deduping, or removing naming conflict losers).
    pub fn add_decls(
        &self,
        decls: &[&(&str, oxidized_by_ref::shallow_decl_defs::Decl<'_>)],
    ) -> Result<()> {
        self.shallow_decl_store
            .add_decls(decls.iter().copied().map(Into::into))
    }
    /// Starts a new local-changes scope on every changes-aware store, so
    /// subsequent writes can be reverted with `pop_local_changes`.
    pub fn push_local_changes(&self) {
        self.file_store.push_local_changes();
        self.naming_table.push_local_changes();
        self.shallow_decl_changes_store.push_local_changes();
        self.folded_classes_store.push_local_changes();
    }
    /// Discards the most recent local-changes scope on every store pushed to
    /// by `push_local_changes` (same stores, same order).
    pub fn pop_local_changes(&self) {
        self.file_store.pop_local_changes();
        self.naming_table.pop_local_changes();
        self.shallow_decl_changes_store.pop_local_changes();
        self.folded_classes_store.pop_local_changes();
    }
// ---
// Deletion support
pub fn oldify_funs_batch(&self, names: &[FunName]) -> Result<()> {
let funs: &ChangesStore<FunName, _> = &self.shallow_decl_changes_store.funs;
let mut moves = Vec::new();
let mut deletes = Vec::new();
for &name in names.iter() {
let old_name = FunName::new(format!("old${}", name));
if funs.contains_key(name)? {
moves.push((name, old_name));
} else if funs.contains_key(old_name)? {
deletes.push(old_name);
}
}
funs.move_batch(&mut moves.into_iter())?;
funs.remove_batch(&mut deletes.into_iter())?;
Ok(())
}
pub fn remove_funs_batch(&self, names: &[FunName]) -> Result<()> {
self.shallow_decl_changes_store
.funs
.remove_batch(&mut names.iter().copied())
}
/// Looks up the stashed `old$<name>` decl for each given name; names with no
/// stashed decl map to `None`.
pub fn get_old_funs_batch(
    &self,
    names: &[FunName],
) -> Result<std::collections::BTreeMap<FunName, Option<Arc<decl::FunDecl<BR>>>>> {
    let store: &ChangesStore<FunName, _> = &self.shallow_decl_changes_store.funs;
    names
        .iter()
        .map(|&name| Ok((name, store.get(FunName::new(format!("old${}", name)))?)))
        .collect()
}
/// Deletes any stashed `old$<name>` entries for the given names.
pub fn remove_old_funs_batch(&self, names: &[FunName]) -> Result<()> {
    let store: &ChangesStore<FunName, _> = &self.shallow_decl_changes_store.funs;
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = FunName::new(format!("old${}", name));
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current shallow class decls under `old$`-prefixed keys; for
/// absent keys, drops any leftover `old$<key>` entry instead.
pub fn oldify_shallow_classes_batch(&self, names: &[TypeName]) -> Result<()> {
    let store: &ChangesStore<TypeName, _> = &self.shallow_decl_changes_store.classes;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = TypeName::new(format!("old${}", name));
        if store.contains_key(name)? {
            renames.push((name, old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Looks up the stashed `old$<name>` shallow class for each given name;
/// names with no stashed decl map to `None`.
pub fn get_old_shallow_classes_batch(
    &self,
    names: &[TypeName],
) -> Result<std::collections::BTreeMap<TypeName, Option<Arc<decl::ShallowClass<BR>>>>> {
    let store: &ChangesStore<TypeName, _> = &self.shallow_decl_changes_store.classes;
    names
        .iter()
        .map(|&name| Ok((name, store.get(TypeName::new(format!("old${}", name)))?)))
        .collect()
}
/// Removes the given shallow class decls from the changes store.
pub fn remove_shallow_classes_batch(&self, names: &[TypeName]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.classes.remove_batch(&mut to_remove)
}
/// Deletes any stashed `old$<name>` shallow class entries for the given names.
pub fn remove_old_shallow_classes_batch(&self, names: &[TypeName]) -> Result<()> {
    let store: &ChangesStore<TypeName, _> = &self.shallow_decl_changes_store.classes;
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = TypeName::new(format!("old${}", name));
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current folded class decls under `old$`-prefixed keys; for
/// absent keys, drops any leftover `old$<key>` entry instead.
pub fn oldify_folded_classes_batch(&self, names: &[TypeName]) -> Result<()> {
    let store: &ChangesStore<TypeName, _> = &self.folded_classes_store;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = TypeName::new(format!("old${}", name));
        if store.contains_key(name)? {
            renames.push((name, old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Looks up the stashed `old$<name>` folded class for each given name;
/// names with no stashed decl map to `None`.
pub fn get_old_folded_classes_batch(
    &self,
    names: &[TypeName],
) -> Result<std::collections::BTreeMap<TypeName, Option<Arc<FoldedClass<BR>>>>> {
    let store: &ChangesStore<TypeName, _> = &self.folded_classes_store;
    names
        .iter()
        .map(|&name| Ok((name, store.get(TypeName::new(format!("old${}", name)))?)))
        .collect()
}
/// Removes the given folded class decls from the store.
pub fn remove_folded_classes_batch(&self, names: &[TypeName]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.folded_classes_store.remove_batch(&mut to_remove)
}
/// Deletes any stashed `old$<name>` folded class entries for the given names.
pub fn remove_old_folded_classes_batch(&self, names: &[TypeName]) -> Result<()> {
    let store: &ChangesStore<TypeName, _> = &self.folded_classes_store;
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = TypeName::new(format!("old${}", name));
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current typedef decls under `old$`-prefixed keys: each key
/// present in the store is renamed to `old$<key>`; for absent keys, any
/// leftover `old$<key>` entry is deleted instead.
pub fn oldify_typedefs_batch(&self, names: &[TypeName]) -> Result<()> {
    let typedefs: &ChangesStore<TypeName, _> = &self.shallow_decl_changes_store.typedefs;
    let mut moves = Vec::new();
    let mut deletes = Vec::new();
    for &name in names.iter() {
        let old_name = TypeName::new(format!("old${}", name));
        if typedefs.contains_key(name)? {
            // Reuse the `old_name` computed above; the previous code rebuilt
            // the identical `format!("old${}", name)` string here, which was
            // a needless allocation and inconsistent with the sibling
            // `oldify_*_batch` methods.
            moves.push((name, old_name));
        } else if typedefs.contains_key(old_name)? {
            deletes.push(old_name);
        }
    }
    typedefs.move_batch(&mut moves.into_iter())?;
    typedefs.remove_batch(&mut deletes.iter().copied())?;
    Ok(())
}
/// Removes the given typedef decls from the changes store.
pub fn remove_typedefs_batch(&self, names: &[TypeName]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.typedefs.remove_batch(&mut to_remove)
}
/// Looks up the stashed `old$<name>` typedef for each given name; names with
/// no stashed decl map to `None`.
pub fn get_old_typedefs_batch(
    &self,
    names: &[TypeName],
) -> Result<std::collections::BTreeMap<TypeName, Option<Arc<decl::TypedefDecl<BR>>>>> {
    let store: &ChangesStore<TypeName, _> = &self.shallow_decl_changes_store.typedefs;
    names
        .iter()
        .map(|&name| Ok((name, store.get(TypeName::new(format!("old${}", name)))?)))
        .collect()
}
/// Deletes any stashed `old$<name>` typedef entries for the given names.
pub fn remove_old_typedefs_batch(&self, names: &[TypeName]) -> Result<()> {
    let store: &ChangesStore<TypeName, _> = &self.shallow_decl_changes_store.typedefs;
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = TypeName::new(format!("old${}", name));
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current global const decls under `old$`-prefixed keys; for
/// absent keys, drops any leftover `old$<key>` entry instead.
pub fn oldify_gconsts_batch(&self, names: &[ConstName]) -> Result<()> {
    let store: &ChangesStore<ConstName, _> = &self.shallow_decl_changes_store.consts;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = ConstName::new(format!("old${}", name));
        if store.contains_key(name)? {
            renames.push((name, old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Removes the given global const decls from the changes store.
pub fn remove_gconsts_batch(&self, names: &[ConstName]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.consts.remove_batch(&mut to_remove)
}
/// Looks up the stashed `old$<name>` const for each given name; names with
/// no stashed decl map to `None`.
pub fn get_old_gconsts_batch(
    &self,
    names: &[ConstName],
) -> Result<std::collections::BTreeMap<ConstName, Option<Arc<decl::ConstDecl<BR>>>>> {
    let store: &ChangesStore<ConstName, _> = &self.shallow_decl_changes_store.consts;
    names
        .iter()
        .map(|&name| Ok((name, store.get(ConstName::new(format!("old${}", name)))?)))
        .collect()
}
/// Deletes any stashed `old$<name>` const entries for the given names.
pub fn remove_old_gconsts_batch(&self, names: &[ConstName]) -> Result<()> {
    let store: &ChangesStore<ConstName, _> = &self.shallow_decl_changes_store.consts;
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = ConstName::new(format!("old${}", name));
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current module decls under `old$`-prefixed keys; for absent
/// keys, drops any leftover `old$<key>` entry instead.
pub fn oldify_modules_batch(&self, names: &[ModuleName]) -> Result<()> {
    let store: &ChangesStore<ModuleName, _> = &self.shallow_decl_changes_store.modules;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = ModuleName::new(format!("old${}", name));
        if store.contains_key(name)? {
            renames.push((name, old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Removes the given module decls from the changes store.
pub fn remove_modules_batch(&self, names: &[ModuleName]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.modules.remove_batch(&mut to_remove)
}
/// Looks up the stashed `old$<name>` module for each given name; names with
/// no stashed decl map to `None`.
pub fn get_old_modules_batch(
    &self,
    names: &[ModuleName],
) -> Result<std::collections::BTreeMap<ModuleName, Option<Arc<decl::ModuleDecl<BR>>>>> {
    let store: &ChangesStore<ModuleName, _> = &self.shallow_decl_changes_store.modules;
    names
        .iter()
        .map(|&name| Ok((name, store.get(ModuleName::new(format!("old${}", name)))?)))
        .collect()
}
/// Deletes any stashed `old$<name>` module entries for the given names.
pub fn remove_old_modules_batch(&self, names: &[ModuleName]) -> Result<()> {
    let store: &ChangesStore<ModuleName, _> = &self.shallow_decl_changes_store.modules;
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = ModuleName::new(format!("old${}", name));
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current prop decls under keys whose class component is
/// `old$`-prefixed; for absent keys, drops any leftover stashed entry.
pub fn oldify_props_batch(&self, names: &[(TypeName, PropName)]) -> Result<()> {
    let store: &ChangesStore<(TypeName, PropName), _> = &self.shallow_decl_changes_store.props;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for (class, prop) in names.iter().copied() {
        let old = (TypeName::new(format!("old${}", class)), prop);
        if store.contains_key((class, prop))? {
            renames.push(((class, prop), old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Removes the given prop decls from the changes store.
pub fn remove_props_batch(&self, names: &[(TypeName, PropName)]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.props.remove_batch(&mut to_remove)
}
/// Looks up the stashed prop decl (under the `old$`-prefixed class name) for
/// each given key; keys with no stashed decl map to `None`.
pub fn get_old_props_batch(
    &self,
    names: &[(TypeName, PropName)],
) -> Result<std::collections::BTreeMap<(TypeName, PropName), Option<decl::Ty<BR>>>> {
    let store: &ChangesStore<(TypeName, PropName), _> = &self.shallow_decl_changes_store.props;
    names
        .iter()
        .map(|&(class, prop)| {
            let old = (TypeName::new(format!("old${}", class)), prop);
            Ok(((class, prop), store.get(old)?))
        })
        .collect()
}
/// Deletes any stashed prop entries (under `old$`-prefixed class names) for
/// the given keys.
pub fn remove_old_props_batch(&self, names: &[(TypeName, PropName)]) -> Result<()> {
    let store: &ChangesStore<(TypeName, PropName), _> = &self.shallow_decl_changes_store.props;
    let mut stale = Vec::new();
    for (class, prop) in names.iter().copied() {
        let old = (TypeName::new(format!("old${}", class)), prop);
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current static prop decls under keys whose class component is
/// `old$`-prefixed; for absent keys, drops any leftover stashed entry.
pub fn oldify_static_props_batch(&self, names: &[(TypeName, PropName)]) -> Result<()> {
    let store: &ChangesStore<(TypeName, PropName), _> =
        &self.shallow_decl_changes_store.static_props;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for (class, prop) in names.iter().copied() {
        let old = (TypeName::new(format!("old${}", class)), prop);
        if store.contains_key((class, prop))? {
            renames.push(((class, prop), old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Removes the given static prop decls from the changes store.
pub fn remove_static_props_batch(&self, names: &[(TypeName, PropName)]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.static_props.remove_batch(&mut to_remove)
}
/// Looks up the stashed static prop decl (under the `old$`-prefixed class
/// name) for each given key; keys with no stashed decl map to `None`.
pub fn get_old_static_props_batch(
    &self,
    names: &[(TypeName, PropName)],
) -> Result<std::collections::BTreeMap<(TypeName, PropName), Option<decl::Ty<BR>>>> {
    let store: &ChangesStore<(TypeName, PropName), _> =
        &self.shallow_decl_changes_store.static_props;
    names
        .iter()
        .map(|&(class, prop)| {
            let old = (TypeName::new(format!("old${}", class)), prop);
            Ok(((class, prop), store.get(old)?))
        })
        .collect()
}
/// Deletes any stashed static prop entries (under `old$`-prefixed class
/// names) for the given keys.
pub fn remove_old_static_props_batch(&self, names: &[(TypeName, PropName)]) -> Result<()> {
    let store: &ChangesStore<(TypeName, PropName), _> =
        &self.shallow_decl_changes_store.static_props;
    let mut stale = Vec::new();
    for (class, prop) in names.iter().copied() {
        let old = (TypeName::new(format!("old${}", class)), prop);
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current method decls under keys whose class component is
/// `old$`-prefixed; for absent keys, drops any leftover stashed entry.
pub fn oldify_methods_batch(&self, names: &[(TypeName, MethodName)]) -> Result<()> {
    let store: &ChangesStore<(TypeName, MethodName), _> =
        &self.shallow_decl_changes_store.methods;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for (class, method) in names.iter().copied() {
        let old = (TypeName::new(format!("old${}", class)), method);
        if store.contains_key((class, method))? {
            renames.push(((class, method), old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Looks up the stashed method decl (under the `old$`-prefixed class name)
/// for each given key; keys with no stashed decl map to `None`.
pub fn get_old_methods_batch(
    &self,
    names: &[(TypeName, MethodName)],
) -> Result<std::collections::BTreeMap<(TypeName, MethodName), Option<decl::Ty<BR>>>> {
    let store: &ChangesStore<(TypeName, MethodName), _> =
        &self.shallow_decl_changes_store.methods;
    names
        .iter()
        .map(|&(class, method)| {
            let old = (TypeName::new(format!("old${}", class)), method);
            Ok(((class, method), store.get(old)?))
        })
        .collect()
}
/// Removes the given method decls from the changes store.
pub fn remove_methods_batch(&self, names: &[(TypeName, MethodName)]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.methods.remove_batch(&mut to_remove)
}
/// Deletes any stashed method entries (under `old$`-prefixed class names)
/// for the given keys.
pub fn remove_old_methods_batch(&self, names: &[(TypeName, MethodName)]) -> Result<()> {
    let store: &ChangesStore<(TypeName, MethodName), _> =
        &self.shallow_decl_changes_store.methods;
    let mut stale = Vec::new();
    for (class, method) in names.iter().copied() {
        let old = (TypeName::new(format!("old${}", class)), method);
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current static method decls under keys whose class component
/// is `old$`-prefixed; for absent keys, drops any leftover stashed entry.
pub fn oldify_static_methods_batch(&self, names: &[(TypeName, MethodName)]) -> Result<()> {
    let store: &ChangesStore<(TypeName, MethodName), _> =
        &self.shallow_decl_changes_store.static_methods;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for (class, method) in names.iter().copied() {
        let old = (TypeName::new(format!("old${}", class)), method);
        if store.contains_key((class, method))? {
            renames.push(((class, method), old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Removes the given static method decls from the changes store.
pub fn remove_static_methods_batch(&self, names: &[(TypeName, MethodName)]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.static_methods.remove_batch(&mut to_remove)
}
/// Looks up the stashed static method decl (under the `old$`-prefixed class
/// name) for each given key; keys with no stashed decl map to `None`.
pub fn get_old_static_methods_batch(
    &self,
    names: &[(TypeName, MethodName)],
) -> Result<std::collections::BTreeMap<(TypeName, MethodName), Option<decl::Ty<BR>>>> {
    let store: &ChangesStore<(TypeName, MethodName), _> =
        &self.shallow_decl_changes_store.static_methods;
    names
        .iter()
        .map(|&(class, method)| {
            let old = (TypeName::new(format!("old${}", class)), method);
            Ok(((class, method), store.get(old)?))
        })
        .collect()
}
/// Deletes any stashed static method entries (under `old$`-prefixed class
/// names) for the given keys.
pub fn remove_old_static_methods_batch(&self, names: &[(TypeName, MethodName)]) -> Result<()> {
    let store: &ChangesStore<(TypeName, MethodName), _> =
        &self.shallow_decl_changes_store.static_methods;
    let mut stale = Vec::new();
    for (class, method) in names.iter().copied() {
        let old = (TypeName::new(format!("old${}", class)), method);
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
/// Stashes the current constructor decls under `old$`-prefixed keys; for
/// absent keys, drops any leftover `old$<key>` entry instead.
pub fn oldify_constructors_batch(&self, names: &[TypeName]) -> Result<()> {
    let store: &ChangesStore<TypeName, _> =
        &self.shallow_decl_changes_store.constructors;
    let mut renames = Vec::with_capacity(names.len());
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = TypeName::new(format!("old${}", name));
        if store.contains_key(name)? {
            renames.push((name, old));
        } else if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.move_batch(&mut renames.into_iter())?;
    store.remove_batch(&mut stale.into_iter())?;
    Ok(())
}
/// Removes the given constructor decls from the changes store.
pub fn remove_constructors_batch(&self, names: &[TypeName]) -> Result<()> {
    let mut to_remove = names.iter().copied();
    self.shallow_decl_changes_store.constructors.remove_batch(&mut to_remove)
}
/// Looks up the stashed `old$<name>` constructor for each given name; names
/// with no stashed decl map to `None`.
pub fn get_old_constructors_batch(
    &self,
    names: &[TypeName],
) -> Result<std::collections::BTreeMap<TypeName, Option<decl::Ty<BR>>>> {
    let store: &ChangesStore<TypeName, _> =
        &self.shallow_decl_changes_store.constructors;
    names
        .iter()
        .map(|&name| Ok((name, store.get(TypeName::new(format!("old${}", name)))?)))
        .collect()
}
/// Deletes any stashed `old$<name>` constructor entries for the given names.
pub fn remove_old_constructors_batch(&self, names: &[TypeName]) -> Result<()> {
    let store: &ChangesStore<TypeName, _> =
        &self.shallow_decl_changes_store.constructors;
    let mut stale = Vec::new();
    for name in names.iter().copied() {
        let old = TypeName::new(format!("old${}", name));
        if store.contains_key(old)? {
            stale.push(old);
        }
    }
    store.remove_batch(&mut stale.into_iter())
}
//
// ---
}
// Expose the backend's component providers through the trait-object API.
impl rust_provider_backend_api::RustProviderBackend<BR> for HhServerProviderBackend {
fn file_provider(&self) -> &dyn FileProvider {
&*self.file_provider
}
fn naming_provider(&self) -> &dyn NamingProvider {
&*self.naming_table
}
fn folded_decl_provider(&self) -> &dyn FoldedDeclProvider<BR> {
&*self.folded_decl_provider
}
// Allows callers holding the trait object to downcast back to the
// concrete backend type.
fn as_any(&self) -> &dyn std::any::Any {
self
}
}
#[rustfmt::skip]
impl HhServerProviderBackend {
/// SAFETY: This method (and all other `get_ocaml_` methods) call into the
/// OCaml runtime and may trigger a GC. Must be invoked from the main thread
/// with no concurrent interaction with the OCaml runtime. The returned
/// `UnsafeOcamlPtr` is unrooted and could be invalidated if the GC is
/// triggered after this method returns.
///
/// Returns `None` when the entry has a local (uncommitted) change; the
/// OCaml value is only read from shared memory when no local change
/// shadows it.
pub unsafe fn get_ocaml_shallow_class(&self, name: TypeName) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.classes.has_local_change(name) { None }
else { self.shallow_decl_changes_store.classes_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_typedef(&self, name: TypeName) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.typedefs.has_local_change(name) { None }
else { self.shallow_decl_changes_store.typedefs_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_fun(&self, name: pos::FunName) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.funs.has_local_change(name) { None }
else { self.shallow_decl_changes_store.funs_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_const(&self, name: pos::ConstName) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.consts.has_local_change(name) { None }
else { self.shallow_decl_changes_store.consts_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_module(&self, name: pos::ModuleName) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.modules.has_local_change(name) { None }
else { self.shallow_decl_changes_store.modules_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_folded_class(&self, name: TypeName) -> Option<UnsafeOcamlPtr> {
if self.folded_classes_store.has_local_change(name) { None }
else { self.folded_classes_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_property(&self, name: (TypeName, pos::PropName)) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.props.has_local_change(name) { None }
else { self.shallow_decl_changes_store.props_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_static_property(&self, name: (TypeName, pos::PropName)) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.static_props.has_local_change(name) { None }
else { self.shallow_decl_changes_store.static_props_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_method(&self, name: (TypeName, pos::MethodName)) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.methods.has_local_change(name) { None }
else { self.shallow_decl_changes_store.methods_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_static_method(&self, name: (TypeName, pos::MethodName)) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.static_methods.has_local_change(name) { None }
else { self.shallow_decl_changes_store.static_methods_shm.get_ocaml_value(name) }
}
/// See the SAFETY note on `get_ocaml_shallow_class`.
pub unsafe fn get_ocaml_constructor(&self, name: TypeName) -> Option<UnsafeOcamlPtr> {
if self.shallow_decl_changes_store.constructors.has_local_change(name) { None }
else { self.shallow_decl_changes_store.constructors_shm.get_ocaml_value(name) }
}
}
/// Per-decl-kind stores, each layered as a `ChangesStore` (local changes)
/// over an `OcamlShmStore` (shared memory). `store_view` bundles the
/// change-layer stores into a single `ShallowDeclStore` (see `new` below).
#[rustfmt::skip]
struct ShallowStoreWithChanges {
classes: Arc<ChangesStore <TypeName, Arc<decl::ShallowClass<BR>>>>,
classes_shm: Arc<OcamlShmStore<TypeName, Arc<decl::ShallowClass<BR>>>>,
typedefs: Arc<ChangesStore <TypeName, Arc<decl::TypedefDecl<BR>>>>,
typedefs_shm: Arc<OcamlShmStore<TypeName, Arc<decl::TypedefDecl<BR>>>>,
funs: Arc<ChangesStore <pos::FunName, Arc<decl::FunDecl<BR>>>>,
funs_shm: Arc<OcamlShmStore<pos::FunName, Arc<decl::FunDecl<BR>>>>,
consts: Arc<ChangesStore <pos::ConstName, Arc<decl::ConstDecl<BR>>>>,
consts_shm: Arc<OcamlShmStore<pos::ConstName, Arc<decl::ConstDecl<BR>>>>,
modules: Arc<ChangesStore <pos::ModuleName, Arc<decl::ModuleDecl<BR>>>>,
modules_shm: Arc<OcamlShmStore<pos::ModuleName, Arc<decl::ModuleDecl<BR>>>>,
props: Arc<ChangesStore <(TypeName, pos::PropName), decl::Ty<BR>>>,
props_shm: Arc<OcamlShmStore<(TypeName, pos::PropName), decl::Ty<BR>>>,
static_props: Arc<ChangesStore <(TypeName, pos::PropName), decl::Ty<BR>>>,
static_props_shm: Arc<OcamlShmStore<(TypeName, pos::PropName), decl::Ty<BR>>>,
methods: Arc<ChangesStore <(TypeName, pos::MethodName), decl::Ty<BR>>>,
methods_shm: Arc<OcamlShmStore<(TypeName, pos::MethodName), decl::Ty<BR>>>,
static_methods: Arc<ChangesStore <(TypeName, pos::MethodName), decl::Ty<BR>>>,
static_methods_shm: Arc<OcamlShmStore<(TypeName, pos::MethodName), decl::Ty<BR>>>,
constructors: Arc<ChangesStore <TypeName, decl::Ty<BR>>>,
constructors_shm: Arc<OcamlShmStore<TypeName, decl::Ty<BR>>>,
// Read-side view over the change-layer stores above.
store_view: Arc<ShallowDeclStore<BR>>,
}
impl ShallowStoreWithChanges {
/// Builds one evictable shared-memory store per decl kind, wraps each in a
/// `ChangesStore` change layer, and assembles the `ShallowDeclStore` view.
///
/// When `populate_member_heaps` is false, the view is built without the
/// member stores (props/methods/constructors).
#[rustfmt::skip]
fn new(populate_member_heaps: bool) -> Self {
use shm_store::{Compression, Evictability::Evictable};
let classes_shm = Arc::new(OcamlShmStore::new("Classes", Evictable, Compression::default()));
let typedefs_shm = Arc::new(OcamlShmStore::new("Typedefs", Evictable, Compression::default()));
let funs_shm = Arc::new(OcamlShmStore::new("Funs", Evictable, Compression::default()));
let consts_shm = Arc::new(OcamlShmStore::new("Consts", Evictable, Compression::default()));
let modules_shm = Arc::new(OcamlShmStore::new("Modules", Evictable, Compression::default()));
let props_shm = Arc::new(OcamlShmStore::new("Props", Evictable, Compression::default()));
let static_props_shm = Arc::new(OcamlShmStore::new("StaticProps", Evictable, Compression::default()));
let methods_shm = Arc::new(OcamlShmStore::new("Methods", Evictable, Compression::default()));
let static_methods_shm = Arc::new(OcamlShmStore::new("StaticMethods", Evictable, Compression::default()));
let constructors_shm = Arc::new(OcamlShmStore::new("Constructors", Evictable, Compression::default()));
// Change layers stack local (revertible) edits over the shm stores.
let classes = Arc::new(ChangesStore::new(Arc::clone(&classes_shm) as _));
let typedefs = Arc::new(ChangesStore::new(Arc::clone(&typedefs_shm) as _));
let funs = Arc::new(ChangesStore::new(Arc::clone(&funs_shm) as _));
let consts = Arc::new(ChangesStore::new(Arc::clone(&consts_shm) as _));
let modules = Arc::new(ChangesStore::new(Arc::clone(&modules_shm) as _));
let props = Arc::new(ChangesStore::new(Arc::clone(&props_shm) as _));
let static_props = Arc::new(ChangesStore::new(Arc::clone(&static_props_shm) as _));
let methods = Arc::new(ChangesStore::new(Arc::clone(&methods_shm) as _));
let static_methods = Arc::new(ChangesStore::new(Arc::clone(&static_methods_shm) as _));
let constructors = Arc::new(ChangesStore::new(Arc::clone(&constructors_shm) as _));
let store_view = if populate_member_heaps {
Arc::new(ShallowDeclStore::new(
Arc::clone(&classes) as _,
Arc::clone(&typedefs) as _,
Arc::clone(&funs) as _,
Arc::clone(&consts) as _,
Arc::clone(&modules) as _,
Arc::clone(&props) as _,
Arc::clone(&static_props) as _,
Arc::clone(&methods) as _,
Arc::clone(&static_methods) as _,
Arc::clone(&constructors) as _,
))
} else {
Arc::new(ShallowDeclStore::with_no_member_stores(
Arc::clone(&classes) as _,
Arc::clone(&typedefs) as _,
Arc::clone(&funs) as _,
Arc::clone(&consts) as _,
Arc::clone(&modules) as _,
))
};
Self {
classes,
typedefs,
funs,
consts,
modules,
props,
static_props,
methods,
static_methods,
constructors,
classes_shm,
typedefs_shm,
funs_shm,
consts_shm,
modules_shm,
props_shm,
static_props_shm,
methods_shm,
static_methods_shm,
constructors_shm,
store_view,
}
}
/// Pushes a fresh local-change scope onto every change layer.
fn push_local_changes(&self) {
self.classes.push_local_changes();
self.typedefs.push_local_changes();
self.funs.push_local_changes();
self.consts.push_local_changes();
self.modules.push_local_changes();
self.props.push_local_changes();
self.static_props.push_local_changes();
self.methods.push_local_changes();
self.static_methods.push_local_changes();
self.constructors.push_local_changes();
}
/// Pops the most recent local-change scope from every change layer.
fn pop_local_changes(&self) {
self.classes.pop_local_changes();
self.typedefs.pop_local_changes();
self.funs.pop_local_changes();
self.consts.pop_local_changes();
self.modules.pop_local_changes();
self.props.pop_local_changes();
self.static_props.pop_local_changes();
self.methods.pop_local_changes();
self.static_methods.pop_local_changes();
self.constructors.pop_local_changes();
}
/// Cheap handle to the bundled `ShallowDeclStore` view.
fn as_shallow_decl_store(&self) -> Arc<ShallowDeclStore<BR>> {
Arc::clone(&self.store_view)
}
}
/// File contents tagged by their origin.
#[derive(Clone, Debug, ToOcamlRep, FromOcamlRep)]
#[derive(serde::Serialize, serde::Deserialize)]
pub enum FileType {
/// Contents tagged as coming from disk.
Disk(bstr::BString),
/// Contents tagged as coming from the IDE (presumably an unsaved editor
/// buffer — verify against callers).
Ide(bstr::BString),
}
/// File provider that consults a changes store first, then falls back to
/// reading from disk.
#[derive(Debug)]
struct FileProviderWithChanges {
// We could use DeltaStore here if not for the fact that the OCaml
// implementation of `File_provider.get` does not fall back to disk when the
// given path isn't present in sharedmem/local_changes (it only does so for
// `File_provider.get_contents`).
delta_and_changes: Arc<ChangesStore<RelativePath, FileType>>,
// Fallback used when `delta_and_changes` has no entry for a path.
disk: DiskProvider,
}
impl FileProvider for FileProviderWithChanges {
    /// Fetches a file's contents, preferring any entry in the changes store
    /// (whether Disk- or Ide-tagged) over the on-disk file; a file missing
    /// from both yields empty contents.
    fn get(&self, file: RelativePath) -> Result<bstr::BString> {
        if let Some(FileType::Disk(contents) | FileType::Ide(contents)) =
            self.delta_and_changes.get(file)?
        {
            return Ok(contents);
        }
        match self.disk.read(file) {
            Ok(contents) => Ok(contents),
            Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok("".into()),
            Err(e) => Err(e.into()),
        }
    }
}
|
//! Integer types used by SNASM.
//!
//! SNASM needs to work with a number of different integer types, including
//! the oddball 24-bit 65816 addresses. This module provides types for handling
//! integer values and types cleanly.
use std::fmt;
use std::fmt::Display;
use std::io;
use std::ops::Add;
use std::ops::AddAssign;
use std::ops::Neg;
use std::ops::Not;
use serde::Deserialize;
use serde::Deserializer;
use serde::Serialize;
use serde::Serializer;
/// A variable-width integer of 8, 16, or 24 bits.
///
/// An `Int` is not meaningfully signed or unsigned; it is merely a collection
/// of bits. The `to_u32()` and `to_i32()` can be used to unify the underlying
/// value via either zero or sign extension.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Int {
/// An 8-bit integer.
I8(u8),
/// A 16-bit integer.
I16(u16),
/// A 24-bit integer (such as a 65816 long address).
I24(u24),
}
impl Int {
    /// Creates a new `Int` with the given bits and the given width inside it.
    ///
    /// This function will truncate any extraneous bits in `val`.
    #[inline]
    pub fn new(val: u32, width: Width) -> Self {
        match width {
            Width::I8 => Self::I8(val as u8),
            Width::I16 => Self::I16(val as u16),
            Width::I24 => Self::I24(u24::from_u32(val)),
        }
    }

    /// Tries to create a new `Int` with the smallest possible width.
    ///
    /// This function uses `Width::smallest_for()` to find the smallest width that
    /// fits all of the significant bits in `val`; returns `None` if none fits.
    #[inline]
    pub fn best_fit(val: u32) -> Option<Self> {
        // (`val` is already a `u32`; the old `val as u32` cast was a no-op.)
        Width::smallest_for(val).map(|w| Self::new(val, w))
    }

    /// Zero-extends this `Int` to the given width.
    pub fn zero_extend(self, width: Width) -> Self {
        Self::new(self.to_u32(), width)
    }

    /// Zero-extends this `Int` to the given width.
    ///
    /// Returns `None` if `self` is wider than `width`.
    pub fn zero_extend_checked(self, width: Width) -> Option<Self> {
        if self.width() > width {
            return None;
        }
        Some(self.zero_extend(width))
    }

    /// Gets the width of this `Int`.
    #[inline]
    pub fn width(self) -> Width {
        match self {
            Self::I8(_) => Width::I8,
            Self::I16(_) => Width::I16,
            Self::I24(_) => Width::I24,
        }
    }

    /// Zero-extends the value in this `Int` to a `u32`.
    #[inline]
    pub fn to_u32(self) -> u32 {
        match self {
            Self::I8(n) => n as u32,
            Self::I16(n) => n as u32,
            Self::I24(n) => n.to_u32(),
        }
    }

    /// Sign-extends the value in this `Int` to an `i32`.
    #[inline]
    pub fn to_i32(self) -> i32 {
        match self {
            // `as i8 as i32` (etc.) sign-extends through the signed
            // intermediate type.
            Self::I8(n) => n as i8 as i32,
            Self::I16(n) => n as i16 as i32,
            Self::I24(n) => n.to_i32(),
        }
    }

    /// Returns an iterator over this `Int`'s bytes, in little-endian order.
    pub fn le_bytes(self) -> impl Iterator<Item = u8> {
        // Hand-rolled iterator: `self.1` is the index of the next byte to
        // yield; it stops advancing once the width is exhausted.
        struct Iter(Int, usize);
        impl Iterator for Iter {
            type Item = u8;
            fn next(&mut self) -> Option<u8> {
                let val = match self.0 {
                    Int::I8(n) => [n].get(self.1).cloned(),
                    Int::I16(n) => n.to_le_bytes().get(self.1).cloned(),
                    Int::I24(n) => n.to_le_bytes().get(self.1).cloned(),
                };
                if val.is_some() {
                    self.1 += 1;
                }
                val
            }
        }
        Iter(self, 0)
    }

    /// Reads a little-endian `Int` of the given width from a `Read`.
    pub fn read_le(width: Width, mut r: impl io::Read) -> io::Result<Self> {
        match width {
            Width::I8 => {
                let mut buf = [0; 1];
                r.read_exact(&mut buf)?;
                Ok(Int::I8(buf[0]))
            }
            Width::I16 => {
                let mut buf = [0; 2];
                r.read_exact(&mut buf)?;
                Ok(Int::I16(u16::from_le_bytes(buf)))
            }
            Width::I24 => {
                let mut buf = [0; 3];
                r.read_exact(&mut buf)?;
                Ok(Int::I24(u24::from_le_bytes(buf)))
            }
        }
    }

    /// Writes a little-endian `Int` to a `Write`.
    pub fn write_le(&self, mut w: impl io::Write) -> io::Result<()> {
        match self {
            Int::I8(n) => w.write_all(&[*n])?,
            Int::I16(n) => w.write_all(&n.to_le_bytes())?,
            Int::I24(n) => w.write_all(&n.to_le_bytes())?,
        }
        Ok(())
    }
}
// Lossless wrapping of a byte as an 8-bit `Int`.
impl From<u8> for Int {
#[inline]
fn from(n: u8) -> Self {
Int::I8(n)
}
}
// Lossless wrapping of a word as a 16-bit `Int`.
impl From<u16> for Int {
#[inline]
fn from(n: u16) -> Self {
Int::I16(n)
}
}
// Lossless wrapping of a 24-bit address as a 24-bit `Int`.
impl From<u24> for Int {
#[inline]
fn from(n: u24) -> Self {
Int::I24(n)
}
}
/// Implements the listed `fmt` traits for `Int` by delegating to the
/// zero-extended `u32` value.
macro_rules! impl_fmt_int {
($($trait:ident),*) => {
$(impl fmt::$trait for Int {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::$trait::fmt(&self.to_u32(), f)
}
})*
}
}
impl_fmt_int!(Display, Binary, Octal, LowerHex, UpperHex);
// Two's-complement negation at the value's own width.
impl Neg for Int {
type Output = Self;
fn neg(self) -> Self {
match self {
// NOTE(review): `(n as i8).neg()` panics in debug builds when n == 0x80
// (i8::MIN); confirm whether wrapping negation was intended. The same
// caveat applies to the 16-bit arm for n == 0x8000.
Self::I8(n) => Self::I8((n as i8).neg() as u8),
Self::I16(n) => Self::I16((n as i16).neg() as u16),
Self::I24(n) => Self::I24(-n),
}
}
}
impl Not for Int {
    type Output = Self;

    /// Bitwise-complements the value while preserving its width.
    fn not(self) -> Self {
        // Complement the zero-extended bits and re-truncate to the original
        // width; for every variant this equals per-variant `!`.
        Self::new(!self.to_u32(), self.width())
    }
}
/// A 24-bit 65816 address.
///
/// (The struct's derived `Ord` compares `bank` first, then `addr`.)
#[allow(non_camel_case_types)]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
pub struct u24 {
/// The "bank byte", that is, the top byte of the address determining which
/// bank it corresponds to.
pub bank: u8,
/// A 16-bit address within a bank.
pub addr: u16,
}
impl u24 {
    /// Creates a `u24` by truncating a `u32`.
    #[inline]
    pub const fn from_u32(i: u32) -> Self {
        Self {
            bank: (i >> 16) as u8,
            addr: i as u16,
        }
    }

    /// Zero-extends this `u24` into a `u32`.
    #[inline]
    pub const fn to_u32(self) -> u32 {
        ((self.bank as u32) << 16) | (self.addr as u32)
    }

    /// Sign-extends this `u24` into an `i32`.
    #[inline]
    pub const fn to_i32(self) -> i32 {
        // NOTE: the `as i8 as i32` triggers sign extension. In particular, casting
        // any signed type to any wider type (regardless of signedness) triggers
        // sign extension.
        ((self.bank as i8 as i32) << 16) | (self.addr as i32)
    }

    /// Offsets this `u24`. This does not perform 24-bit arithmetic. Instead,
    /// only the `addr` part is affected, always wrapping around on overflow.
    #[must_use]
    #[inline]
    pub fn offset(self, offset: i16) -> Self {
        let mut copy = self;
        copy.addr = self.addr.wrapping_add(offset as u16);
        copy
    }

    /// Like `offset`, but only returns a value if the computation would not
    /// overflow out of the current bank.
    #[must_use]
    #[inline]
    pub fn offset_checked(self, offset: i16) -> Option<Self> {
        // Bug fix: a negative `offset` used to be cast straight to `u16`, so
        // `checked_add` reported overflow for nearly every in-range negative
        // offset (and accepted some genuine bank underflows, e.g.
        // addr=0, offset=-1). Handle each sign with the matching checked op.
        let addr = if offset >= 0 {
            self.addr.checked_add(offset as u16)
        } else {
            self.addr.checked_sub(offset.unsigned_abs())
        };
        addr.map(|addr| u24 {
            bank: self.bank,
            addr,
        })
    }

    /// Like `offset`, but actually performs a carry to the bank byte.
    #[must_use]
    #[inline]
    pub fn offset_full(self, offset: i16) -> Self {
        // `offset as u32` sign-extends, so negative offsets borrow from the
        // bank byte correctly (mod 2^24 after truncation).
        u24::from_u32(self.to_u32().wrapping_add(offset as u32))
    }

    /// Creates a new `u24` from the given bytes, in little-endian order.
    #[inline]
    pub fn from_le_bytes(bytes: [u8; 3]) -> Self {
        Self {
            bank: bytes[2],
            addr: bytes[0] as u16 | ((bytes[1] as u16) << 8),
        }
    }

    /// Converts this `u24`'s bytes into an array, in little-endian order.
    #[inline]
    pub fn to_le_bytes(self) -> [u8; 3] {
        let [lo, hi] = self.addr.to_le_bytes();
        [lo, hi, self.bank]
    }
}
// Implements the listed formatting traits for `u24` by zero-extending to
// `u32` and delegating, so a `u24` prints exactly like the equivalent `u32`
// (and honors width/fill/alternate flags the same way).
macro_rules! impl_fmt_u24 {
    ($($trait:ident),*) => {
        $(impl fmt::$trait for u24 {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                fmt::$trait::fmt(&self.to_u32(), f)
            }
        })*
    }
}
impl_fmt_u24!(Display, Binary, Octal, LowerHex, UpperHex);
impl Neg for u24 {
    type Output = Self;
    /// Two's-complement negation of the 24-bit value.
    ///
    /// Sign-extends to `i32` first, negates, then truncates back to 24 bits
    /// via `from_u32`; the truncation makes the result wrap within 24 bits.
    /// (`to_i32()` can never be `i32::MIN`, so the `i32` negation itself
    /// cannot overflow.)
    fn neg(self) -> Self {
        let val = -self.to_i32();
        Self::from_u32(val as u32)
    }
}
impl Not for u24 {
type Output = Self;
fn not(self) -> Self {
Self {
bank: !self.bank,
addr: !self.addr,
}
}
}
impl Add<u16> for u24 {
    type Output = Self;
    /// Adds `addr` to the in-bank address by delegating to `AddAssign`,
    /// so `+` and `+=` always agree.
    fn add(self, addr: u16) -> Self {
        let mut sum = self;
        sum += addr;
        sum
    }
}
impl AddAssign<u16> for u24 {
    /// Adds `addr` to the in-bank address only; the bank byte is untouched.
    ///
    /// Note: uses plain `+=`, which panics on overflow in debug builds —
    /// unlike `offset`, which wraps. Presumably intentional, but worth
    /// confirming against callers.
    fn add_assign(&mut self, addr: u16) {
        self.addr += addr;
    }
}
impl<'de> Deserialize<'de> for u24 {
fn deserialize<D>(de: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let int = u64::deserialize(de)?;
if int > 0xffffff {
return Err(serde::de::Error::invalid_value(
serde::de::Unexpected::Unsigned(int),
&"24-bit integer",
));
}
Ok(u24::from_u32(int as u32))
}
}
impl Serialize for u24 {
    /// Serializes as a plain zero-extended `u32`, mirroring the `Deserialize`
    /// impl which accepts any integer up to 24 bits.
    fn serialize<S>(&self, ser: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let widened = self.to_u32();
        widened.serialize(ser)
    }
}
/// An integer width: a one-, two-, or three-byte integer.
///
/// This enum is ordered: smaller integer types compare smaller than bigger
/// integer types.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum Width {
    /// A single byte.
    I8,
    /// A two-byte word.
    I16,
    /// A three-byte long.
    I24,
}
impl Width {
    /// Parses a string into a `Width`.
    ///
    /// Valid names are `i8`, `i16` and `i24`, case-insensitive.
    ///
    /// ```
    /// # use snasm::int::Width;
    /// assert_eq!(Width::from_name("i8"), Some(Width::I8));
    /// ```
    #[inline]
    pub fn from_name(s: &str) -> Option<Self> {
        match s {
            "i8" | "I8" => Some(Self::I8),
            "i16" | "I16" => Some(Self::I16),
            "i24" | "I24" => Some(Self::I24),
            _ => None,
        }
    }
    /// Returns a name for this type.
    ///
    /// ```
    /// # use snasm::int::Width;
    /// assert_eq!(Width::I8.name(), "i8");
    /// ```
    #[inline]
    pub fn name(self) -> &'static str {
        match self {
            Self::I8 => "i8",
            Self::I16 => "i16",
            Self::I24 => "i24",
        }
    }
    /// Returns the number of bits a value of this width contains.
    ///
    /// ```
    /// # use snasm::int::Width;
    /// assert_eq!(Width::I8.bits(), 8);
    /// ```
    #[inline]
    pub fn bits(self) -> u32 {
        match self {
            Self::I8 => 8,
            Self::I16 => 16,
            Self::I24 => 24,
        }
    }
    /// Returns the number of bytes a value of this width contains.
    ///
    /// ```
    /// # use snasm::int::Width;
    /// assert_eq!(Width::I8.bytes(), 1);
    /// ```
    #[inline]
    pub fn bytes(self) -> u32 {
        match self {
            Self::I8 => 1,
            Self::I16 => 2,
            Self::I24 => 3,
        }
    }
    /// Returns the mask for this type.
    ///
    /// A type's mask can be used to extract the bottom `bits()` bits from an
    /// integer.
    #[inline]
    pub fn mask(self) -> u32 {
        match self {
            Self::I8 => 0xff,
            Self::I16 => 0xffff,
            Self::I24 => 0xffffff,
        }
    }
    /// Checks that the unsigned integer `val` can fit into this `Width`.
    ///
    /// ```
    /// # use snasm::int::Width;
    /// assert!(Width::I8.in_range(0));
    /// assert!(Width::I8.in_range(128));
    /// assert!(Width::I8.in_range(255));
    /// assert!(!Width::I8.in_range(256));
    /// ```
    pub fn in_range(self, val: u32) -> bool {
        // True exactly when no bits above this width's mask are set.
        val & !self.mask() == 0
    }
    /// Returns the smallest `Width` that fits `val`, if such exists.
    /// ```
    /// # use snasm::int::Width;
    /// assert_eq!(Width::smallest_for(0), Some(Width::I8));
    /// assert_eq!(Width::smallest_for(255), Some(Width::I8));
    /// assert_eq!(Width::smallest_for(256), Some(Width::I16));
    /// assert_eq!(Width::smallest_for(u32::MAX), None);
    /// ```
    pub fn smallest_for(val: u32) -> Option<Self> {
        // Widths are tried in ascending size order, so the first match is
        // the smallest.
        [Self::I8, Self::I16, Self::I24]
            .iter()
            .copied()
            .find(|i| i.in_range(val))
    }
}
impl Display for Width {
    /// Formats the width as its canonical lowercase name (`i8`/`i16`/`i24`),
    /// delegating to `str`'s `Display` so formatter flags still apply.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(self.name(), f)
    }
}
|
/// Entry point for the ownership examples: runs each demo in order.
pub fn test() {
    ex_mutable_string();
    ex_object_reference();
    ex_ownership_on_heap_data();
}
/// Demonstrates mutating a heap-allocated `String` in place via a `mut`
/// binding.
fn ex_mutable_string() {
    let mut greeting = String::from("hello");
    println!("{}", greeting);
    greeting.push_str(", world!");
    println!("{}", greeting);
}
/// Contrasts `Copy` semantics (integers) with move semantics (`String`).
fn ex_object_reference() {
    // i32 is Copy: `second` gets an independent copy, so reassigning
    // `first` afterwards does not affect it.
    let mut first = 5;
    let second = first;
    first = 6;
    println!("a: {}, b: {}", first, second);
    // String is not Copy: assignment moves ownership into `owned`.
    let source = String::from("hello");
    let mut owned = source;
    owned.push_str(", world!");
    println!("s2: {}", owned);
}
/// Runs each of the ownership-on-heap-data demos in sequence.
fn ex_ownership_on_heap_data() {
    demo_losing_ownership();
    demo_take_and_return();
    demo_take_and_return_tuple();
    demo_take_reference();
    demo_mut_reference();
    demo_copy_trait();
}
/// Shows that passing a `String` by value moves it: after the call the
/// original binding is no longer usable.
fn demo_losing_ownership() {
    let message = String::from("goodbye");
    // The closure takes its argument by value, so calling it moves
    // `message` in; the String is dropped when the closure body ends.
    let consume = |text: String| {
        println!("{}, world!", text);
    };
    consume(message);
    // `message` has been moved; using it here would be a compile error:
    // println!("{}, world!", message);
}
/// Shows regaining ownership by having the callee return the `String` back.
fn demo_take_and_return() {
    // Takes ownership of its argument and hands it straight back, so the
    // String survives the call.
    let echo = |text: String| -> String {
        println!("{}, world!", text);
        text
    };
    let message = String::from("Goodbye");
    let message = echo(message);
    println!("{} and hello again!", message);
}
/// Shows returning ownership plus a computed value as a tuple.
fn demo_take_and_return_tuple() {
    // Returning the String alongside its length gives ownership back to
    // the caller without needing a borrow.
    fn calculate_length(s: String) -> (String, usize) {
        let length = s.len();
        (s, length)
    }
    let original = String::from("testString");
    let (returned, length) = calculate_length(original);
    println!("The length of '{}' is {}.", returned, length);
}
/// Shows borrowing: the caller keeps ownership while the callee reads the
/// value through a shared reference.
fn demo_take_reference() {
    let s1 = String::from("referenceString");
    // `&str` is preferred over `&String` as a parameter: it accepts both
    // `&String` (via deref coercion) and plain string slices.
    fn calculate_length(s: &str) -> usize {
        s.len()
    }
    let length = calculate_length(&s1);
    println!("The length of '{}' is {}", s1, length);
}
/// Shows a mutable borrow: the helper modifies the `String` in place while
/// the caller retains ownership.
fn demo_mut_reference() {
    fn alter_string(target: &mut String) {
        target.push_str("String");
    }
    let mut base = String::from("mutable");
    alter_string(&mut base);
    println!("{}", base);
}
/// Shows that `Copy` types are duplicated into the closure, so the original
/// remains usable after the call.
fn demo_copy_trait() {
    let x: i32 = 50;
    // Call the closure directly. The original code used the `Fn::call`
    // method (`.call((x,))`), which is unstable and only compiles on
    // nightly with `#![feature(fn_traits)]`; a plain call is equivalent
    // and works on stable Rust.
    (|x: i32| {
        println!("x: {}", x);
        // x is 'Copy', so only the local value is dropped
    })(x);
    println!("x: {}", x);
}
use task_queue::TaskQueue;
use std::io::BufRead;
use structopt::StructOpt;
// Top-level CLI options. NOTE: structopt turns `///` doc comments into help
// text, so only `//` comments are used here to keep the CLI output unchanged.
#[derive(StructOpt, Debug)]
#[structopt(name = "tq", about = "Interact with your local task queue.")]
struct Opts {
    // Global flag; currently parsed but not read in `main` — TODO confirm
    // whether logging is wired up elsewhere.
    #[structopt(long = "verbose", help = "Add extra logging")]
    verbose: bool,
    // Which subcommand to run (clean/create/delete/list/run/show).
    #[structopt(subcommand)]
    op: Operation,
}
// Subcommands of the `tq` CLI. As with `Opts`, `//` comments are used so
// structopt's generated help text is not altered.
#[derive(StructOpt, Debug)]
enum Operation {
    // Remove finished/stale state from the queue.
    #[structopt(name = "clean")]
    Clean,
    // Create a task from a command line; task inputs are read from stdin.
    #[structopt(name = "create")]
    Create { command: Vec<String> },
    // Delete one or more tasks by id.
    #[structopt(name = "delete")]
    Delete { task_ids: Vec<i64> },
    // List all tasks.
    #[structopt(name = "list")]
    List,
    // Run a task, optionally with parallel workers.
    #[structopt(name = "run")]
    Run {
        task_id: i64,
        #[structopt(
            long = "concurrency",
            help = "How many tasks to run in parallel",
            default_value = "1"
        )]
        concurrency: usize,
    },
    // Show details for a single task.
    #[structopt(name = "show")]
    Show { task_id: i64 },
}
/// CLI entry point: parses arguments and dispatches to the task queue.
///
/// Failures still abort the process, but each former bare `unwrap()` is now
/// an `expect()` with a message naming the operation that failed, so the
/// panic output tells the user what went wrong instead of just a line number.
fn main() {
    let opts = Opts::from_args();
    let mut tq = TaskQueue::new().expect("failed to open task queue");
    match opts.op {
        Operation::Clean => {
            tq.clean().expect("failed to clean task queue");
        }
        Operation::Create { command } => {
            // Task inputs are read from stdin, one per line; any read error
            // aborts before the task is created.
            let stdin = std::io::stdin();
            let inputs: Vec<String> = stdin
                .lock()
                .lines()
                .collect::<Result<Vec<_>, _>>()
                .expect("failed to read task inputs from stdin");
            tq.push_task(command, inputs).expect("failed to create task");
        }
        Operation::Delete { task_ids } => {
            for task_id in task_ids {
                tq.remove_task(task_id).expect("failed to delete task");
            }
        }
        Operation::List => {
            tq.list_tasks().expect("failed to list tasks");
        }
        Operation::Run {
            task_id,
            concurrency,
        } => {
            tq.run_task(task_id, concurrency).expect("failed to run task");
        }
        Operation::Show { task_id } => {
            tq.show_task(task_id).expect("failed to show task");
        }
    };
}
|
// Copyright (c) 2021 Thomas J. Otterson
//
// This software is released under the MIT License.
// https://opensource.org/licenses/MIT
use std::ops::{Deref, DerefMut};
use std::{cell::RefCell, rc::Rc};
/// A vector with three extra operations on it dealing with shared, internally mutable
/// references.
///
/// The underlying `Vec` doesn't contain items of type `T` itself, but rather items of type
/// `Rc<RefCell<T>>`. This means that the items in the vector can be shared (`Rc`, allows
/// for multiple owners and ensures that the item is not deleted until all ownership has
/// been released) and do not have to convince the compiler that they're following the
/// borrowing rules (`RefCell`, which checks the borrow conditions at runtime instead of at
/// compile time).
///
/// The reason behind this is that in a project of this nature, there is a lot of sharing. A
/// `Pin` needs to be able to be owned and mutated by both the `Device` that it's a part of
/// and the `Trace` that connects it to other pins. The `Device` itself might be mutated by
/// any number of its own pins. And the `Trace` that has more than one output pin connected
/// to it needs to be able to be mutated by all of those output pins.
///
/// Furthermore, there are instances where a device mutates a pin, causing that pin to
/// mutate its trace, causing it to have to check the values of all of its pins (including
/// the one that just mutated it). Hence the pin has already been borrowed mutably, which
/// means it cannot be borrowed again to have its value checked (by the fundamental
/// borrowing rules of Rust, if data is borrowed mutably, it cannot also be borrowed
/// immutably).
///
/// All of this together means that 1) `Pin`s, `Trace`s, and `Device`s all need to be able
/// to be referenced by multiple other structs (hence the need for `Rc`), and 2) it is
/// impossible in some cases to prove to the compiler that borrowing rules are satisfied, so
/// those rules need to instead be checked at runtime (hence the need for `RefCell`).
///
/// The `Rc<RefCell<T>>` mechanism therefore gives a chance to make these things work, but
/// just wrapping all of your `Pin`s with `Rc::new(RefCell::new(pin))` isn't enough. The
/// borrowing rules are still checked. If nothing else changes it just means that, while
/// your program will compile now, it'll just panic at runtime.
///
/// This is a concern because of the nested nature of method calls in the
/// respond-to-an-event kind of mechanism that underlies everything in this project. Say you
/// have a `Pin` named pin, and its connected `Trace` (call it `trace`) changes its level by
/// calling `pin.update()`. Inside that `update` invocation, it'll call `pin.notify()`,
/// which will in turn notify its `Device` (`device`) by calling `device.update()`. So at
/// this point, your call stack looks something like this:
///
/// ```text
/// pin calls device.update()
/// pin calls pin.notify()
/// trace calls pin.update() <-- mutated pin by changing its level
/// ```
/// Now, what if `device.update()` has code that calls `pin.level()` to check the pin's new
/// level? Well, that seems completely logical, but it can't. `trace` still holds a mutable
/// reference to `pin` way down at the bottom of the stack, and until it releases it by
/// having `pin.update()` complete, no other reference can be taken to `pin`. In Rust, a bit
/// of data (`pin` in this case) can have any number of immutable references taken, OR it
/// can have a single mutable reference taken. It cannot have both.
///
/// The situation does not improve merely by having `pin` be an `Rc<RefCell<Pin>>`. `trace`
/// still mutably borrows `pin` (this time with `pin.borrow_mut().update()`, provided by
/// `RefCell` to do the runtime-instead-of-compile-time borrow checking), and then `device`
/// still cannot call `pin.borrow().level()` because even runtime borrow checking has to
/// follow the rules.
///
/// Of course, this is solved by having `pin` be an `Rc<RefCell<Pin>>` AND having `trace` do
/// this instead: `Rc::clone(&pin).borrow_mut().update()`. (The difference in sheer number
/// of characters between this and `pin.update()` is why I use macros.) Now, there's still a
/// mutable reference, but it's to *a cloned reference* of `pin`. `pin` itself still gets
/// updated (it's a clone of a reference to `pin`, not a clone of `pin` itself), but when
/// `device` eventually wants to call `pin.borrow().level()`, it works. The mutable
/// reference was taken from a cloned reference to `pin`, not to `pin` itself, so `pin` at
/// this point has not had any mutable references taken to it.
///
/// So, after 75 lines of comments, how does this `RefVec` work into that? Because sometimes
/// it's hard to get a cloned reference *early* enough. In the above example, `trace` has to
/// be what makes the cloned reference. `device` can't do it in its `update()` method; if it
/// tries to clone `pin` with `Rc::clone(&pin)`, it will be duly informed that it can't take
/// a reference to `pin` because `trace` already has a mutable reference to it. And you
/// can't make a cloned reference without having a reference in the first place (hence
/// `&pin` being passed to `Rc::clone()`).
///
/// So the reference needs to be cloned before mutation happens. Most of the time, this
/// isn't that hard to do. But one case where it can be really hard to do is when you want
/// to use iterators. This is relevant, because there are a lot of places throughout this
/// code where iterators are far and away the best choice. For an easy example, if the CPU
/// wants to read a particular memory address, it'll use `utils::value_to_pins()` on its
/// address pins, which will iterate over all of its address pins and set them to the proper
/// values (with mutated references). This will trigger the pins to mutate the traces
/// they're connected to, and those traces are also connected to the memory's address pins,
/// which will be mutated in turn. The memory will then want to use `utils::pins_to_value()`
/// to iterate over its address pins to regenerate the address.
///
/// The problem is that normal iterators take normal references, possibly mutable ones.
/// `utils::value_to_pins()` *wants* to use `iter_mut()` to iterate over the pins, changing
/// each of them to the correct level. But doing so takes normal mutable references to each
/// pin, and then that pin can no longer be referenced again. The answer is to write our
/// *own* iterator, one which will return *cloned references* rather than the regular ones
/// that normal iterators deal in.
///
/// So there's the entire point of `RefVec`. It's a vector (thanks to `deref`, which will
/// return a `Vec` to be used in any context that requires a `Vec` and not a `RefVec`) that
/// has an additional type of iterator that internally clones references, so the simple act
/// of creating an iterator doesn't mess everything up. It has a couple other new methods -
/// `get_ref()` is like `get` except it returns a cloned reference, and a `clone()`
/// implementation that will return a new `RefVec` of cloned references to all of the
/// original's items.
// A vector of shared, internally-mutable items (see the module comment above
// for the full rationale).
pub struct RefVec<T>(Vec<Rc<RefCell<T>>>);
/// Here is the iterator itself. It calls `Rc::clone()` on each item referenced in the
/// underlying vector and returns that instead of a plain reference. The
/// slice it holds shrinks from the front as iteration proceeds.
pub struct RefIter<'a, T>(&'a [Rc<RefCell<T>>]);
impl<T> RefVec<T> {
    /// Creates a new, empty `RefVec`.
    pub const fn new() -> RefVec<T> {
        RefVec(Vec::new())
    }
    /// Wraps an existing vector of shared references. No cloning happens
    /// here; the vector is expected to already hold cloned references.
    pub const fn with_vec(v: Vec<Rc<RefCell<T>>>) -> RefVec<T> {
        RefVec(v)
    }
    /// Hands out a cloned reference to the item at `index`.
    ///
    /// Panics if `index` is out of bounds, exactly like normal indexing.
    pub fn get_ref(&self, index: usize) -> Rc<RefCell<T>> {
        let item = &self.0[index];
        Rc::clone(item)
    }
    /// Returns an iterator that yields cloned references to every item.
    pub fn iter_ref(&self) -> RefIter<'_, T> {
        RefIter(&self.0)
    }
}
impl<'a, T> Iterator for RefIter<'a, T> {
    type Item = Rc<RefCell<T>>;
    /// Yields a cloned `Rc` for the next element, advancing the internal
    /// slice by one; returns `None` once the slice is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        let (head, tail) = self.0.split_first()?;
        self.0 = tail;
        Some(Rc::clone(head))
    }
}
impl<T> Clone for RefVec<T> {
/// Returns a clone of the current `RefVec`. This clone will contain cloned references
/// to each of the references in the original vector.
fn clone(&self) -> Self {
RefVec(
self.0
.iter()
.map(|pin| Rc::clone(pin))
.collect::<Vec<Rc<RefCell<T>>>>(),
)
}
}
impl<T> Deref for RefVec<T> {
    type Target = Vec<Rc<RefCell<T>>>;
    /// Lets a `RefVec` be used anywhere a `Vec` of shared references is
    /// expected (indexing, `len()`, ordinary iteration, etc.).
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<T> DerefMut for RefVec<T> {
    /// Mutable counterpart of `Deref`: exposes the underlying `Vec` for
    /// in-place mutation (push, remove, etc.).
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
|
// Copyright (c) 2021, Roel Schut. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use gdnative::api::ParallaxBackground;
use gdnative::prelude::*;
// Godot native class that continuously scrolls a ParallaxBackground.
// NOTE: only `//` comments here — gdnative/structopt-style derives can pick
// up doc comments, and the editor-facing metadata should stay unchanged.
#[derive(NativeClass)]
#[inherit(ParallaxBackground)]
pub struct ParallaxStarBackground {
    // Scroll speed; exported to the Godot inspector with a default of 1.0.
    // Units are presumably pixels per second — TODO confirm in the scene.
    #[property(default = 1.0)]
    speed: f32,
    // Accumulated scroll offset applied to the owner every frame.
    offset_scroll: Vector2,
}
#[methods]
impl ParallaxStarBackground {
    // Godot constructor: starts with the scroll offset at the origin.
    // `speed` is initialized to 1.0 here but may be overridden by the
    // exported property value from the scene.
    fn new(_owner: &ParallaxBackground) -> Self {
        ParallaxStarBackground {
            speed: 1.0,
            offset_scroll: Vector2::zero(),
        }
    }
    // Called by Godot every frame. Decreases the horizontal scroll offset by
    // `speed * delta`, making the motion frame-rate independent, then pushes
    // the accumulated offset to the owning ParallaxBackground.
    #[export]
    fn _process(&mut self, owner: &ParallaxBackground, delta: f32) {
        self.offset_scroll.x -= self.speed * delta;
        owner.set_scroll_offset(self.offset_scroll);
    }
}
|
use std::ffi::CString;
use anyhow::Result;
use pasture_core::containers::{PointBufferWriteable, PointBufferWriteableExt};
use pasture_core::nalgebra::Vector3;
use pasture_core::containers::{PointBuffer, PointBufferExt};
use pasture_core::layout::attributes::POSITION_3D;
/// Wrapper around the proj types from the proj_sys crate. Supports transformations (the Rust proj bindings don't support this)
pub struct Projection {
    // Raw handle to the proj context; owned by this struct and destroyed in
    // `Drop`.
    proj_context: *mut proj_sys::projCtx_t,
    // Raw handle to the CRS-to-CRS transformation; owned by this struct and
    // destroyed in `Drop`. Raw pointers make this type !Send/!Sync by
    // default, which is presumably intended — confirm before sharing across
    // threads.
    projection: *mut proj_sys::PJconsts,
}
impl Projection {
pub fn new(source_crs: &str, target_crs: &str) -> Result<Self> {
let src_cstr = CString::new(source_crs)?;
let target_cstr = CString::new(target_crs)?;
unsafe {
let proj_context = proj_sys::proj_context_create();
let projection = proj_sys::proj_create_crs_to_crs(
proj_context,
src_cstr.as_ptr(),
target_cstr.as_ptr(),
std::ptr::null_mut(),
);
Ok(Self {
proj_context,
projection,
})
}
}
/// Performs a transformation of the given position
pub fn transform(&self, position: Vector3<f64>) -> Vector3<f64> {
unsafe {
let coord = proj_sys::proj_coord(position.x, position.y, position.z, 0.0);
let target_coords =
proj_sys::proj_trans(self.projection, proj_sys::PJ_DIRECTION_PJ_FWD, coord);
Vector3::new(target_coords.v[0], target_coords.v[1], target_coords.v[2])
}
}
}
impl Drop for Projection {
    /// Releases the proj objects created in `new`.
    fn drop(&mut self) {
        unsafe {
            // SAFETY: both pointers were obtained from the corresponding proj
            // create calls in `new` and are freed exactly once, here.
            // NOTE(review): if creation failed, `projection` may be null —
            // presumably `proj_destroy` tolerates null, but verify against
            // the PROJ documentation.
            proj_sys::proj_destroy(self.projection);
            proj_sys::proj_context_destroy(self.proj_context);
        }
    }
}
/// Reprojection Algorithm
/// Rewrites the 3D coordinates from the given point cloud to the given target coordinate reference system.
/// It iterates over all points in the given point cloud.
/// Make sure that source_crs and target_crs are valid coordinate reference systems.
///
/// # Panics
///
/// Panics if the PointLayout of this buffer does not contain the given attribute.
///
/// # Examples
///
/// ```
/// # use pasture_algorithms::reprojection::reproject_point_cloud_within;
/// # use pasture_core::containers::InterleavedVecPointStorage;
/// # use pasture_core::containers::PointBufferExt;
/// # use pasture_core::layout::PointType;
/// # use pasture_core::nalgebra::Vector3;
/// # use pasture_derive::PointType;
/// #[repr(C)]
/// #[derive(PointType, Debug, Clone, Copy)]
/// struct SimplePoint {
/// #[pasture(BUILTIN_POSITION_3D)]
/// pub position: Vector3<f64>,
/// #[pasture(BUILTIN_INTENSITY)]
/// pub intensity: u16,
/// }
/// fn main() {
/// let points = vec![
/// SimplePoint {
/// position: Vector3::new(1.0, 22.0, 0.0),
/// intensity: 42,
/// },
/// SimplePoint {
/// position: Vector3::new(12.0, 23.0, 0.0),
/// intensity: 84,
/// },
/// SimplePoint {
/// position: Vector3::new(10.0, 8.0, 2.0),
/// intensity: 84,
/// },
/// SimplePoint {
/// position: Vector3::new(10.0, 0.0, 1.0),
/// intensity: 84,
/// },
/// ];
/// let mut interleaved = InterleavedVecPointStorage::new(SimplePoint::layout());
/// interleaved.push_points(points.as_slice());
/// reproject_point_cloud_within::<InterleavedVecPointStorage>(
/// &mut interleaved,
/// "EPSG:4326",
/// "EPSG:3309",
/// );
/// for point in interleaved.iter_point::<SimplePoint>() {
/// println!("{:?}", point);
/// }
/// }
/// ```
pub fn reproject_point_cloud_within<T: PointBuffer + PointBufferWriteable>(
point_cloud: &mut T,
source_crs: &str,
target_crs: &str,
) {
let proj = Projection::new(source_crs, target_crs).unwrap();
for index in 0..point_cloud.len() {
let point = point_cloud.get_attribute(&POSITION_3D, index);
let reproj = proj.transform(point);
point_cloud.set_attribute(&POSITION_3D, index, reproj);
}
}
/// Reprojection Algorithm
/// Rewrites the 3D coordinates from the given point cloud to the given target coordinate reference system.
/// It iterates over all points in the given point cloud.
/// Make sure that source_crs and target_crs are valid coordinate reference systems.
///
/// # Panics
///
/// Panics if the PointLayout of this buffer does not contain the given attribute.
///
/// # Examples
///
/// ```
/// # use pasture_algorithms::reprojection::reproject_point_cloud_between;
/// # use pasture_core::containers::InterleavedVecPointStorage;
/// # use pasture_core::containers::PerAttributeVecPointStorage;
/// # use pasture_core::containers::PointBuffer;
/// # use pasture_core::containers::PointBufferExt;
/// # use pasture_core::containers::PointBufferWriteable;
/// # use pasture_core::layout::PointType;
/// # use pasture_core::nalgebra::Vector3;
/// # use pasture_derive::PointType;
/// #[repr(C)]
/// #[derive(PointType, Debug, Clone, Copy)]
/// struct SimplePoint {
/// #[pasture(BUILTIN_POSITION_3D)]
/// pub position: Vector3<f64>,
/// #[pasture(BUILTIN_INTENSITY)]
/// pub intensity: u16,
/// }
/// fn main() {
/// let points = vec![
/// SimplePoint {
/// position: Vector3::new(1.0, 22.0, 0.0),
/// intensity: 42,
/// },
/// SimplePoint {
/// position: Vector3::new(12.0, 23.0, 0.0),
/// intensity: 84,
/// },
/// SimplePoint {
/// position: Vector3::new(10.0, 8.0, 2.0),
/// intensity: 84,
/// },
/// SimplePoint {
/// position: Vector3::new(10.0, 0.0, 1.0),
/// intensity: 84,
/// },
/// ];
/// let mut interleaved = InterleavedVecPointStorage::new(SimplePoint::layout());
/// interleaved.push_points(points.as_slice());
/// let mut attribute = PerAttributeVecPointStorage::with_capacity(interleaved.len(), SimplePoint::layout());
/// attribute.resize(interleaved.len());
/// reproject_point_cloud_between(&mut interleaved, &mut attribute, "EPSG:4326", "EPSG:3309");
/// for point in attribute.iter_point::<SimplePoint>() {
/// println!("{:?}", point);
/// }
/// }
/// ```
/// Reprojects every `POSITION_3D` value of `source_point_cloud` from
/// `source_crs` to `target_crs`, writing the results into
/// `target_point_cloud` at the same indices.
///
/// Panics if the two buffers differ in length (with the exact message the
/// tests rely on), if a layout lacks `POSITION_3D`, or if the CRS pair is
/// invalid.
pub fn reproject_point_cloud_between<
    T1: PointBuffer + PointBufferWriteable,
    T2: PointBuffer + PointBufferWriteable,
>(
    source_point_cloud: &mut T1,
    target_point_cloud: &mut T2,
    source_crs: &str,
    target_crs: &str,
) {
    if source_point_cloud.len() != target_point_cloud.len() {
        panic!("The point clouds don't have the same size!");
    }
    let projection = Projection::new(source_crs, target_crs).unwrap();
    let positions = source_point_cloud.iter_attribute::<Vector3<f64>>(&POSITION_3D);
    for (idx, position) in positions.enumerate() {
        let transformed = projection.transform(position);
        target_point_cloud.set_attribute(&POSITION_3D, idx, transformed);
    }
}
#[cfg(test)]
mod tests {
    use pasture_core::{
        containers::{InterleavedVecPointStorage, PerAttributeVecPointStorage},
        layout::PointType,
        nalgebra::Vector3,
    };
    use pasture_derive::PointType;
    use super::*;
    /// Minimal point type with a position and an intensity attribute.
    #[repr(C)]
    #[derive(PointType, Debug, Clone, Copy)]
    pub struct SimplePoint {
        #[pasture(BUILTIN_POSITION_3D)]
        pub position: Vector3<f64>,
        #[pasture(BUILTIN_INTENSITY)]
        pub intensity: u16,
    }
    // In-place reprojection: positions are rewritten inside the same buffer.
    // NOTE(review): exact float equality assumes proj returns bit-identical
    // results across platforms/versions — may be fragile.
    #[test]
    fn reproject_epsg4326_epsg3309_within() {
        let points = vec![
            SimplePoint {
                position: Vector3::new(1.0, 22.0, 0.0),
                intensity: 42,
            },
            SimplePoint {
                position: Vector3::new(12.0, 23.0, 0.0),
                intensity: 84,
            },
            SimplePoint {
                position: Vector3::new(10.0, 8.0, 2.0),
                intensity: 84,
            },
            SimplePoint {
                position: Vector3::new(10.0, 0.0, 1.0),
                intensity: 84,
            },
        ];
        let mut interleaved = InterleavedVecPointStorage::new(SimplePoint::layout());
        interleaved.push_points(points.as_slice());
        reproject_point_cloud_within(&mut interleaved, "EPSG:4326", "EPSG:3309");
        let results = vec![
            Vector3::new(12185139.590523569, 7420953.944297638, 0.0),
            Vector3::new(11104667.534080556, 7617693.973680517, 0.0),
            Vector3::new(11055663.927418157, 5832081.512011217, 2.0),
            Vector3::new(10807262.110686881, 4909128.916889962, 1.0),
        ];
        for (index, coord) in interleaved
            .iter_attribute::<Vector3<f64>>(&POSITION_3D)
            .enumerate()
        {
            assert_eq!(coord, results[index]);
        }
    }
    // Cross-buffer reprojection: same inputs/expected values as above, but
    // results land in a separate per-attribute buffer.
    #[test]
    fn reproject_epsg4326_epsg3309_between() {
        let points = vec![
            SimplePoint {
                position: Vector3::new(1.0, 22.0, 0.0),
                intensity: 42,
            },
            SimplePoint {
                position: Vector3::new(12.0, 23.0, 0.0),
                intensity: 84,
            },
            SimplePoint {
                position: Vector3::new(10.0, 8.0, 2.0),
                intensity: 84,
            },
            SimplePoint {
                position: Vector3::new(10.0, 0.0, 1.0),
                intensity: 84,
            },
        ];
        let mut interleaved = InterleavedVecPointStorage::new(SimplePoint::layout());
        interleaved.push_points(points.as_slice());
        let mut attribute =
            PerAttributeVecPointStorage::with_capacity(interleaved.len(), SimplePoint::layout());
        attribute.resize(interleaved.len());
        reproject_point_cloud_between(&mut interleaved, &mut attribute, "EPSG:4326", "EPSG:3309");
        let results = vec![
            Vector3::new(12185139.590523569, 7420953.944297638, 0.0),
            Vector3::new(11104667.534080556, 7617693.973680517, 0.0),
            Vector3::new(11055663.927418157, 5832081.512011217, 2.0),
            Vector3::new(10807262.110686881, 4909128.916889962, 1.0),
        ];
        for (index, coord) in attribute
            .iter_attribute::<Vector3<f64>>(&POSITION_3D)
            .enumerate()
        {
            assert_eq!(coord, results[index]);
        }
    }
    // Mismatched buffer sizes (4 vs 2) must trigger the documented panic.
    #[test]
    #[should_panic(expected = "The point clouds don't have the same size!")]
    fn reproject_epsg4326_epsg3309_between_error() {
        let points = vec![
            SimplePoint {
                position: Vector3::new(1.0, 22.0, 0.0),
                intensity: 42,
            },
            SimplePoint {
                position: Vector3::new(12.0, 23.0, 0.0),
                intensity: 84,
            },
            SimplePoint {
                position: Vector3::new(10.0, 8.0, 2.0),
                intensity: 84,
            },
            SimplePoint {
                position: Vector3::new(10.0, 0.0, 1.0),
                intensity: 84,
            },
        ];
        let mut interleaved = InterleavedVecPointStorage::new(SimplePoint::layout());
        interleaved.push_points(points.as_slice());
        let mut attribute = PerAttributeVecPointStorage::with_capacity(2, SimplePoint::layout());
        attribute.resize(2);
        reproject_point_cloud_between(&mut interleaved, &mut attribute, "EPSG:4326", "EPSG:3309");
    }
}
|
pub use netlink_packet_core::{DecodeError, EncodeError};
use core::ops::Range;
/// Represent a multi-bytes field with a fixed size in a packet
// A byte-index range into a packet buffer, used by the parsing code.
pub(crate) type Field = Range<usize>;
pub mod status;
pub use self::status::*;
// 1000 - 1099 are for commanding the audit system
// 1100 - 1199 user space trusted application messages
// 1200 - 1299 messages internal to the audit daemon
// 1300 - 1399 audit event messages
// 1400 - 1499 SE Linux use
// 1500 - 1599 kernel LSPP events
// 1600 - 1699 kernel crypto events
// 1700 - 1799 kernel anomaly records
// 1800 - 1899 kernel integrity events
// 1900 - 1999 future kernel use
// 2000 is for otherwise unclassified kernel audit messages (legacy)
// 2001 - 2099 unused (kernel)
// 2100 - 2199 user space anomaly records
// 2200 - 2299 user space actions taken in response to anomalies
// 2300 - 2399 user space generated LSPP events
// 2400 - 2499 user space crypto events
// 2500 - 2999 future user space (maybe integrity labels and related events)
pub mod rules;
mod message;
pub use self::message::*;
mod buffer;
pub use self::buffer::*;
mod traits;
pub(crate) use self::traits::*;
pub mod archs;
pub mod commands;
pub mod constants;
pub mod events;
#[cfg(test)]
#[macro_use]
extern crate lazy_static;
|
/// An enum to represent all characters in the Khojki block.
///
/// Variants are listed in code-point order. Note the gap between
/// [`Khojki::LetterJja`] (U+11211) and [`Khojki::LetterNya`] (U+11213):
/// U+11212 is unassigned, so it has no variant.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum Khojki {
    /// \u{11200}: '𑈀'
    LetterA,
    /// \u{11201}: '𑈁'
    LetterAa,
    /// \u{11202}: '𑈂'
    LetterI,
    /// \u{11203}: '𑈃'
    LetterU,
    /// \u{11204}: '𑈄'
    LetterE,
    /// \u{11205}: '𑈅'
    LetterAi,
    /// \u{11206}: '𑈆'
    LetterO,
    /// \u{11207}: '𑈇'
    LetterAu,
    /// \u{11208}: '𑈈'
    LetterKa,
    /// \u{11209}: '𑈉'
    LetterKha,
    /// \u{1120a}: '𑈊'
    LetterGa,
    /// \u{1120b}: '𑈋'
    LetterGga,
    /// \u{1120c}: '𑈌'
    LetterGha,
    /// \u{1120d}: '𑈍'
    LetterNga,
    /// \u{1120e}: '𑈎'
    LetterCa,
    /// \u{1120f}: '𑈏'
    LetterCha,
    /// \u{11210}: '𑈐'
    LetterJa,
    /// \u{11211}: '𑈑'
    LetterJja,
    /// \u{11213}: '𑈓'
    LetterNya,
    /// \u{11214}: '𑈔'
    LetterTta,
    /// \u{11215}: '𑈕'
    LetterTtha,
    /// \u{11216}: '𑈖'
    LetterDda,
    /// \u{11217}: '𑈗'
    LetterDdha,
    /// \u{11218}: '𑈘'
    LetterNna,
    /// \u{11219}: '𑈙'
    LetterTa,
    /// \u{1121a}: '𑈚'
    LetterTha,
    /// \u{1121b}: '𑈛'
    LetterDa,
    /// \u{1121c}: '𑈜'
    LetterDdda,
    /// \u{1121d}: '𑈝'
    LetterDha,
    /// \u{1121e}: '𑈞'
    LetterNa,
    /// \u{1121f}: '𑈟'
    LetterPa,
    /// \u{11220}: '𑈠'
    LetterPha,
    /// \u{11221}: '𑈡'
    LetterBa,
    /// \u{11222}: '𑈢'
    LetterBba,
    /// \u{11223}: '𑈣'
    LetterBha,
    /// \u{11224}: '𑈤'
    LetterMa,
    /// \u{11225}: '𑈥'
    LetterYa,
    /// \u{11226}: '𑈦'
    LetterRa,
    /// \u{11227}: '𑈧'
    LetterLa,
    /// \u{11228}: '𑈨'
    LetterVa,
    /// \u{11229}: '𑈩'
    LetterSa,
    /// \u{1122a}: '𑈪'
    LetterHa,
    /// \u{1122b}: '𑈫'
    LetterLla,
    /// \u{1122c}: '𑈬'
    VowelSignAa,
    /// \u{1122d}: '𑈭'
    VowelSignI,
    /// \u{1122e}: '𑈮'
    VowelSignIi,
    /// \u{1122f}: '𑈯'
    VowelSignU,
    /// \u{11230}: '𑈰'
    VowelSignE,
    /// \u{11231}: '𑈱'
    VowelSignAi,
    /// \u{11232}: '𑈲'
    VowelSignO,
    /// \u{11233}: '𑈳'
    VowelSignAu,
    /// \u{11234}: '𑈴'
    SignAnusvara,
    /// \u{11235}: '𑈵'
    SignVirama,
    /// \u{11236}: '𑈶'
    SignNukta,
    /// \u{11237}: '𑈷'
    SignShadda,
    /// \u{11238}: '𑈸'
    Danda,
    /// \u{11239}: '𑈹'
    DoubleDanda,
    /// \u{1123a}: '𑈺'
    WordSeparator,
    /// \u{1123b}: '𑈻'
    SectionMark,
    /// \u{1123c}: '𑈼'
    DoubleSectionMark,
    /// \u{1123d}: '𑈽'
    AbbreviationSign,
    /// \u{1123e}: '𑈾'
    SignSukun,
}
impl From<Khojki> for char {
    /// Converts the enum variant back to its Unicode code point.
    ///
    /// Implemented as `From<Khojki> for char` rather than `Into<char> for
    /// Khojki` (clippy: `from_over_into`): the standard library's blanket
    /// impl derives `Into<char>` from this automatically, so every existing
    /// `.into()` call site keeps working.
    fn from(k: Khojki) -> char {
        match k {
            Khojki::LetterA => '𑈀',
            Khojki::LetterAa => '𑈁',
            Khojki::LetterI => '𑈂',
            Khojki::LetterU => '𑈃',
            Khojki::LetterE => '𑈄',
            Khojki::LetterAi => '𑈅',
            Khojki::LetterO => '𑈆',
            Khojki::LetterAu => '𑈇',
            Khojki::LetterKa => '𑈈',
            Khojki::LetterKha => '𑈉',
            Khojki::LetterGa => '𑈊',
            Khojki::LetterGga => '𑈋',
            Khojki::LetterGha => '𑈌',
            Khojki::LetterNga => '𑈍',
            Khojki::LetterCa => '𑈎',
            Khojki::LetterCha => '𑈏',
            Khojki::LetterJa => '𑈐',
            Khojki::LetterJja => '𑈑',
            Khojki::LetterNya => '𑈓',
            Khojki::LetterTta => '𑈔',
            Khojki::LetterTtha => '𑈕',
            Khojki::LetterDda => '𑈖',
            Khojki::LetterDdha => '𑈗',
            Khojki::LetterNna => '𑈘',
            Khojki::LetterTa => '𑈙',
            Khojki::LetterTha => '𑈚',
            Khojki::LetterDa => '𑈛',
            Khojki::LetterDdda => '𑈜',
            Khojki::LetterDha => '𑈝',
            Khojki::LetterNa => '𑈞',
            Khojki::LetterPa => '𑈟',
            Khojki::LetterPha => '𑈠',
            Khojki::LetterBa => '𑈡',
            Khojki::LetterBba => '𑈢',
            Khojki::LetterBha => '𑈣',
            Khojki::LetterMa => '𑈤',
            Khojki::LetterYa => '𑈥',
            Khojki::LetterRa => '𑈦',
            Khojki::LetterLa => '𑈧',
            Khojki::LetterVa => '𑈨',
            Khojki::LetterSa => '𑈩',
            Khojki::LetterHa => '𑈪',
            Khojki::LetterLla => '𑈫',
            Khojki::VowelSignAa => '𑈬',
            Khojki::VowelSignI => '𑈭',
            Khojki::VowelSignIi => '𑈮',
            Khojki::VowelSignU => '𑈯',
            Khojki::VowelSignE => '𑈰',
            Khojki::VowelSignAi => '𑈱',
            Khojki::VowelSignO => '𑈲',
            Khojki::VowelSignAu => '𑈳',
            Khojki::SignAnusvara => '𑈴',
            Khojki::SignVirama => '𑈵',
            Khojki::SignNukta => '𑈶',
            Khojki::SignShadda => '𑈷',
            Khojki::Danda => '𑈸',
            Khojki::DoubleDanda => '𑈹',
            Khojki::WordSeparator => '𑈺',
            Khojki::SectionMark => '𑈻',
            Khojki::DoubleSectionMark => '𑈼',
            Khojki::AbbreviationSign => '𑈽',
            Khojki::SignSukun => '𑈾',
        }
    }
}
/// Inverse of the `char` conversion: maps a character in the Khojki block
/// back to its enum variant.
///
/// Note the gap: U+11212 is unassigned in the Khojki block, so there is no
/// arm between `LetterJja` (U+11211) and `LetterNya` (U+11213).
impl std::convert::TryFrom<char> for Khojki {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            '𑈀' => Ok(Khojki::LetterA),
            '𑈁' => Ok(Khojki::LetterAa),
            '𑈂' => Ok(Khojki::LetterI),
            '𑈃' => Ok(Khojki::LetterU),
            '𑈄' => Ok(Khojki::LetterE),
            '𑈅' => Ok(Khojki::LetterAi),
            '𑈆' => Ok(Khojki::LetterO),
            '𑈇' => Ok(Khojki::LetterAu),
            '𑈈' => Ok(Khojki::LetterKa),
            '𑈉' => Ok(Khojki::LetterKha),
            '𑈊' => Ok(Khojki::LetterGa),
            '𑈋' => Ok(Khojki::LetterGga),
            '𑈌' => Ok(Khojki::LetterGha),
            '𑈍' => Ok(Khojki::LetterNga),
            '𑈎' => Ok(Khojki::LetterCa),
            '𑈏' => Ok(Khojki::LetterCha),
            '𑈐' => Ok(Khojki::LetterJa),
            '𑈑' => Ok(Khojki::LetterJja),
            '𑈓' => Ok(Khojki::LetterNya),
            '𑈔' => Ok(Khojki::LetterTta),
            '𑈕' => Ok(Khojki::LetterTtha),
            '𑈖' => Ok(Khojki::LetterDda),
            '𑈗' => Ok(Khojki::LetterDdha),
            '𑈘' => Ok(Khojki::LetterNna),
            '𑈙' => Ok(Khojki::LetterTa),
            '𑈚' => Ok(Khojki::LetterTha),
            '𑈛' => Ok(Khojki::LetterDa),
            '𑈜' => Ok(Khojki::LetterDdda),
            '𑈝' => Ok(Khojki::LetterDha),
            '𑈞' => Ok(Khojki::LetterNa),
            '𑈟' => Ok(Khojki::LetterPa),
            '𑈠' => Ok(Khojki::LetterPha),
            '𑈡' => Ok(Khojki::LetterBa),
            '𑈢' => Ok(Khojki::LetterBba),
            '𑈣' => Ok(Khojki::LetterBha),
            '𑈤' => Ok(Khojki::LetterMa),
            '𑈥' => Ok(Khojki::LetterYa),
            '𑈦' => Ok(Khojki::LetterRa),
            '𑈧' => Ok(Khojki::LetterLa),
            '𑈨' => Ok(Khojki::LetterVa),
            '𑈩' => Ok(Khojki::LetterSa),
            '𑈪' => Ok(Khojki::LetterHa),
            '𑈫' => Ok(Khojki::LetterLla),
            '𑈬' => Ok(Khojki::VowelSignAa),
            '𑈭' => Ok(Khojki::VowelSignI),
            '𑈮' => Ok(Khojki::VowelSignIi),
            '𑈯' => Ok(Khojki::VowelSignU),
            '𑈰' => Ok(Khojki::VowelSignE),
            '𑈱' => Ok(Khojki::VowelSignAi),
            '𑈲' => Ok(Khojki::VowelSignO),
            '𑈳' => Ok(Khojki::VowelSignAu),
            '𑈴' => Ok(Khojki::SignAnusvara),
            '𑈵' => Ok(Khojki::SignVirama),
            '𑈶' => Ok(Khojki::SignNukta),
            '𑈷' => Ok(Khojki::SignShadda),
            '𑈸' => Ok(Khojki::Danda),
            '𑈹' => Ok(Khojki::DoubleDanda),
            '𑈺' => Ok(Khojki::WordSeparator),
            '𑈻' => Ok(Khojki::SectionMark),
            '𑈼' => Ok(Khojki::DoubleSectionMark),
            '𑈽' => Ok(Khojki::AbbreviationSign),
            '𑈾' => Ok(Khojki::SignSukun),
            // Any character outside the block (and the unassigned U+11212).
            _ => Err(()),
        }
    }
}
impl Into<u32> for Khojki {
fn into(self) -> u32 {
let c: char = self.into();
let hex = c
.escape_unicode()
.to_string()
.replace("\\u{", "")
.replace("}", "");
u32::from_str_radix(&hex, 16).unwrap()
}
}
/// Attempts to map a Unicode scalar value to a Khojki character.
impl std::convert::TryFrom<u32> for Khojki {
    type Error = ();
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        // First reject invalid scalar values, then check block membership.
        match char::try_from(u) {
            Ok(c) => Self::try_from(c),
            Err(_) => Err(()),
        }
    }
}
/// Iterates through the characters of the block in code-point order.
impl Iterator for Khojki {
    type Item = Self;
    /// Returns the character at the next code point, advancing `self`.
    ///
    /// Fix: the previous implementation computed the successor but never
    /// updated `self`, so repeated calls yielded the same element forever.
    /// Iteration still ends at the first unassigned code point (U+11212
    /// has no Khojki character) — TODO confirm whether gaps should be
    /// skipped instead.
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let code: u32 = (*self).into();
        match Self::try_from(code + 1) {
            Ok(succ) => {
                *self = succ;
                Some(succ)
            }
            Err(()) => None,
        }
    }
}
impl Khojki {
    /// The character with the lowest code point in this Unicode block.
    pub fn new() -> Self {
        Self::LetterA
    }
    /// The character's name in sentence case, derived from the variant's
    /// debug representation.
    pub fn name(&self) -> String {
        let debug_name = std::format!("Khojki{:#?}", self);
        string_morph::to_sentence_case(&debug_name)
    }
}
|
use crate::echoinfo::{DeviceInfo, DeviceProtocolInfo};
use crate::hex;
use crate::line_driver::LineDriver;
use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;
/// Success flag of a single response line from the line driver.
#[derive(PartialEq, Eq, Debug)]
pub enum LineResult {
    /// The request succeeded.
    OK,
    /// The request failed ("no good").
    NG,
}
/// One parsed response line, split on `:` and `,` into
/// `RESULT:host:eoj:property[,data]`. Fields borrow from the source line.
#[derive(Debug)]
pub(crate) struct LineResponse<'a> {
    // OK/NG outcome of the request.
    pub result: LineResult,
    // Responding host (presumably an IP address — the scans target 224.0.23.0).
    pub host: &'a str,
    // ECHONET object code as hex digits, without a "0x" prefix.
    pub eoj: &'a str,
    // Property code that was accessed, e.g. "0x9F".
    pub property: &'a str,
    // Optional payload that followed the property, if any.
    pub data: Option<&'a str>,
}
impl LineResponse<'_> {
    /// `host:0xEOJ` key identifying the responding device instance.
    pub fn hosteoj(&self) -> String {
        format!("{}:0x{}", self.host, self.eoj)
    }
    /// The class portion (first four hex digits) of the EOJ, `0x`-prefixed.
    pub fn hexclass(&self) -> String {
        format!("0x{}", &self.eoj[..4])
    }
    /// The full EOJ, `0x`-prefixed.
    pub fn eoj(&self) -> String {
        format!("0x{}", self.eoj)
    }
    /// The host as an owned string.
    pub fn host(&self) -> String {
        self.host.to_owned()
    }
    /// Whether the line reported success.
    pub fn is_ok(&self) -> bool {
        matches!(self.result, LineResult::OK)
    }
}
impl<'a> TryFrom<&'a str> for LineResponse<'a> {
    type Error = ();
    /// Parses `RESULT:host:eoj:property[,data]`, borrowing from `source`.
    /// Any fields past the fifth are ignored.
    fn try_from(source: &'a str) -> std::result::Result<Self, ()> {
        let trimmed = source.trim();
        let mut fields = trimmed.split(|c| c == ':' || c == ',');
        let result = match fields.next().ok_or(())? {
            "OK" => LineResult::OK,
            "NG" => LineResult::NG,
            _ => return Err(()),
        };
        let host = fields.next().ok_or(())?;
        let eoj = fields.next().ok_or(())?;
        let property = fields.next().ok_or(())?;
        // The data segment is optional.
        let data = fields.next();
        Ok(LineResponse {
            result,
            host,
            eoj,
            property,
            data,
        })
    }
}
/// Multicast request to the node profile object (0ef000) for property 0xD7 —
/// presumably the self-node instance list used to discover all classes; confirm
/// against the ECHONET Lite property tables.
const GETALLNODES: &str = "224.0.23.0:0ef000:0xD7";
/// Broadcasts the instance-list query and collects the set of 4-hex-digit
/// device class codes found in the responses (without a "0x" prefix).
pub(super) fn get_all_classes(driver: &mut LineDriver) -> std::io::Result<HashSet<String>> {
    let res = driver.exec_multi(GETALLNODES)?;
    Ok(res
        .iter()
        .map(|r| LineResponse::try_from(r.as_ref()))
        //make sure we have a valid entry
        .filter_map(|opt_r| opt_r.ok())
        .inspect(|lr| println!("line response: {:?}", lr))
        //keep only the data
        .filter_map(|r| r.data)
        // chop the 4-char prefix; `get` (instead of `&d[4..]`) avoids a
        // panic on payloads shorter than 4 bytes
        .filter_map(|d| d.get(4..))
        //keep all non-empty stuff
        .filter(|d| !d.is_empty())
        .flat_map(|d| d.chars())
        .collect::<Vec<_>>()
        // each class+instance entry is 4 hex digits of class followed by 2 of
        // instance; chunking the concatenated stream by 4 yields class codes
        .chunks(4)
        .map(|chunk| chunk.iter().collect::<String>())
        .collect::<HashSet<_>>())
}
/// Reports discovered device classes that have no definition, and returns the
/// covered classes reformatted with a "0x" prefix.
pub(super) fn class_intersect(
    available: &HashSet<String>,
    discovered: &HashSet<String>,
    //eojs have "0x" prefix in the response
) -> HashSet<String> {
    //do we cover all the discovered eojs?
    let uncovered: Vec<String> = discovered.difference(available).cloned().collect();
    if uncovered.is_empty() {
        println!("All discovered classes are covered");
    } else {
        println!("Warning: the following device classes were detected but *not* covered by the device definitions:");
        println!("{}", uncovered.join(", "));
    }
    //we need to map to "0x0000" format
    available
        .intersection(discovered)
        .map(|class| format!("0x{}", class))
        .collect()
}
/// Queries the get (0x9F) and set (0x9E) property maps of instance 00 of every
/// class, and folds the responses into per-device info records.
pub(super) fn scan_classes(
    classes: HashSet<String>,
    driver: &mut LineDriver,
    //eojs have "0x" prefix in the response
) -> HashMap<String, DeviceInfo> {
    // Two commands per class: one for each property map.
    let mut cmd = String::new();
    for eoj in classes {
        cmd.push_str(&format!("224.0.23.0:{}00:0x9F\n224.0.23.0:{0}00:0x9E\n", eoj));
    }
    println!("Generated Scan command:\n{}", cmd);
    //for the unwrap, see TODO in line_driver
    let res = driver.exec_multi(&cmd).unwrap();
    println!("scan results:\n{:?}", res);
    let parsed: Vec<LineResponse> = res
        .iter()
        .map(|r| LineResponse::try_from(r.as_str()).unwrap())
        .collect();
    generate_devices(parsed)
}
//eojs have "0x" prefix in the response
/// Groups property-map responses by host:eoj and records each device's
/// readable (0x9F map) and writable (0x9E map) properties.
fn generate_devices(parsed: Vec<LineResponse>) -> HashMap<String, DeviceInfo> {
    let mut set = HashMap::with_capacity(parsed.len());
    for resp in parsed {
        // Skip failed responses and responses that carry no payload.
        let data = match resp.data {
            Some(d) if resp.result != LineResult::NG => d,
            _ => continue,
        };
        // A device entry is created as soon as we see any valid response for it.
        let entry = set
            .entry(resp.hosteoj())
            .or_insert_with(|| DeviceInfo::new(resp.host(), resp.eoj()));
        let props_u8 = match hex::to_bytes(data) {
            Some(bytes) => bytes,
            None => {
                println!("hex to bytes conversion failed!");
                continue;
            }
        };
        if let Some(props) = parse_property_map(&props_u8) {
            let text_props = props
                .into_iter()
                // `{:02X}` zero-pads; the previous `{:2X}` space-padded
                // property codes below 0x10 (e.g. "0x 5").
                .map(|prop| format!("0x{:02X}", prop))
                .collect::<Vec<String>>();
            match resp.property {
                "0x9F" => entry.r.extend(text_props),
                "0x9E" => entry.w.extend(text_props),
                other => println!("generate_devices: suspicious property: {}", other),
            }
        }
    }
    set
}
/// Parses an ECHONET property map payload into a list of property codes (EPCs).
///
/// The first byte is the property count. A count below 16 means the EPCs are
/// listed directly after it; 16 or more switches to the 16-byte bitmap form.
pub fn parse_property_map(property_map: &[u8]) -> Option<Vec<u8>> {
    let count = *property_map.first()?;
    if count >= 16u8 {
        parse_binary_map(property_map)
    } else {
        // Direct listing. The declared count is trusted and not validated
        // against the actual payload length.
        Some(property_map[1..].to_vec())
    }
}
/// Decodes the 16-byte bitmap form: bit `i` of byte `n` set means property
/// code `0x80 + n + 0x10 * i` is present.
fn parse_binary_map(property_map: &[u8]) -> Option<Vec<u8>> {
    if property_map.len() != 17 {
        return None;
    }
    let mut epcs = Vec::new();
    for (nth, byte) in property_map[1..].iter().enumerate() {
        for bit in 0..8 {
            if byte & (1u8 << bit) != 0 {
                epcs.push(0x80u8 + (nth as u8) + (0x10u8 * bit));
            }
        }
    }
    Some(epcs)
}
/// Queries the node profile for protocol version (EPC 0x82) and manufacturer
/// code (EPC 0x8A), plus each class instance's 0x82 (appendix) value, and
/// folds the answers into `DeviceProtocolInfo` records.
pub(super) fn scan_protoinfo(
    classes: HashSet<String>,
    driver: &mut LineDriver,
    //eojs have "0x" prefix in the response
) -> std::io::Result<Vec<DeviceProtocolInfo>> {
    /// Fills in protocol and manufacturer data for `info` from the responses
    /// of the matching host (matched by the host prefix of `info.id`).
    fn map_info(info: DeviceProtocolInfo, responses: &[LineResponse]) -> DeviceProtocolInfo {
        //get protocol info
        let info = if let Some(proto_resp) = responses.iter().find(|resp| {
            info.id.starts_with(resp.host)
                && resp.is_ok()
                && resp.property == "0x82"
                && resp.data.is_some()
        }) {
            info.with_protocol(proto_resp.data.unwrap().to_string())
        } else {
            info
        };
        //get manufacturer code
        let info = if let Some(man_resp) = responses.iter().find(|resp| {
            info.id.starts_with(resp.host)
                && resp.is_ok()
                && resp.property == "0x8A"
                && resp.data.is_some()
        }) {
            info.with_code(man_resp.data.unwrap().to_string())
        } else {
            info
        };
        info
    }
    let protoinfocommand = "224.0.23.0:0ef000:0x82\n".to_string();
    let manufacturercommand = "224.0.23.0:0ef000:0x8A\n".to_string();
    // One 0x82 query per discovered class (instance 00).
    let appendixinfocommand = classes
        .iter()
        .map(|class| format!("224.0.23.0:{}00:0x82\n", class))
        .collect::<String>();
    let command = std::iter::once(protoinfocommand)
        .chain(std::iter::once(manufacturercommand))
        .chain(std::iter::once(appendixinfocommand))
        .collect::<String>();
    println!("scan_protoinfo command:\n{}", command);
    let res = driver.exec_multi(&command)?;
    // NOTE(review): a malformed response line panics here via `expect` —
    // confirm that is acceptable for this tool.
    let responses = res
        .iter()
        .map(|response| {
            LineResponse::try_from(response.as_ref()).expect("died when collecting information")
        })
        .collect::<Vec<LineResponse<'_>>>();
    //now, use the appendix info to get stuff done first
    // Only non-node-profile (not "0ef0...") 0x82 responses seed records.
    let infos = responses
        .iter()
        .filter(|lr| lr.property == "0x82" && !lr.eoj.starts_with("0ef0"))
        .map(|lr| (lr, DeviceProtocolInfo::new(lr.hosteoj())))
        .map(|(lr, info)| {
            if let Some(data) = lr.data {
                info.with_appendix(data.to_string())
            } else {
                info
            }
        })
        //TODO process the rest of the stuff
        .collect::<Vec<_>>();
    let infos = infos
        .into_iter()
        .map(|info| {
            //for each piece we have, update it by looking into the responses
            map_info(info, &responses)
        })
        .collect::<Vec<_>>();
    Ok(infos)
}
#[cfg(test)]
mod tests {
    use super::*;
    // An empty slice has no count byte, so parsing must fail.
    #[test]
    fn empty_property_map_is_none() {
        assert_eq!(parse_property_map(&[]).is_none(), true);
    }
    // A count of zero yields an empty property list.
    #[test]
    fn zero_properties_is_ok() {
        let res = parse_property_map(&[0]).unwrap();
        assert!(res.is_empty());
    }
    // Fewer than 16 properties are listed directly after the count byte.
    #[test]
    fn property_maps_simple_ok() {
        let pmap = [3u8, 0x80u8, 0x9Eu8, 0x9Du8];
        let expected = [0x80u8, 0x9Eu8, 0x9D];
        assert_eq!(parse_property_map(&pmap).unwrap(), expected);
    }
    // 16 or more properties switch to the 16-byte bitmap encoding.
    #[test]
    fn property_maps_binary_check() {
        let bmap = [
            24, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0xFF,
        ];
        let result = parse_property_map(&bmap).unwrap();
        let expected = vec![
            0x80u8, 0x90u8, 0xA0u8, 0xB0u8, 0xC0u8, 0xD0u8, 0xE0u8, 0xF0u8, 0x81u8, 0x91u8, 0xA1u8,
            0xB1u8, 0xC1u8, 0xD1u8, 0xE1u8, 0xF1u8, 0x8Fu8, 0x9Fu8, 0xAFu8, 0xBFu8, 0xCFu8, 0xDFu8,
            0xEFu8, 0xFFu8,
        ];
        println!("result: {:?}", result);
        assert_eq!(result.len(), 24);
        assert_eq!(result, expected);
    }
}
|
use super::worker::*;
use super::*;
use crate::mmtk::MMTK;
use crate::vm::VMBinding;
use std::any::{type_name, TypeId};
/// A schedulable unit of work executed by a `Worker`.
pub trait Work<C: Context>: 'static + Send + Sync {
    fn do_work(&mut self, worker: &mut Worker<C>, context: &'static C);
    /// Runs `do_work` while recording timing statistics keyed by this work's
    /// concrete type on the executing worker.
    #[inline]
    fn do_work_with_stat(&mut self, worker: &mut Worker<C>, context: &'static C) {
        let stat = worker
            .stat
            .measure_work(TypeId::of::<Self>(), type_name::<Self>());
        self.do_work(worker, context);
        stat.end_of_work(&mut worker.stat);
    }
}
/// A special kind of work that will execute on the coordinator (i.e. controller) thread
///
/// The coordinator thread holds the global monitor lock when executing `CoordinatorWork`s.
/// So, directly adding new work to any buckets will cause dead lock.
/// For this case, use `WorkBucket::add_with_priority_unsync` instead.
///
/// This is a marker trait: it adds no methods beyond `Work`.
pub trait CoordinatorWork<C: Context>: 'static + Send + Sync + Work<C> {}
/// VM-facing variant of `Work`, fixed to the `MMTK<VM>` context and taking a
/// `GCWorker` instead of a generic `Worker`.
pub trait GCWork<VM: VMBinding>: 'static + Send + Sync + Sized + Work<MMTK<VM>> {
    fn do_work(&mut self, worker: &mut GCWorker<VM>, mmtk: &'static MMTK<VM>);
}
/// Blanket bridge: every `GCWork<VM>` automatically implements
/// `Work<MMTK<VM>>`. Declared `default fn` (specialization feature) so
/// individual types can override the bridge.
impl<VM: VMBinding, W: GCWork<VM>> Work<MMTK<VM>> for W {
    #[inline(always)]
    default fn do_work(&mut self, worker: &mut Worker<MMTK<VM>>, mmtk: &'static MMTK<VM>) {
        // Trace the concrete work type before delegating to the GCWork impl.
        trace!("GCWork.do_work() {}", std::any::type_name::<W>());
        GCWork::do_work(self, worker, mmtk)
    }
}
|
extern crate permutohedron;
use permutohedron::heap_recursive;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;
/// Sums the distances of consecutive location pairs along `path`.
///
/// `distances` is keyed by the two location names concatenated (both
/// directions are inserted by the caller). Takes `&[String]` (more general
/// than `&Vec<String>`; `&Vec` coerces at call sites) and uses `windows(2)`,
/// which also makes an empty path return 0 instead of panicking on the
/// previous `path.len() - 1` underflow.
fn get_total_distance(distances: &HashMap<String, i32>, path: &[String]) -> i32 {
    path.windows(2)
        .map(|pair| {
            let key = format!("{}{}", pair[0], pair[1]);
            // Missing pairs are a bug in the input parsing, so panicking is fine.
            distances[&key]
        })
        .sum()
}
// TODO: Works, but isn't type-checking at the moment. Swapped out for crate.
// fn permutations(list: &Vec<String>) -> Vec<Vec<String>> {
// match list.len() {
// 0 => vec![],
// 1 => vec![vec![list[0]]],
// 2 => vec![
// vec![list[0], list[1]],
// vec![list[1], list[0]],
// ],
// _ => {
// let mut perms = Vec::new();
// for i in 0..list.len() {
// let mut list_without_ith = Vec::new();
// for (j, item) in list.iter().enumerate() {
// if i != j {
// list_without_ith.push(item.clone());
// }
// }
// for next_perm in permutations(&list_without_ith) {
// let mut perm = Vec::new();
// perm.push(list[i]);
// for x in next_perm {
// perm.push(x);
// }
// perms.push(perm);
// }
// }
// perms
// }
// }
// }
/// Advent of Code day 9: shortest and longest route visiting every location.
fn main() {
    // Read lines of the form "A to B = 123".
    let file = File::open("input.txt").expect("file not found");
    let mut reader = BufReader::new(file);
    let mut contents = String::new();
    reader
        .read_to_string(&mut contents)
        .expect("could not read input file");
    let mut locations = HashSet::new();
    let mut distances = HashMap::new();
    for line in contents.lines() {
        let parts: Vec<_> = line.split(' ').collect();
        let (from, to) = (parts[0], parts[2]);
        let distance = parts[4].parse().unwrap();
        locations.insert(from);
        locations.insert(to);
        // Store both directions so lookups never depend on pair order.
        distances.insert(format!("{}{}", from, to), distance);
        distances.insert(format!("{}{}", to, from), distance);
    }
    // TODO: Gross. Better way?
    let mut locations_vec: Vec<_> = locations.into_iter().collect();
    let mut min_distance = i32::max_value();
    let mut max_distance = 0;
    // Try every permutation of locations and track the extremes.
    heap_recursive(&mut locations_vec, |path| {
        // TODO: Gross. Can this be done with path more directly?
        let this_path: Vec<String> = path.iter().map(|p| String::from(*p)).collect();
        let distance = get_total_distance(&distances, &this_path);
        min_distance = min_distance.min(distance);
        max_distance = max_distance.max(distance);
    });
    println!("A: {}", min_distance);
    println!("B: {}", max_distance);
}
|
use std::ptr;
/// Replaces `*t` with `f(old_value)` without requiring `T: Default`.
///
/// Soundness fix: the value is moved out of `*t` with `ptr::read`, so if `f`
/// panics before `ptr::write` runs, unwinding would drop the original value
/// twice (once in the panicking scope, once by `t`'s owner). A drop guard now
/// aborts the process in that case instead of triggering undefined behavior —
/// the same strategy the `take_mut` crate uses.
pub fn replace_with<T, F: FnOnce(T) -> T>(t: &mut T, f: F) {
    struct AbortOnDrop;
    impl Drop for AbortOnDrop {
        fn drop(&mut self) {
            // Reached only while unwinding out of `f`.
            std::process::abort();
        }
    }
    let p = t as *mut T;
    unsafe {
        // SAFETY: `p` is valid and uniquely borrowed via `&mut T`. The value
        // is read out, transformed, and written back before the borrow ends;
        // the guard prevents a double drop if `f` unwinds in between.
        let old = ptr::read(p);
        let guard = AbortOnDrop;
        let new = f(old);
        std::mem::forget(guard);
        ptr::write(p, new);
    }
}
|
use std::collections::HashMap;
/// Right-hand operand of a gate: a literal 16-bit value or a wire name.
#[derive(Debug, Clone)]
enum Operand {
    Value(u16),
    Name(String),
}
impl Operand {
fn eval(&self, connections: &[Connection], wires: &mut HashMap<String, u16>) -> u16 {
match self {
Self::Value(n) => *n,
Self::Name(x) => signal(connections, find(connections, x), wires),
}
}
}
impl From<String> for Operand {
    /// Tokens that parse as u16 become literals; everything else is a wire name.
    fn from(s: String) -> Self {
        match s.parse() {
            Ok(n) => Operand::Value(n),
            Err(_) => Operand::Name(s),
        }
    }
}
/// A gate feeding a wire. `Not` always takes a wire name (that is all the
/// parser constructs for it); the binary gates take literal-or-name operands.
#[derive(Debug, Clone)]
enum Op {
    Copy(Operand),
    Not(String),
    And(Operand, Operand),
    Or(Operand, Operand),
    LShift(Operand, Operand),
    RShift(Operand, Operand),
}
/// One circuit line: the result of `op` drives wire `target`.
#[derive(Debug, Clone)]
struct Connection {
    op: Op,
    target: String,
}
impl std::str::FromStr for Connection {
    type Err = ();
    /// Parses lines like `123 -> x`, `NOT y -> z`, or `a AND b -> c`.
    ///
    /// Malformed input now yields `Err(())` instead of panicking via
    /// `unreachable!()` — a fallible parser should not abort the process on
    /// bad data. Well-formed input behaves exactly as before.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split(" -> ");
        // `split` always yields at least one item, so this never fails.
        let mut tokens = parts.next().ok_or(())?.split(' ').map(|t| t.to_owned());
        let op = match (tokens.next(), tokens.next(), tokens.next()) {
            (Some(x), None, None) => Op::Copy(x.into()),
            (Some(op), Some(x), None) if op == "NOT" => Op::Not(x),
            (Some(x), Some(op), Some(y)) => {
                let x = x.into();
                let y = y.into();
                match op.as_str() {
                    "AND" => Op::And(x, y),
                    "OR" => Op::Or(x, y),
                    "LSHIFT" => Op::LShift(x, y),
                    "RSHIFT" => Op::RShift(x, y),
                    _ => return Err(()),
                }
            }
            _ => return Err(()),
        };
        // A line without " -> " has no target and is malformed.
        let target = parts.next().ok_or(())?.to_owned();
        Ok(Self { op, target })
    }
}
/// Looks up the (unique) connection driving `target`.
/// Panics if no connection drives that wire — a malformed circuit.
fn find<'c>(connections: &'c [Connection], target: &str) -> &'c Connection {
    connections.iter().find(|c| c.target == target).unwrap()
}
/// Memoised evaluation of the gate driving `conn.target`; computed wire
/// values are cached in `wires` so shared subcircuits are evaluated once.
fn signal(connections: &[Connection], conn: &Connection, wires: &mut HashMap<String, u16>) -> u16 {
    if let Some(&cached) = wires.get(&conn.target) {
        return cached;
    }
    let value = match conn.op {
        // Operand::eval handles both the literal and wire-name cases.
        Op::Copy(ref x) => x.eval(connections, wires),
        Op::Not(ref x) => !signal(connections, find(connections, x), wires),
        Op::And(ref x, ref y) => x.eval(connections, wires) & y.eval(connections, wires),
        Op::Or(ref x, ref y) => x.eval(connections, wires) | y.eval(connections, wires),
        Op::LShift(ref x, ref y) => x.eval(connections, wires) << y.eval(connections, wires),
        Op::RShift(ref x, ref y) => x.eval(connections, wires) >> y.eval(connections, wires),
    };
    wires.insert(conn.target.clone(), value);
    value
}
/// Mutable counterpart of `find`; used to rewire a connection in part 2.
/// Panics if no connection drives `target`.
fn find_mut<'c>(connections: &'c mut [Connection], target: &str) -> &'c mut Connection {
    connections.iter_mut().find(|c| c.target == target).unwrap()
}
/// Advent of Code day 7: parse the circuit, then run both parts.
fn main() {
    let raw = std::fs::read_to_string("input").unwrap();
    let input: Vec<Connection> = raw.lines().map(|l| l.parse().unwrap()).collect();
    part1(&input);
    part2(&input);
}
/// Part 1: print the signal on wire "a" with a fresh memoisation cache.
fn part1(input: &[Connection]) {
    let mut wires = HashMap::new();
    println!("{}", signal(input, find(input, "a"), &mut wires));
}
/// Part 2: feed part 1's "a" value into wire "b" and re-evaluate "a".
fn part2(input: &[Connection]) {
    let mut circuit = input.to_vec();
    // First pass over the unmodified circuit.
    let first_a = signal(&circuit, find(&circuit, "a"), &mut HashMap::new());
    // Override "b", then evaluate again with an empty cache.
    find_mut(&mut circuit, "b").op = Op::Copy(Operand::Value(first_a));
    let second_a = signal(&circuit, find(&circuit, "a"), &mut HashMap::new());
    println!("{}", second_a);
}
|
use std::time::Duration;
const WIDTH: usize = 100;
/// State of one cell in the 1-D automaton.
/// ("Allive" [sic] — the misspelled name is kept; renaming would touch every use.)
#[derive(Debug, Clone, Copy)]
enum Cell {
    Allive,
    Dead,
}
/// Runs the automaton forever, printing one generation every 100 ms.
fn main() {
    // Seed the world with a single live cell at the right edge.
    let mut grid = vec![Cell::Dead; WIDTH];
    if let Some(last) = grid.last_mut() {
        *last = Cell::Allive;
    }
    loop {
        display(&grid);
        grid = update(grid);
        std::thread::sleep(Duration::from_millis(100));
    }
}
/// Computes the next generation on a circular grid.
///
/// Wraparound fix: the previous code used `i.wrapping_sub(1).rem_euclid(len)`
/// for the left neighbour, which for `i == 0` yields `usize::MAX % len`
/// (15 when `len == 100`), not `len - 1`. `(i + len - 1) % len` wraps
/// correctly. Iterating `0..len` instead of `0..WIDTH` also makes the
/// function correct for any grid length.
fn update(old: Vec<Cell>) -> Vec<Cell> {
    let len = old.len();
    (0..len)
        .map(|i| {
            let left = old[(i + len - 1) % len];
            let mid = old[i];
            let right = old[(i + 1) % len];
            is_alive(left, mid, right)
        })
        .collect()
}
/// Next state of the middle cell given its (left, self, right) neighbourhood.
/// The truth table (111→0, 110→1, 101→1, 100→0, 011→1, 010→1, 001→1, 000→0)
/// is elementary cellular automaton Rule 110.
fn is_alive(a: Cell, b: Cell, c: Cell) -> Cell {
    match (a, b, c) {
        (Cell::Allive, Cell::Allive, Cell::Allive) => Cell::Dead, // 7
        (Cell::Allive, Cell::Allive, Cell::Dead) => Cell::Allive, // 6
        (Cell::Allive, Cell::Dead, Cell::Allive) => Cell::Allive, // 5
        (Cell::Allive, Cell::Dead, Cell::Dead) => Cell::Dead, // 4
        (Cell::Dead, Cell::Allive, Cell::Allive) => Cell::Allive, // 3
        (Cell::Dead, Cell::Allive, Cell::Dead) => Cell::Allive, // 2
        (Cell::Dead, Cell::Dead, Cell::Allive) => Cell::Allive, // 1
        (Cell::Dead, Cell::Dead, Cell::Dead) => Cell::Dead, // 0
    }
}
/// Prints one generation as a line of '#' (alive) and ' ' (dead).
fn display(grid: &[Cell]) {
    let line: String = grid
        .iter()
        .map(|cell| match cell {
            Cell::Allive => '#',
            Cell::Dead => ' ',
        })
        .collect();
    println!("{}", line);
}
|
use enet::*;
#[path = "./gamepacket.rs"]
mod gamepacket;
/// Handlers for named client actions.
pub struct Actions {
    /// Raw items.dat payload, sent on "refresh_item_data".
    /// NOTE(review): `refresh_item_data` unwraps this — confirm callers always set it.
    pub items_dat: Option<Vec<u8>>,
}
impl Actions {
    /// Sends the raw items.dat bytes to the peer.
    /// NOTE(review): panics if `items_dat` is `None` — confirm this invariant.
    fn refresh_item_data(self, peer: &mut Peer<()>, channel: &u8) {
        gamepacket::raw(&self.items_dat.unwrap())
            .send(peer, channel);
    }
    /// Sends the world-select menu variant packet to the peer.
    fn enter_game(self, peer: &mut Peer<()>, channel: &u8) {
        gamepacket::new()
            .string("OnRequestWorldSelectMenu")
            .string("default|\nadd_button|Showing: `wWorlds``|_catselect_|0.6|3529161471|\n")
            .send(peer, channel);
    }
    /// Dispatches a named action to its handler; consumes `self`.
    /// Unknown actions are logged and ignored.
    pub fn match_it(self, action: &str, peer: &mut Peer<()>, channel: &u8) {
        match action {
            "refresh_item_data" => self.refresh_item_data(peer, channel),
            "enter_game" => self.enter_game(peer, channel),
            _ => println!("unhandled action: {}", action)
        }
    }
}
use anyhow::Result;
use rusoto_core::Region;
use rusoto_s3::{S3Client, S3};
use tokio;
/// Lists the account's S3 buckets and prints the raw response.
#[tokio::main]
async fn main() -> Result<()> {
    // Region from rusoto's default resolution (presumably env/profile-based
    // — confirm against rusoto docs).
    let region = Region::default();
    let client = S3Client::new(region);
    let results = client.list_buckets().await?;
    print!("{:?}", results);
    Ok(())
}
|
#![allow(unused_parens)]
#![allow(unused_imports)]
use frame_support::{traits::Get, weights::Weight};
use sp_std::marker::PhantomData;
/// Weight functions for pallet_collator_selection.
///
/// NOTE(review): these constants look auto-generated by the Substrate
/// benchmarking CLI — prefer regenerating over hand-editing.
pub struct WeightInfo<T>(PhantomData<T>);
impl<T: frame_system::Config> pallet_collator_selection::WeightInfo for WeightInfo<T> {
    // `b` = number of invulnerables.
    fn set_invulnerables(b: u32) -> Weight {
        (18_481_000 as Weight)
            // Standard Error: 0
            .saturating_add((67_000 as Weight).saturating_mul(b as Weight))
            .saturating_add(T::DbWeight::get().writes(1 as Weight))
    }
    fn set_desired_candidates() -> Weight {
        (16_376_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight))
    }
    fn set_candidacy_bond() -> Weight {
        (17_031_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight))
    }
    // `c` = number of existing candidates.
    fn register_as_candidate(c: u32) -> Weight {
        (72_345_000 as Weight)
            // Standard Error: 0
            .saturating_add((197_000 as Weight).saturating_mul(c as Weight))
            .saturating_add(T::DbWeight::get().reads(4 as Weight))
            .saturating_add(T::DbWeight::get().writes(2 as Weight))
    }
    // `c` = number of existing candidates.
    fn leave_intent(c: u32) -> Weight {
        (55_446_000 as Weight)
            // Standard Error: 0
            .saturating_add((153_000 as Weight).saturating_mul(c as Weight))
            .saturating_add(T::DbWeight::get().reads(1 as Weight))
            .saturating_add(T::DbWeight::get().writes(2 as Weight))
    }
    fn note_author() -> Weight {
        (71_828_000 as Weight)
            .saturating_add(T::DbWeight::get().reads(3 as Weight))
            .saturating_add(T::DbWeight::get().writes(4 as Weight))
    }
    // `r` and `c` scale with removed and current candidates respectively.
    fn new_session(r: u32, c: u32) -> Weight {
        (0 as Weight)
            // Standard Error: 1_004_000
            .saturating_add((110_066_000 as Weight).saturating_mul(r as Weight))
            // Standard Error: 1_004_000
            .saturating_add((152_035_000 as Weight).saturating_mul(c as Weight))
            .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(r as Weight)))
            .saturating_add(T::DbWeight::get().reads((2 as Weight).saturating_mul(c as Weight)))
            .saturating_add(T::DbWeight::get().writes((2 as Weight).saturating_mul(r as Weight)))
            .saturating_add(T::DbWeight::get().writes((2 as Weight).saturating_mul(c as Weight)))
    }
}
|
use eval::LuaResult;
use eval::LV;
use eval::LV::*;
use lua_stdlib::math::lfloat;
use lua_stdlib::math::{lua_coerce_float, lua_coerce_int};
use natives::lua_coerce_lnum;
use natives::lua_fmt_for_print;
use natives::lua_truthy;
/// Lua `==`: wraps `lua_binop_eq_impl` into a Lua boolean.
pub fn lua_binop_eq<'a>(l: &LV, r: &LV) -> LV {
    match lua_binop_eq_impl(l, r) {
        true => LuaTrue,
        false => LuaFalse,
    }
}
/// Lua `~=`: the negation of `lua_binop_eq_impl` as a Lua boolean.
pub fn lua_binop_neq<'a>(l: &LV, r: &LV) -> LV {
    match lua_binop_eq_impl(l, r) {
        true => LuaFalse,
        false => LuaTrue,
    }
}
/// Structural equality for Lua values; tables and functions compare by
/// identity (their `id`), everything else by value.
///
/// Fix: removed the leftover `dbg!(l); dbg!(r);` debug residue in the
/// `LuaTable` arm, which dumped both operands to stderr on every table
/// comparison (matching the commented-out debug prints above it).
pub fn lua_binop_eq_impl(l: &LV, r: &LV) -> bool {
    match l {
        LuaNil => {
            matches!(r, LuaNil)
        }
        Num(n) => match r {
            Num(m) => n == m,
            _ => false,
        },
        LuaS(n) => match r {
            LuaS(m) => n == m,
            _ => false,
        },
        LuaTrue => matches!(r, LuaTrue),
        LuaFalse => matches!(r, LuaFalse),
        LuaList(_) => todo!(),
        // Tables compare by identity, not contents.
        LuaTable { id, .. } => match r {
            LuaTable { id: id2, .. } => id == id2,
            _ => false,
        },
        LuaFunc { id, .. } => match r {
            LuaFunc { id: id2, .. } => id == id2,
            _ => false,
        },
        CodeIndex(_) => todo!(),
        Code(_) => todo!(),
        NameList(_, _) => todo!(),
        NativeFunc { .. } => todo!(),
        BytecodeFunc { .. } => todo!(),
        PCall { .. } => todo!(),
    }
}
/// Lua `<=`: wraps `lua_binop_leq_impl` into a Lua boolean.
pub fn lua_binop_leq<'a>(l: &LV, r: &LV) -> LV {
    match lua_binop_leq_impl(l, r) {
        true => LuaTrue,
        false => LuaFalse,
    }
}
/// `<=` comparison; only nil/nil and number/number pairs are implemented,
/// everything else panics as a placeholder.
pub fn lua_binop_leq_impl(l: &LV, r: &LV) -> bool {
    match l {
        LuaNil => {
            // NOTE(review): real Lua raises an error when comparing nils;
            // returning `r is nil` here is unusual — confirm intended.
            matches!(r, LuaNil)
        }
        Num(n) => match r {
            Num(m) => n <= m,
            _ => false,
        },
        _ => {
            dbg!(l);
            dbg!(r);
            panic!("need binop impl");
        }
    }
}
/// Lua `^` (exponentiation): both operands are coerced to floats first.
pub fn lua_exponent_eq<'a>(l: &LV, r: &LV) -> LuaResult {
    if let (Ok(base), Ok(exp)) = (lua_coerce_float(l), lua_coerce_float(r)) {
        Ok(lfloat(base.powf(exp)))
    } else {
        LuaErr::msg(format!("Type mismatch on exponent. Received {0} and {1}", l, r))
    }
}
// pub fn lua_binop_minus<'a>(l: &LV, r: &LV) -> LV {
// match l {
// Num(n) => match r {
// Num(m) => LV::Num(n - m),
// _ => panic!("FAILURE (need to implement lua-bubbling failure)"),
// },
// _ => {
// dbg!(l);
// dbg!(r);
// panic!("need binop impl");
// }
// }
// }
/// Defines a fallible numeric binary operator `$impl_name` (coerces both
/// operands to Lua numbers, applies `$number_op`) plus a panicking wrapper
/// `$external_name`.
///
/// Fix: `macro_rules!` does not substitute metavariables inside string
/// literals, so the old `"... ($impl_name)"` error text contained the literal
/// characters `$impl_name`. `stringify!` now embeds the real function name.
macro_rules! binop {
    ($impl_name: ident, $external_name: ident, $left: ident, $right: ident, $number_op: expr) => {
        pub fn $impl_name(l: &LV, r: &LV) -> LuaResult {
            match (lua_coerce_lnum(l), lua_coerce_lnum(r)) {
                (Ok($left), Ok($right)) => Ok(LV::Num($number_op)),
                (_, _) => {
                    // TODO add metamethod mechanisms here
                    return LuaErr::msg(format!(
                        "{0} and {1} can't be added ({2})",
                        l,
                        r,
                        stringify!($impl_name)
                    ));
                }
            }
        }
        /// Panicking wrapper around the fallible implementation.
        pub fn $external_name(l: &LV, r: &LV) -> LV {
            $impl_name(l, r).unwrap()
        }
    };
}
// Arithmetic operators generated by `binop!`: each invocation defines the
// fallible variant (first name) plus its panicking wrapper (second name).
binop!(lua_binop_plus_int, lua_binop_plus, l, r, &l + &r);
binop!(lua_binop_minus_int, lua_binop_minus, l, r, &l - &r);
binop!(lua_binop_times_int, lua_binop_times, l, r, &l * &r);
// Note: for div/floordiv the `_panic` suffix marks the panicking wrapper,
// unlike the `_int` naming used above.
binop!(lua_binop_div, lua_binop_div_panic, l, r, &l / &r);
binop!(
    lua_binop_floordiv,
    lua_binop_floordiv_panic,
    l,
    r,
    (&l / &r).floor()
);
// Lua modulo semantics: a - floor(a/b) * b (result takes the divisor's sign).
binop!(
    lua_binop_mod_int,
    lua_binop_mod,
    l,
    r,
    l - &((&l / &r).floor()) * &r
);
/// Lua `and`: yields the first operand when it is falsy (only `false` and
/// `nil` are falsy), otherwise the second operand.
pub fn lua_binop_and<'a>(l: &LV, r: &LV) -> LV {
    match lua_truthy(l) {
        true => r.clone(),
        false => l.clone(),
    }
}
/// Lua `or`: yields the first operand when it is truthy, otherwise the second.
pub fn lua_binop_or<'a>(l: &LV, r: &LV) -> LV {
    match lua_truthy(l) {
        true => l.clone(),
        false => r.clone(),
    }
}
/// Lua `<`: implemented for number/number pairs only; other combinations
/// panic as placeholders.
pub fn lua_binop_less<'a>(l: &LV, r: &LV) -> LV {
    match (l, r) {
        (Num(n), Num(m)) => {
            if n < m {
                LV::LuaTrue
            } else {
                LV::LuaFalse
            }
        }
        (Num(_), _) => panic!("FAILURE (need to implement lua-bubbling failure)"),
        _ => {
            dbg!(l);
            dbg!(r);
            panic!("need binop impl");
        }
    }
}
/// Lua `>`: numbers compare numerically, strings lexicographically; any other
/// pairing is a comparison error.
pub fn lua_binop_greater<'a>(l: &LV, r: &LV) -> LuaResult {
    match (l, r) {
        (Num(n), Num(m)) => {
            let res = if n > m { LV::LuaTrue } else { LV::LuaFalse };
            Ok(res)
        }
        (LuaS(s), LuaS(t)) => {
            let res = if s > t { LV::LuaTrue } else { LV::LuaFalse };
            Ok(res)
        }
        _ => LuaErr::msg(format!("Attempt to compare {} and {}", l, r).as_str()),
    }
}
/// Converts a Lua number to i32 only when the value is exactly representable
/// (the round-trip through f64 must be lossless).
fn try_convert_i32(fv: LNum) -> Result<i32, String> {
    let as_float = match fv {
        LNum::Float(x) => x,
        LNum::Int(x) => x as f64,
    };
    // `as` saturates/truncates; accept only exact round-trips.
    let truncated = as_float as i32;
    if f64::from(truncated) == as_float {
        Ok(truncated)
    } else {
        Err("Cannot cast to integer".to_string())
    }
}
use eval::LNum;
use numbers::unwrap_num_or_stringed_num;
use crate::eval::LuaErr;
/**
 * shifting logic to match lua
 * l << r when shift_left, l >> r when not shift_left
 **/
/// Fixes: shift amounts are now bounds-checked BEFORE the i64 -> u32 cast.
/// Previously `r as u32` silently truncated huge amounts (e.g. a shift by
/// 2^32 became a shift by 0 and returned `l` unchanged); Lua defines any
/// shift of 64 bits or more to produce 0. `r == i64::MIN` is also handled,
/// since negating it would overflow.
fn lua_shift_logic(l: i64, r: i64, shift_left: bool) -> Result<i64, LuaErr> {
    if r < 0 {
        // A negative amount shifts in the opposite direction. i64::MIN cannot
        // be negated, but its magnitude is >= 64 so the result is 0 anyway.
        if r == i64::MIN {
            return Ok(0);
        }
        return lua_shift_logic(l, -r, !shift_left);
    }
    // Lua: shifting by >= the integer width always yields 0.
    if r >= 64 {
        return Ok(0);
    }
    let amount = r as u32;
    let shifted = if shift_left {
        l.overflowing_shl(amount)
    } else {
        l.overflowing_shr(amount)
    };
    if shifted.1 {
        // overflowed shift amount (cannot happen after the guard above)
        Ok(0)
    } else {
        Ok(shifted.0)
    }
}
/// Lua `<<`: both operands must coerce to exact integers; the shift itself
/// goes through `lua_shift_logic`.
pub fn lua_binop_lshift<'a>(l: &LV, r: &LV) -> LuaResult {
    match (unwrap_num_or_stringed_num(l), unwrap_num_or_stringed_num(r)) {
        (Ok(n), Ok(m)) => {
            // we need to confirm if we have integer representations
            let int_n = lua_coerce_int(&LV::Num(n))?;
            let int_m = lua_coerce_int(&LV::Num(m))?;
            let result = lua_shift_logic(int_n, int_m, true)?;
            return Ok(Num(LNum::Int(result)));
        }
        _ => LuaErr::msg(format!("Bitshift attempt for {} and {}", l, r).as_str()),
    }
}
/// Lua `>>`.
/// NOTE(review): unlike `lua_binop_lshift`, this truncates through i32 and
/// uses Rust's native `>>` (arithmetic shift on signed ints) instead of
/// `lua_shift_logic` — confirm the asymmetry is intentional.
pub fn lua_binop_rshift<'a>(l: &LV, r: &LV) -> LuaResult {
    match (unwrap_num_or_stringed_num(l), unwrap_num_or_stringed_num(r)) {
        (Ok(n), Ok(m)) => {
            // we need to confirm if we have integer representations
            let int_n = try_convert_i32(n);
            let int_m = try_convert_i32(m);
            match (int_n, int_m) {
                (Ok(x), Ok(y)) => Ok(Num((x >> y).into())),
                _ => LuaErr::msg(format!("No integer conversions for {} and {}", n, m).as_str()),
            }
        }
        _ => LuaErr::msg(format!("Bitshift attempt for {} and {}", l, r).as_str()),
    }
}
/// Lua `|`: bitwise OR of operands that convert to exact i32 integers.
pub fn lua_binop_binor<'a>(l: &LV, r: &LV) -> LuaResult {
    match (unwrap_num_or_stringed_num(l), unwrap_num_or_stringed_num(r)) {
        // Both sides must additionally have exact integer representations.
        (Ok(n), Ok(m)) => match (try_convert_i32(n), try_convert_i32(m)) {
            (Ok(x), Ok(y)) => Ok(Num((x | y).into())),
            _ => LuaErr::msg(format!("No integer conversions for {} and {}", n, m).as_str()),
        },
        _ => LuaErr::msg(format!("Bitshift attempt for {} and {}", l, r).as_str()),
    }
}
/// Lua `&`: bitwise AND of operands that convert to exact i32 integers.
pub fn lua_binop_binand<'a>(l: &LV, r: &LV) -> LuaResult {
    match (unwrap_num_or_stringed_num(l), unwrap_num_or_stringed_num(r)) {
        // Both sides must additionally have exact integer representations.
        (Ok(n), Ok(m)) => match (try_convert_i32(n), try_convert_i32(m)) {
            (Ok(x), Ok(y)) => Ok(Num((x & y).into())),
            _ => LuaErr::msg(format!("No integer conversions for {} and {}", n, m).as_str()),
        },
        _ => LuaErr::msg(format!("Bitshift attempt for {} and {}", l, r).as_str()),
    }
}
/// Lua `~` (binary): bitwise XOR of operands that convert to exact i32 integers.
pub fn lua_binop_binxor<'a>(l: &LV, r: &LV) -> LuaResult {
    match (unwrap_num_or_stringed_num(l), unwrap_num_or_stringed_num(r)) {
        // Both sides must additionally have exact integer representations.
        (Ok(n), Ok(m)) => match (try_convert_i32(n), try_convert_i32(m)) {
            (Ok(x), Ok(y)) => Ok(Num((x ^ y).into())),
            _ => LuaErr::msg(format!("No integer conversions for {} and {}", n, m).as_str()),
        },
        _ => LuaErr::msg(format!("Bitshift attempt for {} and {}", l, r).as_str()),
    }
}
/// Lua `..`: concatenates two strings; numeric operands are converted to
/// their print representation first. Other operand types panic (placeholder).
pub fn lua_binop_concat<'a>(l: &LV, r: &LV) -> LV {
    let left_value = match l {
        LuaS(s) => s.to_string(),
        Num(_) => lua_fmt_for_print(l),
        _ => {
            dbg!(l);
            panic!("invalid operand for concat");
        }
    };
    let right_value = match r {
        LuaS(s) => s.to_string(),
        Num(_) => lua_fmt_for_print(r),
        _ => {
            dbg!(r);
            panic!("invalid operand for concat");
        }
    };
    LuaS(format!("{}{}", left_value, right_value))
}
|
#![deny(clippy::pedantic)]
#![allow(clippy::useless_attribute)]
#![no_std]
#![feature(core_intrinsics)]
#![feature(stmt_expr_attributes)]
#![allow(incomplete_features)]
#![feature(generic_associated_types)]
#![feature(total_cmp)]
#![feature(specialization)]
#![feature(option_result_unwrap_unchecked)]
#[doc(hidden)]
pub extern crate alloc;
#[cfg(feature = "mpi")]
#[doc(hidden)]
extern crate rsmpi as mpi;
#[macro_use]
extern crate contracts;
#[macro_use]
extern crate typed_builder;
pub mod cogs;
pub mod event;
pub mod intrinsics;
pub mod landscape;
pub mod lineage;
pub mod reporter;
pub mod simulation;
|
extern crate rand;
use serde_yaml; // 0.8.7
use std::convert::TryFrom;
use rand::prelude::*;
use rand_pcg::Pcg64;
use regex::Regex;
use std::collections::BTreeMap;
use std::fs::File;
/// Debug helper: prints each argument expression as `expr = value, `.
macro_rules! puts{
    ($($a:expr),*) => {
        println!(concat!($(stringify!($a), " = {:?}, "),*), $($a),*);
    }
}
// The 26 lowercase ASCII letters ordered (roughly) by descending frequency in
// English text, so guesses start with the most common letters.
static ASCII_SORTED: [char; 26] = [
    'e', 't', 'a', 'i', 'n', 'o', 's', 'h', 'r', 'd', 'l', 'u', 'c', 'm', 'f', 'w', 'y', 'g', 'p',
    'b', 'v', 'k', 'q', 'j', 'x', 'z',
];
/// Masks every un-guessed ASCII letter of `input` with '_' (case-insensitive
/// matching against `guesses`, which holds lowercase letters) and returns the
/// masked string plus the number of masked characters.
///
/// Improvements: `char::is_ascii_alphabetic` replaces a `Regex` matching
/// `[A-Za-z]` that was recompiled on every call, and `to_ascii_lowercase`
/// replaces the `to_lowercase().collect::<Vec<char>>()[0]` dance (equivalent
/// for ASCII letters). Parameters are widened to `&str`/`&[char]`; existing
/// `&String`/`&Vec<char>` call sites coerce unchanged.
fn obfuscate_input(input: &str, guesses: &[char]) -> (String, u128) {
    let mut missing: u128 = 0;
    let mut result = String::with_capacity(input.len());
    for c in input.chars() {
        if c.is_ascii_alphabetic() {
            if guesses.contains(&c.to_ascii_lowercase()) {
                result.push(c);
            } else {
                result.push('_');
                missing += 1;
            }
        } else {
            // Non-letters (spaces, punctuation) are always revealed.
            result.push(c);
        }
    }
    (result, missing)
}
/// Picks a letter that occurs in `input` (case-insensitively) and has not been
/// guessed yet, sampling from `ASCII_SORTED`.
/// NOTE(review): the RNG is re-seeded with the constant 2 on every call, so the
/// "random" sampling order is identical each time — confirm this is intended.
fn random_unguessed_letter_from_input(input: &String, guesses: &Vec<char>) -> char {
    let mut rng = Pcg64::seed_from_u64(2);
    let mut next_char: char;
    let input_lower = input.to_lowercase();
    let mut guess_count = 0;
    loop {
        let char_index = rng.gen_range(0..ASCII_SORTED.len());
        next_char = ASCII_SORTED[char_index];
        // puts!(char_index, next_char);
        let lowercase = next_char.to_lowercase().collect::<Vec<char>>()[0];
        if !guesses.contains(&lowercase) && input_lower.contains(&lowercase.to_string()) {
            return next_char;
        }
        guess_count += 1;
        // Bail out rather than spin forever if no candidate letter remains.
        if guess_count > 100000 {
            panic!("Whoops, infinite loop!");
        }
    }
}
/// Plays an automated game of hangman for every answer in `./config.yml`.
///
/// The config file maps a category name (e.g. "actors") to a list of
/// answers. For each answer the program prints a hint, then repeatedly
/// guesses letters — first the most common English letters, then letters
/// drawn from the answer itself — until at most 3 letters remain hidden.
///
/// # Errors
/// Returns an error if the config file cannot be opened or parsed.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let config_file = File::open("./config.yml")?;
    let config: BTreeMap<String, Vec<String>> = serde_yaml::from_reader(config_file)?;
    let mut total_answers = 0;
    for (answer_type, answers) in config {
        for answer in answers {
            total_answers += 1;
            print!("Hint: ");
            match answer_type.as_str() {
                "actors" => println!("This person is an actor."),
                "scientists" => println!("This person is a scientist."),
                "fruits" => println!("This is a fruit."),
                _ => panic!("Don't know how to handle {}!", answer_type),
            }
            println!();
            let mut guessed_letters: Vec<char> = Vec::new();
            // Upper bound on the hidden-letter count (byte length includes
            // non-letters); refined by the first obfuscate_input call below.
            // usize -> u128 is a lossless widening cast.
            let mut missing_count: u128 = answer.len() as u128;
            let mut common_letter_guesses = 0;
            // Keep guessing until at most 3 letters remain hidden.
            while missing_count > 3 {
                let (result, next_missing_count) = obfuscate_input(&answer, &guessed_letters);
                missing_count = next_missing_count;
                println!("{}", result);
                let next_letter;
                if common_letter_guesses < 4 {
                    // Start with the four most common English letters...
                    next_letter = ASCII_SORTED[common_letter_guesses];
                    common_letter_guesses += 1;
                } else {
                    // ...then pull letters directly from the answer, in a
                    // deterministic pseudo-random order.
                    next_letter = random_unguessed_letter_from_input(&answer, &guessed_letters);
                }
                guessed_letters.push(next_letter);
                println!("=> {}", next_letter);
                let (_result, after_missing_count) = obfuscate_input(&answer, &guessed_letters);
                // How many occurrences the latest guess uncovered.
                let match_count = missing_count - after_missing_count;
                match match_count {
                    0 => println!("- No {}'s!", next_letter),
                    1 => println!("- 1 {}!", next_letter),
                    _ => println!("- {} {}'s!", match_count, next_letter),
                }
                println!();
                missing_count = after_missing_count;
            }
            println!("Would you like to make a guess?");
            println!("Answer: {}", answer);
            println!("\n---------------------------------------------\n");
        }
    }
    puts!(total_answers);
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `obfuscate_input` masks unguessed letters, preserves the original
    /// case of revealed letters, passes non-letters through, and counts
    /// the letters still hidden.
    #[test]
    fn test_obfuscate_input() {
        assert_eq!(
            (String::from("_____"), 5),
            obfuscate_input(&String::from("asdff"), &vec![])
        );
        assert_eq!(
            (String::from("a____"), 4),
            obfuscate_input(&String::from("asdff"), &vec!['a'])
        );
        assert_eq!(
            (String::from("a__ff"), 2),
            obfuscate_input(&String::from("asdff"), &vec!['a', 'f'])
        );
        assert_eq!(
            (String::from("asdff"), 0),
            obfuscate_input(&String::from("asdff"), &vec!['a', 's', 'd', 'f'])
        );
        // The space in "Tom Cruise" is never masked or counted.
        assert_eq!(
            (String::from("___ ______"), 9),
            obfuscate_input(&String::from("Tom Cruise"), &vec![])
        );
        // Guesses are lowercase but reveal uppercase occurrences too.
        assert_eq!(
            (String::from("T__ ______"), 8),
            obfuscate_input(&String::from("Tom Cruise"), &vec!['t'])
        );
        assert_eq!(
            (String::from("T_m ______"), 7),
            obfuscate_input(&String::from("Tom Cruise"), &vec!['t', 'm'])
        );
        assert_eq!(
            (String::from("T_m C_____"), 6),
            obfuscate_input(&String::from("Tom Cruise"), &vec!['t', 'm', 'c'])
        );
        assert_eq!(
            (String::from("Tom Cr____"), 4),
            obfuscate_input(&String::from("Tom Cruise"), &vec!['t', 'o', 'm', 'c', 'r'])
        );
        // Guessing a letter that is absent ('z') changes nothing.
        assert_eq!(
            (String::from("Tom Cr____"), 4),
            obfuscate_input(
                &String::from("Tom Cruise"),
                &vec!['t', 'o', 'm', 'c', 'r', 'z']
            )
        );
        assert_eq!(
            (String::from("Tom Cruise"), 0),
            obfuscate_input(
                &String::from("Tom Cruise"),
                &vec!['t', 'o', 'm', 'c', 'r', 'u', 'i', 's', 'e']
            )
        );
    }

    /// The RNG is seeded with a fixed value, so the picked letters are
    /// reproducible: these expected values pin that exact sequence.
    #[test]
    fn test_random_unguessed_letter_from_input() {
        assert_eq!(
            'a',
            random_unguessed_letter_from_input(&String::from("asdff"), &vec![])
        );
        assert_eq!(
            'd',
            random_unguessed_letter_from_input(&String::from("asdff"), &vec!['a'])
        );
        assert_eq!(
            's',
            random_unguessed_letter_from_input(&String::from("asdff"), &vec!['a', 'd'])
        );
        assert_eq!(
            'd',
            random_unguessed_letter_from_input(&String::from("sdff"), &vec![])
        );
        assert_eq!(
            'u',
            random_unguessed_letter_from_input(&String::from("foobarbazqux"), &vec![])
        );
        assert_eq!(
            'a',
            random_unguessed_letter_from_input(&String::from("foobarbazqux"), &vec!['u'])
        );
    }

    /// When every letter of the input has already been guessed, the search
    /// can never succeed and gives up with a panic.
    #[test]
    #[should_panic(expected = "Whoops, infinite loop!")]
    fn test_random_unguessed_letter_from_input_panic() {
        random_unguessed_letter_from_input(&String::from("asd"), &vec!['a', 's', 'd']);
    }
}
|
mod assembler;
use std::env;
use assembler::assemble;
/// CLI entry point: expects an input assembly file and an output file path,
/// e.g. `./mu0_arm_assembler asm.s out.txt`. Prints usage when either
/// argument is missing.
fn main() {
    let args: Vec<_> = env::args().collect();
    if args.len() < 3 {
        println!(
            "Please provide an input assembly file as well as an output file.
            Eg: ./mu0_arm_assembler asm.s out.txt"
        );
    }
    else {
        // `&args[n][..]` re-borrows the String as &str; the previous
        // `.to_owned()` cloned the argument only to immediately borrow it.
        assemble::assemble_to_file(&args[1][..], &args[2][..]);
    }
}
|
// (C)opyleft 2013,2014 Frank Denis
/*!
* Bindings for the GeoIP library
*/
#![desc = "Bindings for the GeoIP library."]
#![license = "BSD"]
#![crate_name = "geoip"]
#![crate_type = "rlib"]
#![warn(non_camel_case_types,
non_upper_case_globals,
unused_qualifications)]
extern crate libc;
extern crate serialize;
use libc::{c_void, c_char, c_int, c_ulong, c_float};
use std::c_str::CString;
use std::fmt;
use std::io::net::ip::{IpAddr,Ipv4Addr,Ipv6Addr};
// Opaque handle to the C library's GeoIP object.
type RawGeoIp = *const c_void;
// Raw IPv6 address as 16 bytes in network byte order
// (pre-1.0 Rust fixed-size array syntax).
type In6Addr = [u8, ..16];

/// Mirrors the C `GeoIPLookup` out-parameter: the library writes the
/// netmask of the matched record into it during `*_gl` lookups.
#[repr(C)]
struct GeoIpLookup {
    netmask: c_int
}

impl GeoIpLookup {
    /// Returns a zeroed lookup struct for the C API to fill in.
    fn new() -> GeoIpLookup {
        GeoIpLookup {
            netmask: 0
        }
    }
}
// FFI declarations for the MaxMind GeoIP C library (legacy API).
#[link(name = "GeoIP")]
extern {
    // Open/close a database handle.
    fn GeoIP_open(dbtype: *const c_char, flags: c_int) -> RawGeoIp;
    fn GeoIP_delete(db: RawGeoIp);
    // Name (e.g. AS description) lookups; `gl` receives the netmask.
    fn GeoIP_name_by_ipnum_gl(db: RawGeoIp, ipnum: c_ulong, gl: *mut GeoIpLookup) -> *const c_char;
    fn GeoIP_name_by_ipnum_v6_gl(db: RawGeoIp, ipnum: In6Addr, gl: *mut GeoIpLookup) -> *const c_char;
    // City-record lookups; the returned record must be freed with
    // GeoIPRecord_delete.
    fn GeoIP_record_by_ipnum(db: RawGeoIp, ipnum: c_ulong) -> *const GeoIpRecord;
    fn GeoIP_record_by_ipnum_v6(db: RawGeoIp, ipnum: In6Addr) -> *const GeoIpRecord;
    fn GeoIPRecord_delete(gir: *const GeoIpRecord);
    fn GeoIP_set_charset(db: RawGeoIp, charset: c_int) -> c_int;
}
// Charset constant for GeoIP_set_charset (value mirrors the C library).
enum Charset {
    UTF8 = 1
}

/// Database-open flags; discriminants mirror the C library's GeoIPOptions.
pub enum Options {
    Standard = 0,
    MemoryCache = 1,
    CheckCache = 2,
    IndexCache = 4,
    MmapCache = 8
}

/// Database edition identifiers; discriminants must match the C library's
/// GeoIPDBTypes constants, hence the non-sequential values.
pub enum DBType {
    CountryEdition = 1,
    RegionEditionRev0 = 7,
    CityEditionRev0 = 6,
    ORGEdition = 5,
    ISPEdition = 4,
    CityEditionRev1 = 2,
    RegionEditionRev1 = 3,
    ProxyEdition = 8,
    ASNUMEdition = 9,
    NetSpeedEdition = 10,
    DomainEdition = 11,
    CountryEditionV6 = 12,
    LocationAEdition = 13,
    AccuracyRadiusEdition = 14,
    LargeCountryEdition = 17,
    LargeCountryEditionV6 = 18,
    ASNumEditionV6 = 21,
    ISPEditionV6 = 22,
    ORGEditionV6 = 23,
    DomainEditionV6 = 24,
    // NOTE(review): "Loction" (sic) — misspelled, but renaming the variant
    // would break callers; kept as-is.
    LoctionAEditionV6 = 25,
    RegistrarEdition = 26,
    RegistrarEditionV6 = 27,
    UserTypeEdition = 28,
    UserTypeEditionV6 = 29,
    CityEditionRev1V6 = 30,
    CityEditionRev0V6 = 31,
    NetSpeedEditionRev1 = 32,
    NetSpeedEditionRev1V6 = 33,
    CountryConfEdition = 34,
    CityConfEdition = 35,
    RegionConfEdition = 36,
    PostalConfEdition = 37,
    AccuracyRadiusEditionV6 = 38
}
/// Owning wrapper around the C library's opaque GeoIP handle; the handle
/// is released in the `Drop` impl below.
pub struct GeoIp {
    db: RawGeoIp
}

/// Field-for-field mirror of the C library's GeoIPRecord struct, filled in
/// by GeoIP_record_by_ipnum(_v6) and freed with GeoIPRecord_delete. String
/// fields are NUL-terminated C strings and may be NULL.
#[repr(C)]
pub struct GeoIpRecord {
    country_code: *const c_char,
    country_code3: *const c_char,
    country_name: *const c_char,
    region: *const c_char,
    city: *const c_char,
    postal_code: *const c_char,
    latitude: c_float,
    longitude: c_float,
    dma_code: c_int,
    area_code: c_int,
    charset: c_int,
    continent_code: *const c_char,
    netmask: c_int
}
/// Autonomous-system lookup result: AS number, AS name, and the netmask of
/// the matched network. (Pre-1.0 `#[deriving]` / `uint` dialect.)
#[deriving(Decodable, Encodable)]
pub struct ASInfo {
    pub asn: uint,
    pub name: String,
    pub netmask: uint
}

/// Owned, safe copy of a C GeoIPRecord; `None` fields were NULL (or not
/// valid UTF-8) in the underlying record.
#[deriving(Decodable, Encodable)]
pub struct CityInfo {
    pub country_code: Option<String>,
    pub country_code3: Option<String>,
    pub country_name: Option<String>,
    pub region: Option<String>,
    pub city: Option<String>,
    pub postal_code: Option<String>,
    pub latitude: f32,
    pub longitude: f32,
    pub dma_code: uint,
    pub area_code: uint,
    pub charset: uint,
    pub continent_code: Option<String>,
    pub netmask: uint
}
/// Converts a possibly-NULL C string pointer into an owned `String`.
/// Returns `None` for NULL pointers or non-UTF-8 contents. Unsafe because
/// the pointer is dereferenced; the caller must guarantee it is valid.
unsafe fn maybe_string(c_str: *const c_char) -> Option<String> {
    c_str.as_ref().and_then(|opt| {
        // `false`: do not take ownership of the C buffer (it still belongs
        // to the GeoIPRecord) — presumed old std::c_str semantics, TODO confirm.
        CString::new(opt, false).as_str().map(|s| s.to_string())
    })
}
impl CityInfo {
    /// Copies every field of a C GeoIPRecord into an owned `CityInfo`.
    /// Unsafe because the record's string pointers are dereferenced; the
    /// record must still be alive when this is called.
    unsafe fn from_geoiprecord(res: &GeoIpRecord) -> CityInfo {
        CityInfo {
            country_code: maybe_string(res.country_code),
            country_code3: maybe_string(res.country_code3),
            country_name: maybe_string(res.country_name),
            region: maybe_string(res.region),
            city: maybe_string(res.city),
            postal_code: maybe_string(res.postal_code),
            latitude: res.latitude as f32,
            longitude: res.longitude as f32,
            dma_code: res.dma_code as uint,
            area_code: res.area_code as uint,
            charset: res.charset as uint,
            continent_code: maybe_string(res.continent_code),
            netmask: res.netmask as uint
        }
    }
}
// Pre-1.0 `fmt::Show` (ancestor of today's Debug/Display): renders as
// "<asn>\t<name>"; the netmask field is not included in the output.
impl fmt::Show for ASInfo {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}\t{}", self.asn, self.name)
    }
}
// An IP address in the representations the C API expects: IPv4 as a single
// host-order integer, IPv6 as a 16-byte array in network byte order.
enum CNetworkIp {
    V4(c_ulong),
    V6(In6Addr)
}

impl CNetworkIp {
    /// Packs an address into its C representation.
    fn new(ip: IpAddr) -> CNetworkIp {
        match ip {
            Ipv4Addr(a, b, c, d) => {
                // `as` binds tighter than `<<`, so each term is
                // (octet as c_ulong) << shift — a.b.c.d packed big-endian
                // into one integer.
                V4((a as c_ulong << 24) | (b as c_ulong << 16) |
                   (c as c_ulong << 8) | (d as c_ulong))
            },
            Ipv6Addr(a, b, c, d, e, f, g, h) => {
                // Split each 16-bit group into two bytes, high byte first.
                V6([(a >> 8) as u8, a as u8,
                    (b >> 8) as u8, b as u8,
                    (c >> 8) as u8, c as u8,
                    (d >> 8) as u8, d as u8,
                    (e >> 8) as u8, e as u8,
                    (f >> 8) as u8, f as u8,
                    (g >> 8) as u8, g as u8,
                    (h >> 8) as u8, h as u8])
            }
        }
    }
}
impl GeoIp {
    /// Opens a GeoIP database file with the given caching options and
    /// switches the handle to UTF-8 output.
    ///
    /// Errors if the path is not valid UTF-8, the file cannot be opened,
    /// or the charset cannot be set.
    pub fn open(path: &Path, options: Options) -> Result<GeoIp, String> {
        let file = match path.as_str() {
            None => return Err(format!("Invalid path {}", path.display())),
            Some(file) => file
        };
        let db = unsafe {
            GeoIP_open(file.to_c_str().unwrap(), options as c_int)
        };
        if db.is_null() {
            return Err(format!("Can't open {}", file));
        }
        if unsafe { GeoIP_set_charset(db, UTF8 as c_int) } != 0 {
            return Err("Can't set charset to UTF8".to_string());
        }
        Ok(GeoIp { db: db })
    }

    /// Looks up city-level information for `ip`; returns `None` when the
    /// database has no record for that address.
    pub fn city_info_by_ip(&self, ip: IpAddr) -> Option<CityInfo> {
        let cres = match CNetworkIp::new(ip) {
            V4(ip) => unsafe { GeoIP_record_by_ipnum(self.db, ip) },
            V6(ip) => unsafe { GeoIP_record_by_ipnum_v6(self.db, ip) }
        };
        if cres.is_null() { return None; }
        unsafe {
            // Copy everything out of the C record before freeing it.
            let city_info = CityInfo::from_geoiprecord(&*cres);
            GeoIPRecord_delete(cres);
            // NOTE(review): `mem::forget` on a raw pointer is a no-op, and
            // the record was already freed on the line above — confirm this
            // call is intentional.
            std::mem::forget(cres);
            Some(city_info)
        }
    }

    /// Looks up autonomous-system information ("AS<number> <name>") for
    /// `ip` in an ASNum-edition database. Returns `None` when there is no
    /// record or the description does not start with "AS".
    pub fn as_info_by_ip(&self, ip: IpAddr) -> Option<ASInfo> {
        let mut gl = GeoIpLookup::new();
        let cres = match CNetworkIp::new(ip) {
            V4(ip) => unsafe { GeoIP_name_by_ipnum_gl(self.db, ip, &mut gl) },
            V6(ip) => unsafe { GeoIP_name_by_ipnum_v6_gl(self.db, ip, &mut gl) }
        };
        if cres.is_null() {
            return None;
        }
        // `true`: the CString takes ownership of (and will free) the C
        // buffer — presumed old std::c_str semantics, TODO confirm.
        let description_cstr = unsafe { CString::new(cres, true) };
        let description = match description_cstr.as_str() {
            None => return None,
            Some(description) => description
        };
        // Pre-1.0 `splitn` takes the number of splits first, so this yields
        // at most two parts: "AS<number>" and the rest (the AS name).
        let mut di = description.splitn(1, ' ');
        let asn = match di.next() {
            None => return None,
            Some(asn) => {
                if ! asn.starts_with("AS") {
                    return None
                } else {
                    // NOTE(review): unwrap panics if the digits after "AS"
                    // fail to parse — confirm that's acceptable here.
                    from_str::<uint>(asn.slice_from(2)).unwrap()
                }
            }
        };
        let name = di.next().unwrap_or("(none)");
        let as_info = ASInfo {
            asn: asn,
            name: name.to_string(),
            netmask: gl.netmask as uint
        };
        Some(as_info)
    }
}
impl Drop for GeoIp {
    /// Releases the underlying C GeoIP handle when the wrapper goes away.
    fn drop(&mut self) {
        unsafe {
            GeoIP_delete(self.db);
        }
    }
}
// NOTE(review): both tests require local GeoIP database files under
// /opt/geoip and assert data for specific networks; they will fail on
// machines without those fixtures.
#[test]
fn geoip_test_basic() {
    let geoip = match GeoIp::open(&from_str("/opt/geoip/GeoIPASNum.dat").unwrap(), MemoryCache) {
        Err(err) => panic!(err),
        Ok(geoip) => geoip
    };
    let ip = from_str("91.203.184.192").unwrap();
    let res = geoip.as_info_by_ip(ip).unwrap();
    assert!(res.asn == 41064);
    assert!(res.name.as_slice().contains("Telefun"));
    assert!(res.netmask == 22);
}

#[test]
fn geoip_test_city() {
    let geoip = match GeoIp::open(&from_str("/opt/geoip/GeoLiteCity.dat").unwrap(), MemoryCache) {
        Err(err) => panic!(err),
        Ok(geoip) => geoip
    };
    let ip = from_str("8.8.8.8").unwrap();
    let res = geoip.city_info_by_ip(ip).unwrap();
    assert!(res.city.unwrap().as_slice() == "Mountain View");
}
|
// Copyright 2018-2019 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
// TODO: change this back to `clippy::cognitive_complexity` when Clippy stable
// deprecates `clippy::cyclomatic_complexity`.
#![allow(clippy::complexity)]
use std::{
fs,
path::Path,
str,
sync::{Arc, RwLock},
thread,
};
use byteorder::{ByteOrder, LittleEndian};
use tempfile::Builder;
use rkv::{
backend::{
BackendEnvironmentBuilder, BackendInfo, BackendStat, Lmdb, LmdbDatabase, LmdbEnvironment,
LmdbRwTransaction,
},
EnvironmentFlags, Rkv, SingleStore, StoreError, StoreOptions, Value, Writer,
};
/// Smoke-tests an opened Rkv environment: the default (unnamed) store and a
/// named store "s" can both be created, and reading a missing key from "s"
/// yields `None`.
fn check_rkv(k: &Rkv<LmdbEnvironment>) {
    let _default = k
        .open_single(None, StoreOptions::create())
        .expect("created default");
    let store = k.open_single("s", StoreOptions::create()).expect("opened");
    let r = k.read().expect("reader");
    assert_eq!(None, store.get(&r, "foo").expect("success but no value"));
}
// LMDB's default map size: 1MiB (1024 * 1024 bytes).
const DEFAULT_SIZE: usize = 1024 * 1024;
/// We can't open a directory that doesn't exist.
#[test]
fn test_open_fails() {
    let root = Builder::new()
        .prefix("test_open_fails")
        .tempdir()
        .expect("tempdir");
    assert!(root.path().exists());
    // Point at a subdirectory that was never created.
    let missing = root.path().join("nope/");
    assert!(!missing.exists());
    let expected_path = missing.to_path_buf();
    // Opening must fail with UnsuitableEnvironmentPath carrying that path.
    match Rkv::new::<Lmdb>(missing.as_path()).err() {
        Some(StoreError::UnsuitableEnvironmentPath(p)) => assert_eq!(expected_path, p),
        _ => panic!("expected error"),
    };
}
/// A plain, existing directory opens successfully.
#[test]
fn test_open() {
    let dir = Builder::new()
        .prefix("test_open")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", dir.path());
    fs::create_dir_all(dir.path()).expect("dir created");
    assert!(dir.path().is_dir());
    let env = Rkv::new::<Lmdb>(dir.path()).expect("new succeeded");
    check_rkv(&env);
}
/// An environment can also be opened through an explicit builder.
#[test]
fn test_open_from_builder() {
    let dir = Builder::new()
        .prefix("test_open_from_builder")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", dir.path());
    fs::create_dir_all(dir.path()).expect("dir created");
    assert!(dir.path().is_dir());
    let mut builder = Rkv::environment_builder::<Lmdb>();
    builder.set_max_dbs(2);
    let env = Rkv::from_builder(dir.path(), builder).expect("rkv");
    check_rkv(&env);
}
#[test]
fn test_open_from_builder_with_no_subdir_1() {
    let root = Builder::new()
        .prefix("test_open_from_builder")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", root.path());
    fs::create_dir_all(root.path()).expect("dir created");
    assert!(root.path().is_dir());
    // First, create the environment in the directory the usual way
    // (this writes data.mdb inside it).
    {
        let mut builder = Rkv::environment_builder::<Lmdb>();
        builder.set_max_dbs(2);
        let k = Rkv::from_builder(root.path(), builder).expect("rkv");
        check_rkv(&k);
    }
    // Then reopen it with NO_SUB_DIR, pointing directly at the data file
    // created above rather than at the directory.
    {
        let mut builder = Rkv::environment_builder::<Lmdb>();
        builder.set_flags(EnvironmentFlags::NO_SUB_DIR);
        builder.set_max_dbs(2);
        let mut datamdb = root.path().to_path_buf();
        datamdb.push("data.mdb");
        let k = Rkv::from_builder(&datamdb, builder).expect("rkv");
        check_rkv(&k);
    }
}
#[test]
#[should_panic(expected = "rkv: UnsuitableEnvironmentPath")]
fn test_open_from_builder_with_no_subdir_2() {
    let root = Builder::new()
        .prefix("test_open_from_builder")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", root.path());
    fs::create_dir_all(root.path()).expect("dir created");
    assert!(root.path().is_dir());
    // Create a normal environment first (this writes data.mdb).
    {
        let mut builder = Rkv::environment_builder::<Lmdb>();
        builder.set_max_dbs(2);
        let k = Rkv::from_builder(root.path(), builder).expect("rkv");
        check_rkv(&k);
    }
    // With NO_SUB_DIR the path must name the existing data file;
    // "bogus.mdb" does not exist, hence the expected panic above.
    {
        let mut builder = Rkv::environment_builder::<Lmdb>();
        builder.set_flags(EnvironmentFlags::NO_SUB_DIR);
        builder.set_max_dbs(2);
        let mut datamdb = root.path().to_path_buf();
        datamdb.push("bogus.mdb");
        let k = Rkv::from_builder(&datamdb, builder).expect("rkv");
        check_rkv(&k);
    }
}
/// With `set_make_dir_if_needed`, Rkv creates the directory itself, so the
/// explicit `create_dir_all` used by the other tests is unnecessary.
#[test]
fn test_open_from_builder_with_dir_1() {
    let dir = Builder::new()
        .prefix("test_open_from_builder")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", dir.path());
    let mut builder = Rkv::environment_builder::<Lmdb>();
    builder.set_max_dbs(2);
    builder.set_make_dir_if_needed(true);
    let env = Rkv::from_builder(dir.path(), builder).expect("rkv");
    check_rkv(&env);
}
#[test]
#[should_panic(expected = "rkv: UnsuitableEnvironmentPath(\"bogus\")")]
fn test_open_from_builder_with_dir_2() {
    // Without set_make_dir_if_needed, a nonexistent directory is rejected.
    let root = Path::new("bogus");
    println!("Root path: {root:?}");
    assert!(!root.is_dir());
    let mut builder = Rkv::environment_builder::<Lmdb>();
    builder.set_max_dbs(2);
    let k = Rkv::from_builder(root, builder).expect("rkv");
    check_rkv(&k);
}
#[test]
#[should_panic(expected = "opened: DbsFull")]
fn test_create_with_capacity_1() {
    let root = Builder::new()
        .prefix("test_create_with_capacity")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", root.path());
    fs::create_dir_all(root.path()).expect("dir created");
    assert!(root.path().is_dir());
    let k = Rkv::with_capacity::<Lmdb>(root.path(), 1).expect("rkv");
    check_rkv(&k);
    // This errors with "opened: DbsFull" because we specified a capacity of one (database),
    // and check_rkv already opened one (plus the default database, which doesn't count
    // against the limit).
    let _zzz = k
        .open_single("zzz", StoreOptions::create())
        .expect("opened");
}
#[test]
fn test_create_with_capacity_2() {
    let dir = Builder::new()
        .prefix("test_create_with_capacity")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", dir.path());
    fs::create_dir_all(dir.path()).expect("dir created");
    assert!(dir.path().is_dir());
    let env = Rkv::with_capacity::<Lmdb>(dir.path(), 1).expect("rkv");
    check_rkv(&env);
    // No "opened: DbsFull" here: even though the capacity is one (database)
    // and check_rkv already opened one, the default (unnamed) database does
    // not count against the limit.
    let _default = env.open_single(None, StoreOptions::create()).expect("opened");
}
#[test]
#[should_panic(expected = "opened: DbsFull")]
fn test_open_with_capacity_1() {
    let root = Builder::new()
        .prefix("test_open_with_capacity")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", root.path());
    fs::create_dir_all(root.path()).expect("dir created");
    assert!(root.path().is_dir());
    let k = Rkv::with_capacity::<Lmdb>(root.path(), 1).expect("rkv");
    check_rkv(&k);
    // check_rkv already opened the single permitted named database, so even
    // a read-only open of a second named store hits DbsFull.
    let _zzz = k
        .open_single("zzz", StoreOptions::default())
        .expect("opened");
}
#[test]
fn test_open_with_capacity_2() {
    let dir = Builder::new()
        .prefix("test_open_with_capacity")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", dir.path());
    fs::create_dir_all(dir.path()).expect("dir created");
    assert!(dir.path().is_dir());
    let env = Rkv::with_capacity::<Lmdb>(dir.path(), 1).expect("rkv");
    check_rkv(&env);
    // Re-opening the default (unnamed) database never counts against the
    // named-database capacity.
    let _default = env.open_single(None, StoreOptions::default()).expect("opened");
}
/// After check_rkv, the only named database in the environment is "s".
#[test]
fn test_list_dbs_1() {
    let dir = Builder::new()
        .prefix("test_list_dbs")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", dir.path());
    fs::create_dir_all(dir.path()).expect("dir created");
    assert!(dir.path().is_dir());
    let env = Rkv::with_capacity::<Lmdb>(dir.path(), 1).expect("rkv");
    check_rkv(&env);
    assert_eq!(env.get_dbs().unwrap(), vec![Some("s".to_owned())]);
}
#[test]
fn test_list_dbs_2() {
    let root = Builder::new()
        .prefix("test_list_dbs")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", root.path());
    fs::create_dir_all(root.path()).expect("dir created");
    assert!(root.path().is_dir());
    let k = Rkv::with_capacity::<Lmdb>(root.path(), 2).expect("rkv");
    check_rkv(&k);
    // Open a second named database; both should then be listed.
    let _ = k
        .open_single("zzz", StoreOptions::create())
        .expect("opened");
    let dbs = k.get_dbs().unwrap();
    assert_eq!(dbs, vec![Some("s".to_owned()), Some("zzz".to_owned())]);
}
/// Only the default (unnamed) database exists; it is listed as `None`.
#[test]
fn test_list_dbs_3() {
    let dir = Builder::new()
        .prefix("test_list_dbs")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", dir.path());
    fs::create_dir_all(dir.path()).expect("dir created");
    assert!(dir.path().is_dir());
    let env = Rkv::with_capacity::<Lmdb>(dir.path(), 0).expect("rkv");
    let _default = env.open_single(None, StoreOptions::create()).expect("opened");
    assert_eq!(env.get_dbs().unwrap(), vec![None]);
}
/// Returns a size guaranteed to exceed LMDB's default map size.
///
/// The LMDB C docs and the lmdb crate docs both claim a default map size of
/// 10,485,760 bytes (10MiB), but DEFAULT_MAPSIZE in the LMDB source
/// https://github.com/LMDB/lmdb/blob/26c7df88e44e31623d0802a564f24781acdefde3/libraries/liblmdb/mdb.c#L729
/// is actually 1,048,576 bytes (1MiB) — so one byte past 1MiB is enough.
fn get_larger_than_default_map_size_value() -> usize {
    DEFAULT_SIZE + 1
}
#[test]
#[should_panic(expected = "wrote: MapFull")]
fn test_exceed_map_size() {
    let root = Builder::new()
        .prefix("test_exceed_map_size")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", root.path());
    fs::create_dir_all(root.path()).expect("dir created");
    assert!(root.path().is_dir());
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k
        .open_single("test", StoreOptions::create())
        .expect("opened");
    // Writing a large enough value should cause LMDB to fail on MapFull.
    // We write a string that is larger than the default map size.
    let val = "x".repeat(get_larger_than_default_map_size_value());
    let mut writer = k.write().expect("writer");
    sk.put(&mut writer, "foo", &Value::Str(&val))
        .expect("wrote");
}
#[test]
#[should_panic(expected = "wrote: KeyValuePairBadSize")]
fn test_exceed_key_size_limit() {
    let root = Builder::new()
        .prefix("test_exceed_key_size_limit")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", root.path());
    fs::create_dir_all(root.path()).expect("dir created");
    assert!(root.path().is_dir());
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k
        .open_single("test", StoreOptions::create())
        .expect("opened");
    // A 512-byte key exceeds LMDB's maximum key size, so the put is
    // rejected with KeyValuePairBadSize.
    let key = "k".repeat(512);
    let mut writer = k.write().expect("writer");
    sk.put(&mut writer, key, &Value::Str("val")).expect("wrote");
}
// An explicitly enlarged map size lets a value bigger than the default
// map fit, and the committed value reads back intact.
#[test]
fn test_increase_map_size() {
    let root = Builder::new()
        .prefix("test_open_with_map_size")
        .tempdir()
        .expect("tempdir");
    println!("Root path: {:?}", root.path());
    fs::create_dir_all(root.path()).expect("dir created");
    assert!(root.path().is_dir());
    let mut builder = Rkv::environment_builder::<Lmdb>();
    // Set the map size to the size of the value we'll store in it + 100KiB,
    // which ensures that there's enough space for the value and metadata.
    builder.set_map_size(
        get_larger_than_default_map_size_value() + 100 * 1024, /* 100KiB */
    );
    builder.set_max_dbs(2);
    let k = Rkv::from_builder(root.path(), builder).unwrap();
    let sk = k
        .open_single("test", StoreOptions::create())
        .expect("opened");
    let val = "x".repeat(get_larger_than_default_map_size_value());
    let mut writer = k.write().expect("writer");
    sk.put(&mut writer, "foo", &Value::Str(&val))
        .expect("wrote");
    writer.commit().expect("committed");
    let reader = k.read().unwrap();
    assert_eq!(
        sk.get(&reader, "foo").expect("read"),
        Some(Value::Str(&val))
    );
}
// End-to-end check of value round-tripping plus LMDB transaction semantics:
// a writer sees its own uncommitted puts/deletes, concurrent readers see
// nothing until commit, and dropping a writer rolls its changes back.
#[test]
fn test_round_trip_and_transactions() {
    let root = Builder::new()
        .prefix("test_round_trip_and_transactions")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    {
        // Uncommitted writes are visible to the writer itself...
        let mut writer = k.write().expect("writer");
        sk.put(&mut writer, "foo", &Value::I64(1234))
            .expect("wrote");
        sk.put(&mut writer, "noo", &Value::F64(1234.0.into()))
            .expect("wrote");
        sk.put(&mut writer, "bar", &Value::Bool(true))
            .expect("wrote");
        sk.put(&mut writer, "baz", &Value::Str("héllo, yöu"))
            .expect("wrote");
        assert_eq!(
            sk.get(&writer, "foo").expect("read"),
            Some(Value::I64(1234))
        );
        assert_eq!(
            sk.get(&writer, "noo").expect("read"),
            Some(Value::F64(1234.0.into()))
        );
        assert_eq!(
            sk.get(&writer, "bar").expect("read"),
            Some(Value::Bool(true))
        );
        assert_eq!(
            sk.get(&writer, "baz").expect("read"),
            Some(Value::Str("héllo, yöu"))
        );
        // Isolation. Reads won't return values.
        let r = &k.read().unwrap();
        assert_eq!(sk.get(r, "foo").expect("read"), None);
        assert_eq!(sk.get(r, "bar").expect("read"), None);
        assert_eq!(sk.get(r, "baz").expect("read"), None);
    }
    // Dropped: tx rollback. Reads will still return nothing.
    {
        let r = &k.read().unwrap();
        assert_eq!(sk.get(r, "foo").expect("read"), None);
        assert_eq!(sk.get(r, "bar").expect("read"), None);
        assert_eq!(sk.get(r, "baz").expect("read"), None);
    }
    {
        let mut writer = k.write().expect("writer");
        sk.put(&mut writer, "foo", &Value::I64(1234))
            .expect("wrote");
        sk.put(&mut writer, "bar", &Value::Bool(true))
            .expect("wrote");
        sk.put(&mut writer, "baz", &Value::Str("héllo, yöu"))
            .expect("wrote");
        assert_eq!(
            sk.get(&writer, "foo").expect("read"),
            Some(Value::I64(1234))
        );
        assert_eq!(
            sk.get(&writer, "bar").expect("read"),
            Some(Value::Bool(true))
        );
        assert_eq!(
            sk.get(&writer, "baz").expect("read"),
            Some(Value::Str("héllo, yöu"))
        );
        writer.commit().expect("committed");
    }
    // Committed. Reads will succeed.
    {
        let r = k.read().unwrap();
        assert_eq!(sk.get(&r, "foo").expect("read"), Some(Value::I64(1234)));
        assert_eq!(sk.get(&r, "bar").expect("read"), Some(Value::Bool(true)));
        assert_eq!(
            sk.get(&r, "baz").expect("read"),
            Some(Value::Str("héllo, yöu"))
        );
    }
    {
        // Uncommitted deletes are visible to the deleting writer...
        let mut writer = k.write().expect("writer");
        sk.delete(&mut writer, "foo").expect("deleted");
        sk.delete(&mut writer, "bar").expect("deleted");
        sk.delete(&mut writer, "baz").expect("deleted");
        assert_eq!(sk.get(&writer, "foo").expect("read"), None);
        assert_eq!(sk.get(&writer, "bar").expect("read"), None);
        assert_eq!(sk.get(&writer, "baz").expect("read"), None);
        // Isolation. Reads still return values.
        let r = k.read().unwrap();
        assert_eq!(sk.get(&r, "foo").expect("read"), Some(Value::I64(1234)));
        assert_eq!(sk.get(&r, "bar").expect("read"), Some(Value::Bool(true)));
        assert_eq!(
            sk.get(&r, "baz").expect("read"),
            Some(Value::Str("héllo, yöu"))
        );
    }
    // Dropped: tx rollback. Reads will still return values.
    {
        let r = k.read().unwrap();
        assert_eq!(sk.get(&r, "foo").expect("read"), Some(Value::I64(1234)));
        assert_eq!(sk.get(&r, "bar").expect("read"), Some(Value::Bool(true)));
        assert_eq!(
            sk.get(&r, "baz").expect("read"),
            Some(Value::Str("héllo, yöu"))
        );
    }
    {
        let mut writer = k.write().expect("writer");
        sk.delete(&mut writer, "foo").expect("deleted");
        sk.delete(&mut writer, "bar").expect("deleted");
        sk.delete(&mut writer, "baz").expect("deleted");
        assert_eq!(sk.get(&writer, "foo").expect("read"), None);
        assert_eq!(sk.get(&writer, "bar").expect("read"), None);
        assert_eq!(sk.get(&writer, "baz").expect("read"), None);
        writer.commit().expect("committed");
    }
    // Committed. Reads will succeed but return None to indicate a missing value.
    {
        let r = k.read().unwrap();
        assert_eq!(sk.get(&r, "foo").expect("read"), None);
        assert_eq!(sk.get(&r, "bar").expect("read"), None);
        assert_eq!(sk.get(&r, "baz").expect("read"), None);
    }
}
/// `clear` on a single store removes every key in it.
#[test]
fn test_single_store_clear() {
    let dir = Builder::new()
        .prefix("test_single_store_clear")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(dir.path()).expect("dir created");
    let env = Rkv::new::<Lmdb>(dir.path()).expect("new succeeded");
    let store = env.open_single("sk", StoreOptions::create()).expect("opened");
    // Populate the store with a few values and commit.
    {
        let mut writer = env.write().expect("writer");
        store.put(&mut writer, "foo", &Value::I64(1234)).expect("wrote");
        store.put(&mut writer, "bar", &Value::Bool(true)).expect("wrote");
        store.put(&mut writer, "baz", &Value::Str("héllo, yöu")).expect("wrote");
        writer.commit().expect("committed");
    }
    // Clear everything and commit.
    {
        let mut writer = env.write().expect("writer");
        store.clear(&mut writer).expect("cleared");
        writer.commit().expect("committed");
    }
    // The store must now iterate as empty.
    {
        let reader = env.read().unwrap();
        assert_eq!(store.iter_start(&reader).expect("iter").count(), 0);
    }
}
/// Deleting a key that was never written fails with KeyValuePairNotFound.
#[test]
#[should_panic(expected = "KeyValuePairNotFound")]
fn test_single_store_delete_nonexistent() {
    let dir = Builder::new()
        .prefix("test_single_store_delete_nonexistent")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(dir.path()).expect("dir created");
    let env = Rkv::new::<Lmdb>(dir.path()).expect("new succeeded");
    let store = env.open_single("sk", StoreOptions::create()).expect("opened");
    let mut writer = env.write().expect("writer");
    store.delete(&mut writer, "bogus").unwrap();
}
// Exercises a dup-sort (multi-valued) store: multiple values per key,
// iteration order of duplicates, per-value deletion, and delete_all.
#[test]
#[cfg(feature = "db-dup-sort")]
fn test_multi_put_get_del() {
    let root = Builder::new()
        .prefix("test_multi_put_get_del")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let multistore = k.open_multi("multistore", StoreOptions::create()).unwrap();
    let mut writer = k.write().unwrap();
    multistore
        .put(&mut writer, "str1", &Value::Str("str1 foo"))
        .unwrap();
    multistore
        .put(&mut writer, "str1", &Value::Str("str1 bar"))
        .unwrap();
    multistore
        .put(&mut writer, "str2", &Value::Str("str2 foo"))
        .unwrap();
    multistore
        .put(&mut writer, "str2", &Value::Str("str2 bar"))
        .unwrap();
    multistore
        .put(&mut writer, "str3", &Value::Str("str3 foo"))
        .unwrap();
    multistore
        .put(&mut writer, "str3", &Value::Str("str3 bar"))
        .unwrap();
    writer.commit().unwrap();
    let writer = k.write().unwrap();
    {
        // Duplicate values come back in sorted order ("bar" before "foo"),
        // not insertion order.
        let mut iter = multistore.get(&writer, "str1").unwrap();
        let (id, val) = iter.next().unwrap().unwrap();
        assert_eq!((id, val), (&b"str1"[..], Value::Str("str1 bar")));
        let (id, val) = iter.next().unwrap().unwrap();
        assert_eq!((id, val), (&b"str1"[..], Value::Str("str1 foo")));
    }
    writer.commit().unwrap();
    let mut writer = k.write().unwrap();
    // Deleting one (key, value) pair leaves the key's other values intact.
    multistore
        .delete(&mut writer, "str1", &Value::Str("str1 foo"))
        .unwrap();
    assert_eq!(
        multistore.get_first(&writer, "str1").unwrap(),
        Some(Value::Str("str1 bar"))
    );
    multistore
        .delete(&mut writer, "str2", &Value::Str("str2 bar"))
        .unwrap();
    assert_eq!(
        multistore.get_first(&writer, "str2").unwrap(),
        Some(Value::Str("str2 foo"))
    );
    // delete_all removes every value stored under the key.
    multistore.delete_all(&mut writer, "str3").unwrap();
    assert_eq!(multistore.get_first(&writer, "str3").unwrap(), None);
    writer.commit().unwrap();
}
// `clear` on a dup-sort store removes every value of every key.
#[test]
#[cfg(feature = "db-dup-sort")]
fn test_multiple_store_clear() {
    let root = Builder::new()
        .prefix("test_multiple_store_clear")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let multistore = k
        .open_multi("multistore", StoreOptions::create())
        .expect("opened");
    {
        // Two values per key, three keys.
        let mut writer = k.write().expect("writer");
        multistore
            .put(&mut writer, "str1", &Value::Str("str1 foo"))
            .unwrap();
        multistore
            .put(&mut writer, "str1", &Value::Str("str1 bar"))
            .unwrap();
        multistore
            .put(&mut writer, "str2", &Value::Str("str2 foo"))
            .unwrap();
        multistore
            .put(&mut writer, "str2", &Value::Str("str2 bar"))
            .unwrap();
        multistore
            .put(&mut writer, "str3", &Value::Str("str3 foo"))
            .unwrap();
        multistore
            .put(&mut writer, "str3", &Value::Str("str3 bar"))
            .unwrap();
        writer.commit().expect("committed");
    }
    {
        let mut writer = k.write().expect("writer");
        multistore.clear(&mut writer).expect("cleared");
        writer.commit().expect("committed");
    }
    {
        // After the clear, no key has any value left.
        let r = k.read().unwrap();
        assert_eq!(multistore.get_first(&r, "str1").expect("read"), None);
        assert_eq!(multistore.get_first(&r, "str2").expect("read"), None);
        assert_eq!(multistore.get_first(&r, "str3").expect("read"), None);
    }
}
#[test]
fn test_open_store_for_read() {
    let root = Builder::new()
        .prefix("test_open_store_for_read")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    // First create the store, and start a write transaction on it.
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    let mut writer = k.write().expect("writer");
    sk.put(&mut writer, "foo", &Value::Str("bar"))
        .expect("write");
    // Open the same store for read, note that the write transaction is still in progress,
    // it should not block the reader though.
    let sk_readonly = k
        .open_single("sk", StoreOptions::default())
        .expect("opened");
    writer.commit().expect("commit");
    // Now the write transaction is committed, any followed reads should see its change.
    let reader = k.read().expect("reader");
    assert_eq!(
        sk_readonly.get(&reader, "foo").expect("read"),
        Some(Value::Str("bar"))
    );
}
/// Opening a named store without `create` in a fresh environment fails.
#[test]
#[should_panic(expected = "open a missing store")]
fn test_open_a_missing_store() {
    let dir = Builder::new()
        .prefix("test_open_a_missing_store")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(dir.path()).expect("dir created");
    let env = Rkv::new::<Lmdb>(dir.path()).expect("new succeeded");
    let _store = env
        .open_single("sk", StoreOptions::default())
        .expect("open a missing store");
}
// Opening an environment whose data file is garbage must fail with
// `FileInvalid`; the `expect` below is what triggers the expected panic.
#[test]
#[should_panic(expected = "new failed: FileInvalid")]
fn test_open_a_broken_store() {
    let root = Builder::new()
        // Fixed: the prefix was copy-pasted as "test_open_a_missing_store".
        .prefix("test_open_a_broken_store")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    // Plant a bogus data file where LMDB expects its map.
    let dbfile = root.path().join("data.mdb");
    fs::write(dbfile, "bogus").expect("dbfile created");
    let _ = Rkv::new::<Lmdb>(root.path()).expect("new failed");
}
/// Opening a store while a read transaction is live on the same thread must
/// be rejected with `OpenAttemptedDuringTransaction` (the test name refers to
/// LMDB's MDB_BAD_RSLOT). Note: it returns an `Err`, it does not panic — the
/// original comment saying "will panic" was misleading.
#[test]
fn test_open_fail_with_badrslot() {
    let root = Builder::new()
        .prefix("test_open_fail_with_badrslot")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    // First create the store.
    let _sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    // Open a reader on this store.
    let _reader = k.read().expect("reader");
    // Opening the same store while the reader is live must fail.
    let store = k.open_single("sk", StoreOptions::default());
    assert!(matches!(
        store,
        Err(StoreError::OpenAttemptedDuringTransaction(_))
    ));
}
// Read a number through a live writer, modify a primitive copy of it, and
// write it back inside the same transaction.
#[test]
fn test_read_before_write_num() {
    let root = Builder::new()
        .prefix("test_read_before_write_num")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    // Test reading a number, modifying it, and then writing it back.
    // We have to be done with the Value::I64 before calling Writer::put,
    // as the Value::I64 borrows an immutable reference to the Writer.
    // So we extract and copy its primitive value.
    fn get_existing_foo(
        store: SingleStore<LmdbDatabase>,
        writer: &Writer<LmdbRwTransaction>,
    ) -> Option<i64> {
        match store.get(writer, "foo").expect("read") {
            Some(Value::I64(val)) => Some(val),
            _ => None,
        }
    }
    let mut writer = k.write().expect("writer");
    // "foo" does not exist yet, so the fallback 99 is used.
    let mut existing = get_existing_foo(sk, &writer).unwrap_or(99);
    existing += 1;
    sk.put(&mut writer, "foo", &Value::I64(existing))
        .expect("success");
    // The uncommitted put is visible to reads through the same writer.
    let updated = get_existing_foo(sk, &writer).unwrap_or(99);
    assert_eq!(updated, 100);
    writer.commit().expect("commit");
}
// Read a string through a live writer, modify an owned copy of it, and write
// it back inside the same transaction.
#[test]
fn test_read_before_write_str() {
    let root = Builder::new()
        .prefix("test_read_before_write_str")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    // Test reading a string, modifying it, and then writing it back.
    // We have to be done with the Value::Str before calling Writer::put,
    // as the Value::Str (and its underlying &str) borrows an immutable
    // reference to the Writer. So we copy it to a String.
    fn get_existing_foo(
        store: SingleStore<LmdbDatabase>,
        writer: &Writer<LmdbRwTransaction>,
    ) -> Option<String> {
        match store.get(writer, "foo").expect("read") {
            Some(Value::Str(val)) => Some(val.to_string()),
            _ => None,
        }
    }
    let mut writer = k.write().expect("writer");
    // "foo" does not exist yet, so the empty-string default is used.
    let mut existing = get_existing_foo(sk, &writer).unwrap_or_default();
    existing.push('…');
    sk.put(&mut writer, "foo", &Value::Str(&existing))
        .expect("write");
    // The uncommitted put is visible to reads through the same writer.
    let updated = get_existing_foo(sk, &writer).unwrap_or_default();
    assert_eq!(updated, "…");
    writer.commit().expect("commit");
}
/// LMDB forbids two live read transactions on one thread; the second `read()`
/// must fail with `ReadTransactionAlreadyExists`.
#[test]
fn test_concurrent_read_transactions_prohibited() {
    let root = Builder::new()
        // Fixed: prefix previously said "test_concurrent_reads_prohibited",
        // out of step with the test name.
        .prefix("test_concurrent_read_transactions_prohibited")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let _first = k.read().expect("reader");
    let second = k.read();
    match second {
        Err(StoreError::ReadTransactionAlreadyExists(t)) => {
            println!("Thread was {t:?}");
        }
        // NOTE(review): this arm lets any other error pass silently, so the
        // test does not pin the exact error variant — confirm whether that
        // tolerance is intentional before tightening it.
        Err(e) => {
            println!("Got error {e:?}");
        }
        _ => {
            panic!("Expected error.");
        }
    }
}
// Snapshot isolation: a reader opened before a write keeps seeing the old
// value, even after the writer commits; only a new reader sees the update.
// The statement order below is the whole point of the test — do not reorder.
#[test]
fn test_isolation() {
    let root = Builder::new()
        .prefix("test_isolation")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let s = k.open_single("s", StoreOptions::create()).expect("opened");
    // Add one field.
    {
        let mut writer = k.write().expect("writer");
        s.put(&mut writer, "foo", &Value::I64(1234)).expect("wrote");
        writer.commit().expect("committed");
    }
    {
        let reader = k.read().unwrap();
        assert_eq!(s.get(&reader, "foo").expect("read"), Some(Value::I64(1234)));
    }
    // Establish a long-lived reader that outlasts a writer.
    let reader = k.read().expect("reader");
    assert_eq!(s.get(&reader, "foo").expect("read"), Some(Value::I64(1234)));
    // Start a write transaction.
    let mut writer = k.write().expect("writer");
    s.put(&mut writer, "foo", &Value::I64(999)).expect("wrote");
    // The reader and writer are isolated.
    assert_eq!(s.get(&reader, "foo").expect("read"), Some(Value::I64(1234)));
    assert_eq!(s.get(&writer, "foo").expect("read"), Some(Value::I64(999)));
    // If we commit the writer, we still have isolation.
    writer.commit().expect("committed");
    assert_eq!(s.get(&reader, "foo").expect("read"), Some(Value::I64(1234)));
    // A new reader sees the committed value. Note that LMDB doesn't allow two
    // read transactions to exist in the same thread, so we abort the previous one.
    reader.abort();
    let reader = k.read().expect("reader");
    assert_eq!(s.get(&reader, "foo").expect("read"), Some(Value::I64(999)));
}
// Round-trips raw byte blobs, including u16 data packed to little-endian
// bytes and unpacked again. All reads go through the live writer; the
// transaction is intentionally never committed.
// NOTE(review): the tempdir prefix says "test_round_trip_blob", which does
// not match the test name — presumably a leftover from a rename.
#[test]
fn test_blob() {
    let root = Builder::new()
        .prefix("test_round_trip_blob")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    let mut writer = k.write().expect("writer");
    assert_eq!(sk.get(&writer, "foo").expect("read"), None);
    sk.put(&mut writer, "foo", &Value::Blob(&[1, 2, 3, 4]))
        .expect("wrote");
    assert_eq!(
        sk.get(&writer, "foo").expect("read"),
        Some(Value::Blob(&[1, 2, 3, 4]))
    );
    // Pack a u16 slice into little-endian bytes (2 bytes per element).
    fn u16_to_u8(src: &[u16]) -> Vec<u8> {
        let mut dst = vec![0; 2 * src.len()];
        LittleEndian::write_u16_into(src, &mut dst);
        dst
    }
    // Inverse of `u16_to_u8`.
    fn u8_to_u16(src: &[u8]) -> Vec<u16> {
        let mut dst = vec![0; src.len() / 2];
        LittleEndian::read_u16_into(src, &mut dst);
        dst
    }
    // When storing UTF-16 strings as blobs, we'll need to convert
    // their [u16] backing storage to [u8]. Test that converting, writing,
    // reading, and converting back works as expected.
    let u16_array = [1000, 10000, 54321, 65535];
    assert_eq!(sk.get(&writer, "bar").expect("read"), None);
    sk.put(&mut writer, "bar", &Value::Blob(&u16_to_u8(&u16_array)))
        .expect("wrote");
    let u8_array = match sk.get(&writer, "bar").expect("read") {
        Some(Value::Blob(val)) => val,
        _ => &[],
    };
    assert_eq!(u8_to_u16(u8_array), u16_array);
}
// With NO_SYNC the environment does not flush on commit by itself; an
// explicit `sync(true)` must still make committed data durable, verified by
// dropping the environment and reopening it from disk.
#[test]
fn test_sync() {
    let root = Builder::new()
        .prefix("test_sync")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let mut builder = Rkv::environment_builder::<Lmdb>();
    builder.set_max_dbs(1);
    builder.set_flags(EnvironmentFlags::NO_SYNC);
    {
        let k = Rkv::from_builder(root.path(), builder).expect("new succeeded");
        let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
        {
            let mut writer = k.write().expect("writer");
            sk.put(&mut writer, "foo", &Value::I64(1234))
                .expect("wrote");
            writer.commit().expect("committed");
            // Force the flush that NO_SYNC suppressed.
            k.sync(true).expect("synced");
        }
    } // First environment is dropped here.
    let k = Rkv::from_builder(root.path(), builder).expect("new succeeded");
    let sk = k
        .open_single("sk", StoreOptions::default())
        .expect("opened");
    let reader = k.read().expect("reader");
    assert_eq!(
        sk.get(&reader, "foo").expect("read"),
        Some(Value::I64(1234))
    );
}
// Checks `Rkv::stat` bookkeeping after writing one integer key into each of
// five stores: a shallow tree (depth 1, one leaf page, no branch pages) with
// five entries. Integer-keyed stores require the "db-int-key" feature.
#[test]
#[cfg(feature = "db-int-key")]
fn test_stat() {
    let root = Builder::new()
        .prefix("test_stat")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    for i in 0..5 {
        let sk = k
            .open_integer(&format!("sk{i}")[..], StoreOptions::create())
            .expect("opened");
        {
            let mut writer = k.write().expect("writer");
            sk.put(&mut writer, i, &Value::I64(i64::from(i)))
                .expect("wrote");
            writer.commit().expect("committed");
        }
    }
    assert_eq!(k.stat().expect("stat").depth(), 1);
    assert_eq!(k.stat().expect("stat").entries(), 5);
    assert_eq!(k.stat().expect("stat").branch_pages(), 0);
    assert_eq!(k.stat().expect("stat").leaf_pages(), 1);
}
/// Sanity-checks the environment metadata reported by `Rkv::info`: map size,
/// last page/transaction ids, and the live-reader counter.
#[test]
fn test_info() {
    let root = Builder::new()
        .prefix("test_info")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    let mut writer = k.write().expect("writer");
    sk.put(&mut writer, "foo", &Value::Str("bar"))
        .expect("wrote");
    // Fixed typo: expect message was "commited".
    writer.commit().expect("committed");
    let info = k.info().expect("info");
    // The default size is 1MB.
    assert_eq!(info.map_size(), DEFAULT_SIZE);
    // Should be greater than 0 after the write txn.
    assert!(info.last_pgno() > 0);
    // A txn to open_single + a txn to write.
    assert_eq!(info.last_txnid(), 2);
    // The default max readers is 126.
    assert_eq!(info.max_readers(), 126);
    assert_eq!(info.num_readers(), 0);
    // A new reader should increment the reader counter.
    let _reader = k.read().expect("reader");
    let info = k.info().expect("info");
    assert_eq!(info.num_readers(), 1);
}
/// The load ratio must grow when data is added and shrink when the store is
/// cleared (freed pages go back to the freelist).
#[test]
fn test_load_ratio() {
    let root = Builder::new()
        .prefix("test_load_ratio")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    let mut writer = k.write().expect("writer");
    sk.put(&mut writer, "foo", &Value::Str("bar"))
        .expect("wrote");
    // Fixed typo (three occurrences below as well): expect message was "commited".
    writer.commit().expect("committed");
    let ratio = k.load_ratio().expect("ratio").unwrap();
    assert!(ratio > 0.0_f32 && ratio < 1.0_f32);
    // Put data to database should increase the load ratio.
    let mut writer = k.write().expect("writer");
    sk.put(
        &mut writer,
        "bar",
        &Value::Str(&"more-than-4KB".repeat(1000)),
    )
    .expect("wrote");
    writer.commit().expect("committed");
    let new_ratio = k.load_ratio().expect("ratio").unwrap();
    assert!(new_ratio > ratio);
    // Clear the database so that all the used pages should go to freelist, hence the ratio
    // should decrease.
    let mut writer = k.write().expect("writer");
    sk.clear(&mut writer).expect("clear");
    writer.commit().expect("committed");
    let after_clear_ratio = k.load_ratio().expect("ratio").unwrap();
    assert!(after_clear_ratio < new_ratio);
}
/// Growing the map size at runtime takes effect and leaves the environment
/// writable afterwards.
#[test]
fn test_set_map_size() {
    let root = Builder::new()
        // Fixed: prefix previously said "test_size_map_size".
        .prefix("test_set_map_size")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    assert_eq!(k.info().expect("info").map_size(), DEFAULT_SIZE);
    k.set_map_size(2 * DEFAULT_SIZE).expect("resized");
    // Should be able to write.
    let mut writer = k.write().expect("writer");
    sk.put(&mut writer, "foo", &Value::Str("bar"))
        .expect("wrote");
    // Fixed typo: expect message was "commited".
    writer.commit().expect("committed");
    assert_eq!(k.info().expect("info").map_size(), 2 * DEFAULT_SIZE);
}
/// Iteration over a single store: the empty-store case, sorted traversal via
/// `iter_start`, exhausted-iterator behavior, and `iter_from` starting at the
/// first key >= the given key (both for a gap key and for a prefix).
#[test]
fn test_iter() {
    let root = Builder::new()
        .prefix("test_iter")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let sk = k.open_single("sk", StoreOptions::create()).expect("opened");
    // An iterator over an empty store returns no values.
    {
        let reader = k.read().unwrap();
        let mut iter = sk.iter_start(&reader).unwrap();
        assert!(iter.next().is_none());
    }
    // Populate the store; insertion order is deliberately unsorted.
    let mut writer = k.write().expect("writer");
    for (key, value) in [
        ("foo", Value::I64(1234)),
        ("noo", Value::F64(1234.0.into())),
        ("bar", Value::Bool(true)),
        ("baz", Value::Str("héllo, yöu")),
        ("héllò, töűrîst", Value::Str("Emil.RuleZ!")),
        ("你好,遊客", Value::Str("米克規則")),
    ]
    .iter()
    {
        sk.put(&mut writer, key, value).expect("wrote");
    }
    writer.commit().expect("committed");
    let reader = k.read().unwrap();
    // iter_start() yields (key, value) tuples ordered by key.
    let sorted = [
        ("bar", Value::Bool(true)),
        ("baz", Value::Str("héllo, yöu")),
        ("foo", Value::I64(1234)),
        ("héllò, töűrîst", Value::Str("Emil.RuleZ!")),
        ("noo", Value::F64(1234.0.into())),
        ("你好,遊客", Value::Str("米克規則")),
    ];
    let mut iter = sk.iter_start(&reader).unwrap();
    for (want_key, want_val) in sorted.iter() {
        let (key, val) = iter.next().unwrap().unwrap();
        assert_eq!(str::from_utf8(key).expect("key"), *want_key);
        assert_eq!(&val, want_val);
    }
    assert!(iter.next().is_none());
    // Iterators don't loop. Once one returns None, additional calls
    // to its next() method will always return None.
    assert!(iter.next().is_none());
    // iter_from() begins iteration at the first key equal to or greater than
    // the given key. "moo" falls in the gap before "noo"; "no" is a strict
    // prefix of "noo". Both therefore yield the final two entries.
    for from in ["moo", "no"].iter() {
        let mut iter = sk.iter_from(&reader, from).unwrap();
        for (want_key, want_val) in sorted[4..].iter() {
            let (key, val) = iter.next().unwrap().unwrap();
            assert_eq!(str::from_utf8(key).expect("key"), *want_key);
            assert_eq!(&val, want_val);
        }
        assert!(iter.next().is_none());
    }
}
/// `iter_from` with a key that sorts after every stored key yields nothing.
#[test]
fn test_iter_from_key_greater_than_existing() {
    let root = Builder::new()
        .prefix("test_iter_from_key_greater_than_existing")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let env = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let store = env.open_single("sk", StoreOptions::create()).expect("opened");
    let mut txn = env.write().expect("writer");
    store.put(&mut txn, "foo", &Value::I64(1234)).expect("wrote");
    store
        .put(&mut txn, "noo", &Value::F64(1234.0.into()))
        .expect("wrote");
    store.put(&mut txn, "bar", &Value::Bool(true)).expect("wrote");
    store
        .put(&mut txn, "baz", &Value::Str("héllo, yöu"))
        .expect("wrote");
    txn.commit().expect("committed");
    // "nuu" sorts after "noo", the largest key present.
    let reader = env.read().unwrap();
    let mut iter = store.iter_from(&reader, "nuu").unwrap();
    assert!(iter.next().is_none());
}
/// Three stores in one environment: writes, reads, and deletes through a
/// single transaction must each target the correct store.
#[test]
fn test_multiple_store_read_write() {
    let root = Builder::new()
        .prefix("test_multiple_store_read_write")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let s1 = k
        .open_single("store_1", StoreOptions::create())
        .expect("opened");
    let s2 = k
        .open_single("store_2", StoreOptions::create())
        .expect("opened");
    let s3 = k
        .open_single("store_3", StoreOptions::create())
        .expect("opened");
    let mut writer = k.write().expect("writer");
    s1.put(&mut writer, "foo", &Value::Str("bar"))
        .expect("wrote");
    s2.put(&mut writer, "foo", &Value::I64(123)).expect("wrote");
    s3.put(&mut writer, "foo", &Value::Bool(true))
        .expect("wrote");
    // Uncommitted writes are visible through the writer itself.
    assert_eq!(
        s1.get(&writer, "foo").expect("read"),
        Some(Value::Str("bar"))
    );
    assert_eq!(s2.get(&writer, "foo").expect("read"), Some(Value::I64(123)));
    assert_eq!(
        s3.get(&writer, "foo").expect("read"),
        Some(Value::Bool(true))
    );
    writer.commit().expect("committed");
    let reader = k.read().expect("unbound_reader");
    assert_eq!(
        s1.get(&reader, "foo").expect("read"),
        Some(Value::Str("bar"))
    );
    assert_eq!(s2.get(&reader, "foo").expect("read"), Some(Value::I64(123)));
    assert_eq!(
        s3.get(&reader, "foo").expect("read"),
        Some(Value::Bool(true))
    );
    reader.abort();
    // Test delete across multiple stores.
    let mut writer = k.write().expect("writer");
    s1.delete(&mut writer, "foo").expect("deleted");
    s2.delete(&mut writer, "foo").expect("deleted");
    s3.delete(&mut writer, "foo").expect("deleted");
    writer.commit().expect("committed");
    // BUG FIX: the original asserted on "key", which was never written, so
    // the deletion was never actually verified. Check the deleted key.
    let reader = k.read().expect("reader");
    assert_eq!(s1.get(&reader, "foo").expect("value"), None);
    assert_eq!(s2.get(&reader, "foo").expect("value"), None);
    assert_eq!(s3.get(&reader, "foo").expect("value"), None);
}
/// Two stores holding identical data: `iter_start` and `iter_from` must
/// behave identically on each, and iterating one store must not surface
/// entries from the other.
#[test]
fn test_multiple_store_iter() {
    let root = Builder::new()
        .prefix("test_multiple_store_iter")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    let k = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let s1 = k
        .open_single("store_1", StoreOptions::create())
        .expect("opened");
    let s2 = k
        .open_single("store_2", StoreOptions::create())
        .expect("opened");
    // The same six entries go into both stores (insertion order unsorted).
    let entries = [
        ("foo", Value::I64(1234)),
        ("noo", Value::F64(1234.0.into())),
        ("bar", Value::Bool(true)),
        ("baz", Value::Str("héllo, yöu")),
        ("héllò, töűrîst", Value::Str("Emil.RuleZ!")),
        ("你好,遊客", Value::Str("米克規則")),
    ];
    let mut writer = k.write().expect("writer");
    for store in [s1, s2].iter() {
        for (key, value) in entries.iter() {
            store.put(&mut writer, key, value).expect("wrote");
        }
    }
    writer.commit().expect("committed");
    let reader = k.read().unwrap();
    // Expected traversal order: sorted by key.
    let sorted = [
        ("bar", Value::Bool(true)),
        ("baz", Value::Str("héllo, yöu")),
        ("foo", Value::I64(1234)),
        ("héllò, töűrîst", Value::Str("Emil.RuleZ!")),
        ("noo", Value::F64(1234.0.into())),
        ("你好,遊客", Value::Str("米克規則")),
    ];
    for store in [s1, s2].iter() {
        // Full traversal from the start of the store.
        let mut iter = store.iter_start(&reader).unwrap();
        for (want_key, want_val) in sorted.iter() {
            let (key, val) = iter.next().unwrap().unwrap();
            assert_eq!(str::from_utf8(key).expect("key"), *want_key);
            assert_eq!(&val, want_val);
        }
        assert!(iter.next().is_none());
        // Partial traversal: "moo" falls in the gap before "noo" and "no" is
        // a strict prefix of "noo"; both start at the last two entries.
        for from in ["moo", "no"].iter() {
            let mut iter = store.iter_from(&reader, from).unwrap();
            for (want_key, want_val) in sorted[4..].iter() {
                let (key, val) = iter.next().unwrap().unwrap();
                assert_eq!(str::from_utf8(key).expect("key"), *want_key);
                assert_eq!(&val, want_val);
            }
            assert!(iter.next().is_none());
        }
    }
}
// A store handle can be shared across threads: ten writer threads each put
// one (i, i) pair, then ten reader threads each read one back; the summed
// reads must equal the summed writes.
// NOTE(review): the tempdir prefix "test_multiple_thread" does not match the
// test name — presumably a leftover from a rename.
#[test]
fn test_store_multiple_thread() {
    let root = Builder::new()
        .prefix("test_multiple_thread")
        .tempdir()
        .expect("tempdir");
    fs::create_dir_all(root.path()).expect("dir created");
    // The Rkv environment itself is guarded by an RwLock so each thread can
    // borrow it; the store handle is copied into each closure.
    let rkv_arc = Arc::new(RwLock::new(
        Rkv::new::<Lmdb>(root.path()).expect("new succeeded"),
    ));
    let store = rkv_arc
        .read()
        .unwrap()
        .open_single("test", StoreOptions::create())
        .expect("opened");
    let num_threads = 10;
    let mut write_handles = Vec::with_capacity(num_threads as usize);
    let mut read_handles = Vec::with_capacity(num_threads as usize);
    // Note that this isn't intended to demonstrate a good use of threads.
    // For this shape of data, it would be more performant to write/read
    // all values using one transaction in a single thread. The point here
    // is just to confirm that a store can be shared by multiple threads.
    // For each KV pair, spawn a thread that writes it to the store.
    for i in 0..num_threads {
        let rkv_arc = rkv_arc.clone();
        write_handles.push(thread::spawn(move || {
            let rkv = rkv_arc.write().expect("rkv");
            let mut writer = rkv.write().expect("writer");
            store
                .put(&mut writer, i.to_string(), &Value::U64(i))
                .expect("written");
            writer.commit().unwrap();
        }));
    }
    // All writers must finish before any reader starts.
    for handle in write_handles {
        handle.join().expect("joined");
    }
    // For each KV pair, spawn a thread that reads it from the store
    // and returns its value.
    for i in 0..num_threads {
        let rkv_arc = rkv_arc.clone();
        read_handles.push(thread::spawn(move || {
            let rkv = rkv_arc.read().expect("rkv");
            let reader = rkv.read().expect("reader");
            let value = match store.get(&reader, i.to_string()) {
                Ok(Some(Value::U64(value))) => value,
                Ok(Some(_)) => panic!("value type unexpected"),
                Ok(None) => panic!("value not found"),
                Err(err) => panic!("{}", err),
            };
            assert_eq!(value, i);
            value
        }));
    }
    // Sum the values returned from the threads and confirm that they're
    // equal to the sum of values written to the threads.
    let thread_sum: u64 = read_handles
        .into_iter()
        .map(|handle| handle.join().expect("value"))
        .sum();
    assert_eq!(thread_sum, (0..num_threads).sum());
}
// Exercises using a value retrieved from one transaction as the key for
// another operation, in several borrow configurations: reader value → writer
// key, writer value (copied to owned) → same-writer key, and (store, key)
// pairs collected from one pass and consumed in a second pass.
#[test]
fn test_use_value_as_key() {
    let root = Builder::new()
        .prefix("test_use_value_as_key")
        .tempdir()
        .expect("tempdir");
    let rkv = Rkv::new::<Lmdb>(root.path()).expect("new succeeded");
    let store = rkv
        .open_single("store", StoreOptions::create())
        .expect("opened");
    {
        let mut writer = rkv.write().expect("writer");
        store
            .put(&mut writer, "foo", &Value::Str("bar"))
            .expect("wrote");
        store
            .put(&mut writer, "bar", &Value::Str("baz"))
            .expect("wrote");
        writer.commit().expect("committed");
    }
    // It's possible to retrieve a value with a Reader and then use it
    // as a key with a Writer.
    {
        let reader = &rkv.read().unwrap();
        if let Some(Value::Str(key)) = store.get(reader, "foo").expect("read") {
            let mut writer = rkv.write().expect("writer");
            store.delete(&mut writer, key).expect("deleted");
            writer.commit().expect("committed");
        }
    }
    // Restore the "bar" entry deleted above.
    {
        let mut writer = rkv.write().expect("writer");
        store
            .put(&mut writer, "bar", &Value::Str("baz"))
            .expect("wrote");
        writer.commit().expect("committed");
    }
    // You can also retrieve a Value with a Writer and then use it as a key
    // with the same Writer if you copy the value to an owned type
    // so the Writer isn't still being borrowed by the retrieved value
    // when you try to borrow the Writer again to modify that value.
    {
        let mut writer = rkv.write().expect("writer");
        if let Some(Value::Str(value)) = store.get(&writer, "foo").expect("read") {
            let key = value.to_owned();
            store.delete(&mut writer, key).expect("deleted");
            writer.commit().expect("committed");
        }
    }
    // Seed two additional stores whose "key1"/"key2" values name other keys.
    {
        let name1 = rkv
            .open_single("name1", StoreOptions::create())
            .expect("opened");
        let name2 = rkv
            .open_single("name2", StoreOptions::create())
            .expect("opened");
        let mut writer = rkv.write().expect("writer");
        name1
            .put(&mut writer, "key1", &Value::Str("bar"))
            .expect("wrote");
        name1
            .put(&mut writer, "bar", &Value::Str("baz"))
            .expect("wrote");
        name2
            .put(&mut writer, "key2", &Value::Str("bar"))
            .expect("wrote");
        name2
            .put(&mut writer, "bar", &Value::Str("baz"))
            .expect("wrote");
        writer.commit().expect("committed");
    }
    // You can also iterate (store, key) pairs to retrieve foreign keys,
    // then iterate those foreign keys to modify/delete them.
    //
    // You need to open the stores in advance, since opening a store
    // uses a write transaction internally, so opening them while a writer
    // is extant will hang.
    //
    // And you need to copy the values to an owned type so the Writer isn't
    // still being borrowed by a retrieved value when you try to borrow
    // the Writer again to modify another value.
    let fields = vec![
        (
            rkv.open_single("name1", StoreOptions::create())
                .expect("opened"),
            "key1",
        ),
        (
            rkv.open_single("name2", StoreOptions::create())
                .expect("opened"),
            "key2",
        ),
    ];
    {
        let mut foreignkeys = Vec::new();
        let mut writer = rkv.write().expect("writer");
        for (store, key) in fields.iter() {
            if let Some(Value::Str(value)) = store.get(&writer, key).expect("read") {
                foreignkeys.push((store, value.to_owned()));
            }
        }
        for (store, key) in foreignkeys.iter() {
            store.delete(&mut writer, key).expect("deleted");
        }
        writer.commit().expect("committed");
    }
}
|
use super::Opt;
use crate::advisory::Advisory;
use crate::package::Package;
use colored::*;
use git2::Repository;
use regex::Regex;
use smallstr::SmallString;
use snafu::{ensure, ResultExt, Snafu};
use std::collections::HashMap;
use std::fmt;
use std::fs::{create_dir, File};
use std::io::{self, Write};
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use subprocess::{Exec, PopenError};
/// Errors raised while checking out branches, instantiating derivations,
/// running vulnix, and loading/saving scan results. Each variant's snafu
/// context selector is used at the corresponding call site below.
#[derive(Debug, Snafu)]
pub enum Error {
    #[snafu(display("Cannot recognize branch specification '{}'", spec))]
    InvalidBranchSpec { spec: String },
    #[snafu(display("Branch name '{}' is not unique", name))]
    DuplicateBranch { name: String },
    #[snafu(display("Cannot execute git"))]
    GitError { source: PopenError },
    #[snafu(display("Cannot check out revision '{}'", rev))]
    CheckoutError { rev: String },
    #[snafu(display("Cannot execute nix-instantiate"))]
    NixInstantiateError { source: PopenError },
    #[snafu(display("nix-instantiate output does not start with /nix/store: {}", prefix))]
    StorePrefixError { prefix: String },
    #[snafu(display("Cannot instantiate derivation"))]
    InstantiateError,
    #[snafu(display("Output of nix-instantiate is empty!"))]
    EmptyInstantiateError,
    #[snafu(display("Cannot execute '{}'", vulnix.display()))]
    VulnixExecError { vulnix: PathBuf, source: PopenError },
    #[snafu(display("Cannot parse vulnix JSON output"))]
    JSONParseError { source: serde_json::error::Error },
    #[snafu(display("Cannot create vulnix JSON"))]
    JSONSerError { source: serde_json::error::Error },
    #[snafu(display("Cannot save vulnix results to '{}'", fname.display()))]
    SaveError { fname: PathBuf, source: io::Error },
    #[snafu(display("git error while resolving rev '{}'", rev))]
    RevError { rev: String, source: git2::Error },
    #[snafu(display("Cannot open git repository"))]
    RepositoryError { source: git2::Error },
    #[snafu(display("Cannot open saved vulnix results '{}'", fname.display()))]
    OpenError { fname: PathBuf, source: io::Error },
}
type Result<T, E = Error> = std::result::Result<T, E>;
/// vulnix scan result item. vulnix output consists of a Vec of these.
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)]
pub struct VulnixRes {
    // Serialized as "name" in vulnix's JSON.
    #[serde(rename = "name")]
    pub pkg: Package,
    // Advisories that vulnix reports as affecting this package.
    pub affected_by: Vec<Advisory>,
}
pub type ScanByBranch = HashMap<Branch, Vec<VulnixRes>>;
/// NixOS release to scan. Note that the git rev/branch may have a different name than the release
/// name we publish.
#[derive(Debug, Clone, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct Branch {
    /// NixOS release name to publish in tickets
    pub name: SmallString<[u8; 16]>,
    /// git parseable revspec, usually a branch name
    // After `Branches::with_repo` this is rewritten to the resolved commit id.
    pub rev: SmallString<[u8; 16]>,
}
impl Branch {
pub fn new(name: &str) -> Self {
Self {
name: SmallString::from_str(name),
rev: SmallString::from_str(name),
}
}
pub fn checkout(&self, repo: &Path) -> Result<()> {
println!(
"{} {} @ {}",
"* Checking out".green().bold(),
self.name.green().bold(),
self.rev[0..11].yellow()
);
let status = Exec::cmd("git")
.args(&["checkout", "-q", &self.rev])
.cwd(repo)
.join()
.context(GitError {})?;
ensure!(
status.success(),
CheckoutError {
rev: self.rev.to_string()
}
);
Ok(())
}
/// Creates release derivation. Returns path to derivation file.
pub fn instantiate(&self, repo: &Path) -> Result<PathBuf> {
println!("{}", "* Instantiating...".green());
let cmd = Exec::cmd("nix-instantiate")
.args(&["--quiet", "-I", "nixpkgs=.", "nixos/release-combined.nix"])
.env("GC_INITIAL_HEAP_SIZE", "4G")
.cwd(repo);
println!("{}", cmd.to_cmdline_lossy().purple());
let cap = cmd.capture().context(NixInstantiateError {})?;
ensure!(cap.success(), InstantiateError);
if let Some(first) = cap.stdout_str().lines().nth(0) {
ensure!(
first.starts_with("/nix/store"),
StorePrefixError { prefix: first }
);
let drv = PathBuf::from(first.trim_end());
println!("{}", drv.display());
Ok(drv)
} else {
Err(Error::EmptyInstantiateError)
}
}
/// Full path to JSON file containing vulnix scan results
pub fn vulnix_json(&self, dir: &Path) -> PathBuf {
dir.join(format!("vulnix.{}.json", self.name.as_str()))
}
/// Invokes `vulnix` on a derivation
fn vulnix(&self, drv: PathBuf, opt: &Opt) -> Result<Vec<VulnixRes>> {
println!("{}", "* Scanning...".green());
let full_url = format!("{}/{}.toml", opt.whitelist_url, self.name.as_str());
let full_wl = opt
.whitelist_dir
.join(format!("{}.toml", self.name.as_str()));
let cmd = Exec::cmd(&opt.vulnix)
.args(&["-j", "-w", &full_url, "-W"])
.args(&[full_wl, drv]);
println!("{}", cmd.to_cmdline_lossy().purple());
let rdr = cmd.stream_stdout().context(VulnixExecError {
vulnix: &opt.vulnix,
})?;
serde_json::from_reader(rdr).context(JSONParseError)
}
/// Writes scan results into a JSON file for subsequent reference
///
/// # Errors
/// Fails with `SaveError` on file creation/write problems, `JSONSerError`
/// on serialization problems.
fn save(&self, scan: &[VulnixRes], dir: &Path) -> Result<()> {
    let fname = self.vulnix_json(dir);
    let mut f = File::create(&fname).context(SaveError { fname: &fname })?;
    // Serialize through a mutable borrow; the former `f.try_clone().unwrap()`
    // needlessly duplicated the file descriptor and could panic.
    serde_json::to_writer(&mut f, &scan).context(JSONSerError)?;
    // Trailing newline so the file plays nicely with text tooling.
    writeln!(f).context(SaveError { fname })
}
}
lazy_static! {
    // Branch spec syntax: "<name>" or "<name>=<rev>", where <name> contains
    // no '/', '=', or whitespace, and <rev> is any run of non-space chars.
    static ref BRANCHSPEC: Regex = Regex::new(r"^([^/=[:space:]]+)(=(\S+))?$").unwrap();
}
impl FromStr for Branch {
    type Err = Error;
    /// Parses a branch spec: a bare `name` pins the rev to the name itself
    /// (see `Branch::new`); `name=rev` pins it to the given revision.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match BRANCHSPEC.captures(s) {
            Some(cap) => {
                let name = &cap[1];
                // capture group 3 is the part after '='; absent for bare names
                match cap.get(3) {
                    Some(rev) => Ok(Branch {
                        name: name.into(),
                        rev: rev.as_str().into(),
                    }),
                    None => Ok(Branch::new(name)),
                }
            }
            None => (InvalidBranchSpec { spec: s }).fail(),
        }
    }
}
impl fmt::Display for Branch {
    /// A branch displays as its name only; the pinned revision is elided.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str(self.name.as_str())
    }
}
/// Resolves a revspec (branch name, tag, abbreviated hash, ...) to the full
/// git object id as a hex string.
fn resolve_rev(rev: &str, repo: &Repository) -> Result<String> {
    Ok(repo
        .revparse_single(rev)
        .context(RevError { rev })?
        .id()
        .to_string())
}
/// Ordered collection of branches under scan.
#[derive(Default, Clone)]
pub struct Branches {
    // Branch specs in the order they were given.
    specs: Vec<Branch>,
    // Git repository to scan; `None` when results are only loaded from disk.
    repo: Option<PathBuf>,
}
impl Branches {
    /// List of branches without git repository. Used when loading previous scan results
    /// from directory.
    ///
    /// # Errors
    /// Fails with `DuplicateBranch` if the same branch name appears twice.
    pub fn init(specs: &[Branch]) -> Result<Self, Error> {
        let b = Self {
            specs: specs.to_owned(),
            ..Default::default()
        };
        // O(n^2) scan is fine: the number of branches is tiny.
        for (idx, elem) in specs.iter().enumerate() {
            if specs.iter().skip(idx + 1).any(|s| s.name == elem.name) {
                return (DuplicateBranch {
                    name: elem.name.as_str(),
                })
                .fail();
            }
        }
        Ok(b)
    }
    /// List of branches with associated git repository. Used when scanning from source.
    ///
    /// Resolves every branch's `rev` to a full commit id via the repository.
    pub fn with_repo(specs: &[Branch], repo: &Path) -> Result<Self> {
        let mut bs = Branches {
            repo: Some(repo.to_owned()),
            ..Branches::init(specs)?
        };
        let repo = Repository::open(repo).context(RepositoryError)?;
        // `iter_mut` already yields `&mut Branch`; the former `for mut b`
        // binding was redundant (clippy: unused_mut-style noise).
        for b in bs.specs.iter_mut() {
            b.rev = SmallString::from(resolve_rev(b.rev.as_str(), &repo)?);
        }
        Ok(bs)
    }
    /// Reads previous scan results from a directory
    pub fn load(&self, dir: &Path) -> Result<ScanByBranch> {
        println!(
            "{} {}",
            "* Loading scan results from".green(),
            dir.display().to_string().yellow()
        );
        let mut sbb = ScanByBranch::new();
        for branch in self.iter() {
            let fname = branch.vulnix_json(dir);
            let f = File::open(&fname).context(OpenError { fname })?;
            sbb.insert(
                branch.clone(),
                serde_json::from_reader(f).context(JSONParseError)?,
            );
        }
        Ok(sbb)
    }
    /// Checks out all specified branches in turn, instantiates the release derivation and invokes
    /// vulnix on it.
    pub fn scan(&self, opt: &Opt) -> Result<ScanByBranch> {
        let repo = self
            .repo
            .as_ref()
            .expect("Bug: attempted to scan unspecified repository");
        let dir = opt.iterdir();
        // Best effort: the iteration directory may already exist.
        create_dir(&dir).ok();
        let mut sbb = ScanByBranch::new();
        for branch in self.iter() {
            branch.checkout(repo)?;
            let drv = branch.instantiate(repo)?;
            let scan = branch.vulnix(drv, opt)?;
            branch.save(&scan, &dir)?;
            sbb.insert(branch.clone(), scan);
        }
        Ok(sbb)
    }
}
impl Deref for Branches {
    type Target = [Branch];
    /// Lets a `Branches` collection be used directly as a slice of specs.
    fn deref(&self) -> &Self::Target {
        self.specs.as_slice()
    }
}
// === Tests ===
#[cfg(test)]
mod test {
    use super::*;
    use crate::tests::create_branches;
    use libflate::gzip;
    use std::error::Error;
    use std::fs::read_to_string;
    use tar::Archive;
    use tempdir::TempDir;
    // Spec parsing: bare name, name=hash, name=remote-ref.
    #[test]
    fn correct_branchspecs() {
        assert_eq!(
            Branch::from_str("nixos-18.09").unwrap(),
            Branch {
                name: "nixos-18.09".into(),
                rev: "nixos-18.09".into()
            }
        );
        assert_eq!(
            Branch::from_str("nixos-18.09=55f4cd48").unwrap(),
            Branch {
                name: "nixos-18.09".into(),
                rev: "55f4cd48".into()
            }
        );
        assert_eq!(
            Branch::from_str("nixos-18.09=origin/release-18.09").unwrap(),
            Branch {
                name: "nixos-18.09".into(),
                rev: "origin/release-18.09".into()
            }
        );
    }
    #[test]
    fn branchspec_invalid_chars() {
        assert!(Branch::from_str("nixos 18.09").is_err());
        assert!(Branch::from_str("origin/nixos-18.09").is_err());
    }
    #[test]
    fn branchspec_empty() {
        assert!(Branch::from_str("nixos=").is_err());
        assert!(Branch::from_str("=abcdefg").is_err());
    }
    #[test]
    fn no_duplicate_branch_names() {
        assert!(create_branches(&["a", "b"]).is_ok());
        assert!(create_branches(&["a", "b", "a"]).is_err());
    }
    // Loads fixture scan results; counts per branch come from the fixtures.
    #[test]
    fn load_json() {
        let dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("fixtures/iterations/1");
        let branches = create_branches(&["nixos-18.03", "nixos-18.09", "nixos-unstable"]).unwrap();
        let res = branches.load(&dir).unwrap();
        assert_eq!(res.len(), 3);
        assert_eq!(res[0].len(), 2);
        assert_eq!(res[1].len(), 4);
        assert_eq!(res[2].len(), 3);
    }
    // Round-trip: save() output must parse back to the same results.
    #[test]
    fn write_json() {
        let dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("fixtures/iterations/1");
        let branches = create_branches(&["nixos-18.09"]).unwrap();
        let res = branches.load(&dir).unwrap();
        let td = TempDir::new("test_write_json").unwrap();
        branches[0].save(&res[0], td.path()).unwrap();
        let reread: Vec<VulnixRes> =
            serde_json::from_reader(File::open(td.path().join("vulnix.nixos-18.09.json")).unwrap())
                .unwrap();
        assert_eq!(res[0], reread);
    }
    /// Little shell script which unconditionally writes the contents of
    /// fixtures/iterations/1/vulnix.nixos-18.09.json to stdout
    fn fake_vulnix() -> PathBuf {
        Path::new(env!("CARGO_MANIFEST_DIR")).join("fixtures/fake_vulnix")
    }
    // vulnix() must parse exactly what the (faked) scanner prints.
    #[test]
    fn run_vulnix() {
        let mut opt = Opt::default();
        opt.vulnix = fake_vulnix();
        opt.reldir = Path::new(env!("CARGO_MANIFEST_DIR")).join("fixtures/iterations");
        opt.iteration = 1;
        let b = Branch::new("nixos-18.09.json");
        let res = b.vulnix(PathBuf::new(), &opt).unwrap();
        let exp: Vec<VulnixRes> = serde_json::from_str(
            &read_to_string(opt.iterdir().join("vulnix.nixos-18.09.json")).unwrap(),
        )
        .unwrap();
        assert_eq!(res, exp);
    }
    // Resolves abbreviated hash, branch head, and a second branch from a
    // fixture repository shipped as a tarball.
    #[test]
    fn resolve_branches() -> Result<(), Box<dyn Error>> {
        let tmp = TempDir::new("test_resolve_branches")?;
        let tarball = Path::new(env!("CARGO_MANIFEST_DIR")).join("fixtures/repo.tar.gz");
        Archive::new(gzip::Decoder::new(File::open(tarball)?)?).unpack(&tmp)?;
        let repo = Repository::open(&tmp.path().join("repo"))?;
        assert_eq!(
            "117a41dff30a62f2e4ef68c7e237ed497150b6dd",
            resolve_rev("117a41d", &repo)?
        );
        assert_eq!(
            "8dfec1442bf901fbfc09572ae0ea58d5ce8b4462",
            resolve_rev("master", &repo)?
        );
        assert_eq!(
            "12fe4b957c99f41b0885021599b445ac4a02623a",
            resolve_rev("b1", &repo)?
        );
        Ok(())
    }
}
|
pub use self::matrix::*;
pub use self::matrix_4x4::*;
pub use self::matrix_result::*;
pub use self::matrix_support::*;
pub use self::quaternion::*;
pub use self::support::*;
pub use self::vector::*;
// Module declarations kept in the same (alphabetical) order as the
// re-exports above; previously `matrix_result` preceded `matrix_4x4`.
mod matrix;
mod matrix_4x4;
mod matrix_result;
mod matrix_support;
mod quaternion;
mod support;
mod vector;
/// Inverse functionality is necessary for quaternions and matrices.
pub trait Inverse {
    /// The type produced by inversion (left flexible per implementor).
    type Output;
    /// Returns the inverse of `self`.
    fn inv(&self) -> Self::Output;
} |
use std::io::{self, BufRead};
use std::slice::Iter;
/// Lexical tokens of the expression language.
#[derive(Debug, Clone, Copy)]
enum Token { Plus, Times, OpenParen, CloseParen, Num(u64) }
/// Parsed expression tree.
#[derive(Debug)]
enum Expr { Add(Box<Expr>, Box<Expr>), Multiply(Box<Expr>, Box<Expr>), Bracketed(Box<Expr>), Num(u64) }
/// Splits one input line into tokens. Spaces (and any unrecognized
/// characters) are silently skipped; digit runs form multi-digit numbers.
fn lex(line: String) -> Vec<Token> {
    // One-character pushback: a digit run only ends after reading one char
    // too many, which must be re-examined on the next loop iteration.
    let mut pending: Option<char> = None;
    let mut chars = line.chars();
    let mut tokens: Vec<Token> = Vec::new();
    while let Some(c) = pending.take().or_else(|| chars.next()) {
        match c {
            '+' => tokens.push(Token::Plus),
            '*' => tokens.push(Token::Times),
            '(' => tokens.push(Token::OpenParen),
            ')' => tokens.push(Token::CloseParen),
            digit if digit.is_digit(10) => {
                let mut value: u64 = 0;
                let mut current = Some(digit);
                while let Some(d) = current.and_then(|ch| ch.to_digit(10)) {
                    value = value * 10 + (d as u64);
                    current = chars.next();
                }
                tokens.push(Token::Num(value));
                pending = current;
            }
            _ => {} // spaces and anything unrecognized are ignored
        }
    }
    tokens
}
/// Parses one unary item: a number literal or a parenthesized sub-expression.
/// Panics on any other (or missing) token.
fn parse_un(lexed: &mut Expectant) -> Expr {
    match next(lexed) {
        Some(Token::Num(n)) => Expr::Num(n),
        Some(Token::OpenParen) => {
            let inner = Expr::Bracketed(Box::new(parse(lexed)));
            // consume the matching ')'
            expect(lexed, |tok| matches!(tok, Token::CloseParen));
            inner
        }
        Some(_) => panic!("what."),
        None => panic!("nhat."),
    }
}
/// Parses a full part-1 expression: one item followed by its operator tail.
fn parse(lexed: &mut Expectant) -> Expr {
    let lhs = parse_un(lexed);
    parse_bin(lhs, lexed)
}
/// Left-associative tail parse; `+` and `*` share one precedence level.
fn parse_bin(lhs: Expr, lexed: &mut Expectant) -> Expr {
    if expect(lexed, |tok| matches!(tok, Token::Plus)) {
        let rhs = parse_un(lexed);
        parse_bin(Expr::Add(Box::new(lhs), Box::new(rhs)), lexed)
    } else if expect(lexed, |tok| matches!(tok, Token::Times)) {
        let rhs = parse_un(lexed);
        parse_bin(Expr::Multiply(Box::new(lhs), Box::new(rhs)), lexed)
    } else {
        lhs
    }
}
// this is a utility to make the traditional "consume-next-token-if-it-looks-like-X" device
/// Token cursor with a one-token lookahead buffer.
struct Expectant<'a> {
    iter: &'a mut Iter<'a, Token>,
    // Token that was peeked at but not yet consumed, if any.
    current_tok: Option<Token>
}
/// Pops the next token: the buffered one if present, otherwise the iterator's.
fn next(lexed: &mut Expectant) -> Option<Token> {
    lexed.current_tok.take().or_else(|| lexed.iter.next().copied())
}
/// Consumes the next token iff predicate `f` accepts it, returning whether it
/// did. On mismatch (or exhaustion) the token stays buffered for later calls.
fn expect<F>(lexed: &mut Expectant, f: F) -> bool
where F: FnOnce(Token) -> bool
{
    if lexed.current_tok.is_none() {
        lexed.current_tok = lexed.iter.next().copied();
    }
    match lexed.current_tok {
        Some(tok) if f(tok) => {
            lexed.current_tok = None;
            true
        }
        _ => false,
    }
}
/// Part-2 primary: a number literal or a bracketed sub-expression.
fn parse2_n(lexed : &mut Expectant) -> Expr {
    if let Some(tok) = next(lexed) {
        match tok {
            Token::Num(n) => {
                return Expr::Num(n);
            },
            Token::OpenParen => {
                // inside brackets we restart at the lowest level (products)
                let expr = Expr::Bracketed(Box::new(parse2_products(lexed)));
                expect(lexed, |tok| match tok { Token::CloseParen => true, _ => false });
                return expr;
            },
            _ => panic!("not a number")
        }
    }
    panic!("no number");
}
/// Consumes `+ <sums>` if a plus follows; `None` otherwise.
fn parse2_next_n(lexed : &mut Expectant) -> Option<Expr> {
    if expect(lexed, |tok| match tok { Token::Plus => true, _ => false }) {
        return Some(parse2_sums(lexed));
    }
    return None;
}
/// Left-folds a run of `+` terms (addition binds tighter in part 2).
fn parse2_sums(lexed : &mut Expectant) -> Expr {
    let mut n = parse2_n(lexed);
    while let Some(n2) = parse2_next_n(lexed) {
        n = Expr::Add(Box::new(n), Box::new(n2));
    }
    return n;
}
/// Consumes `* <products>` if a times follows; `None` otherwise.
fn parse2_next_sums(lexed : &mut Expectant) -> Option<Expr> {
    if expect(lexed, |tok| match tok { Token::Times => true, _ => false }) {
        return Some(parse2_products(lexed));
    }
    return None;
}
/// Lowest part-2 level: a product of sums.
fn parse2_products(lexed : &mut Expectant) -> Expr {
    let mut sum = parse2_sums(lexed);
    while let Some(sum2) = parse2_next_sums(lexed) {
        sum = Expr::Multiply(Box::new(sum), Box::new(sum2));
    }
    return sum;
}
/// Recursively reduces an expression tree to its numeric value.
fn eval(expr: Expr) -> u64 {
    match expr {
        Expr::Add(lhs, rhs) => eval(*lhs) + eval(*rhs),
        Expr::Multiply(lhs, rhs) => eval(*lhs) * eval(*rhs),
        Expr::Bracketed(inner) => eval(*inner),
        Expr::Num(n) => n,
    }
}
/* I thought doing something in an utterly unprincipled ad-hoc way
 * might actually be faster. And then I thought it might be fun to
 * try.
 *
 * Welcome to lol.
 */
/// A token tree: parenthesized groups become nested `Bracketed` nodes,
/// everything else stays a flat `Token`.
#[derive(Debug, Clone)]
enum ExprLol { Bracketed(Vec<ExprLol>), Token(Token) }
/// Converts a flat token stream into a token tree, recursing at `(` and
/// returning to the caller at `)`.
fn unparen_lol(tokens : &mut Iter<Token>) -> Vec<ExprLol> {
    let mut result = Vec::new();
    while let Some(t) = tokens.next() {
        match t {
            Token::OpenParen => {
                result.push(ExprLol::Bracketed(unparen_lol(tokens)));
            },
            Token::CloseParen => {
                // end of this bracketed group; the caller resumes after it
                break;
            },
            _ => {
                result.push(ExprLol::Token(*t));
            }
        }
    }
    return result;
}
/// Value of the operand at `index`: a literal number or a recursively
/// evaluated bracketed subtree. Panics if an operator sits there instead.
fn eval_lol_at(tree : &Vec<ExprLol>, index : usize) -> u64 {
    return match &tree[index] {
        ExprLol::Token(Token::Num(num)) => *num,
        ExprLol::Bracketed(subtree) => eval_lol(&subtree),
        _ => panic!("lol")
    };
}
/// Evaluates a token tree strictly left-to-right (`+` and `*` at equal
/// precedence), walking operand/operator pairs two slots at a time.
fn eval_lol(tree : &Vec<ExprLol>) -> u64 {
    let mut n = eval_lol_at(tree, 0);
    let mut idx = 1;
    while idx < tree.len() {
        match tree[idx] {
            ExprLol::Token(Token::Plus) => {
                n += eval_lol_at(tree, idx + 1);
                idx += 2;
            },
            ExprLol::Token(Token::Times) => {
                n *= eval_lol_at(tree, idx + 1);
                idx += 2;
            },
            _ => panic!("lol")
        };
    }
    return n;
}
/// Rewrites a token tree so that every `+` application is wrapped in an
/// explicit bracket, which gives addition precedence over multiplication
/// when the result is fed to the left-to-right `eval_lol`.
fn precedence_lol(tree : &Vec<ExprLol>) -> Vec<ExprLol> {
    let mut results = Vec::new();
    let mut index = 1;
    // Recurse into brackets first. The rebuilt tree is never mutated
    // afterwards, so the former `mut` binding was an unused-mut warning.
    let tree: Vec<ExprLol> = tree.iter().map(|t| match t {
        ExprLol::Bracketed(nexts) => ExprLol::Bracketed(precedence_lol(&nexts)),
        token => token.clone()
    }).collect();
    results.push(tree[0].clone());
    while index < tree.len() {
        let next = &tree[index + 1];
        match &tree[index] {
            ExprLol::Token(Token::Plus) => {
                // Fuse `last + next` into one bracketed sub-expression.
                let last = results.pop().unwrap();
                results.push(ExprLol::Bracketed(vec![last, ExprLol::Token(Token::Plus), next.clone()]));
            },
            ExprLol::Token(Token::Times) => {
                results.push(tree[index].clone());
                results.push(next.clone());
            },
            _ => panic!("lol")
        }
        index += 2;
    }
    results
}
/*
* </lol>
*/
/// Reads one expression per stdin line and prints four totals: the part-1
/// and part-2 answers via the recursive-descent parsers, then the same two
/// answers again via the ad-hoc "lol" token-tree evaluators.
fn main() {
    let mut sum = 0;
    let mut sum2 = 0;
    let mut sumlol = 0;
    let mut sum2lol = 0;
    for wrapped_line in io::stdin().lock().lines() {
        let line = wrapped_line.unwrap();
        let lexed = lex(line.clone());
        // part 1: '+' and '*' at equal precedence
        let mut ex2 = Expectant { iter: &mut lexed.iter(), current_tok: None };
        let parsed = parse(&mut ex2);
        let evaled = eval(parsed);
        sum += evaled;
        sumlol += eval_lol(&unparen_lol(&mut lexed.iter()));
        // part 2: '+' binds tighter than '*'
        let mut ex2 = Expectant { iter: &mut lexed.iter(), current_tok: None };
        let parsed2 = parse2_products(&mut ex2);
        let evaled2 = eval(parsed2);
        sum2 += evaled2;
        sum2lol += eval_lol(&precedence_lol(&unparen_lol(&mut lexed.iter())));
    }
    println!("{}", sum);
    println!("{}", sum2);
    println!("{}", sumlol);
    println!("{}", sum2lol);
}
|
extern crate utils;
use std::env;
use std::collections::HashSet;
use std::str::FromStr;
use std::num::ParseIntError;
use std::io::{self, BufReader};
use std::io::prelude::*;
use std::fs::File;
use utils::*;
type Input = Vec<Instruction>;
/// The three handheld-console operations (AoC 2020 day 8).
#[derive(Eq, PartialEq, Copy, Clone, Debug)]
enum OpCode {
    Nop, Acc, Jmp
}
/// One instruction: opcode plus signed argument.
#[derive(Copy, Clone, Debug)]
struct Instruction {
    opcode: OpCode,
    arg: i32
}
/// Interpreter state over a borrowed program.
#[derive(Debug)]
struct Console <'a> {
    instructions: &'a Vec<Instruction>,
    pc: usize,
    accumulator: i32
}
impl<'a> Console<'a> {
    /// Fresh console at the first instruction with a zeroed accumulator.
    fn from_program(instructions: &'a Vec<Instruction>) -> Self {
        Console { instructions, pc: 0, accumulator: 0 }
    }
    /// Executes the instruction at `pc` and advances the program counter.
    fn step(&mut self) {
        let Instruction { opcode, arg } = self.instructions[self.pc];
        match opcode {
            OpCode::Nop => {
                self.pc += 1;
            },
            OpCode::Acc => {
                self.accumulator += arg;
                self.pc += 1;
            },
            OpCode::Jmp => {
                // BUGFIX: the former `self.pc += arg as usize` sign-extended
                // negative args to a huge usize and overflowed — a panic in
                // debug builds. Do the arithmetic in i64 instead.
                self.pc = (self.pc as i64 + i64::from(arg)) as usize;
            }
        }
    }
    /// Runs until an instruction is about to execute a second time (loop
    /// detected) or `pc` leaves the program (normal termination).
    fn run_until_loop_or_end(&mut self) {
        let mut visited = HashSet::new();
        while !visited.contains(&self.pc) && self.pc < self.instructions.len() {
            visited.insert(self.pc);
            self.step();
        }
    }
}
/// Part 1: accumulator value the moment the program first loops (or ends).
fn part1(input: &Input) -> i32 {
    let mut console = Console::from_program(input);
    console.run_until_loop_or_end();
    console.accumulator
}
/// Part 2: flip exactly one `jmp`<->`nop` so execution runs off the end of
/// the program, and return the resulting accumulator. Returns 0 if no
/// single flip fixes the program.
fn part2(input: &Input) -> i32 {
    /// The candidate replacement opcode; `Acc` is never swapped.
    fn swap_opcode(opcode: OpCode) -> Option<OpCode> {
        match opcode {
            OpCode::Jmp => Some(OpCode::Nop),
            OpCode::Nop => Some(OpCode::Jmp),
            _ => None
        }
    }
    let mut program = input.clone();
    for i in 0..input.len() {
        let original = program[i];
        let swapped = match swap_opcode(original.opcode) {
            Some(opcode) => Instruction { opcode, arg: original.arg },
            None => continue,
        };
        program[i] = swapped;
        let mut console = Console::from_program(&program);
        console.run_until_loop_or_end();
        if console.pc >= input.len() {
            return console.accumulator;
        }
        // Undo the flip before trying the next candidate.
        program[i] = original;
    }
    0
}
/// Reads the program from the CLI-given file and prints both answers,
/// wrapped in the `utils::measure` timer.
fn main() {
    measure(|| {
        let input = input().expect("Input failed");
        println!("Part1: {}", part1(&input));
        println!("Part2: {}", part2(&input));
    });
}
impl FromStr for OpCode {
    type Err = ();
    /// Maps the three mnemonic strings to opcodes; anything else is `Err(())`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let opcode = match s {
            "nop" => OpCode::Nop,
            "acc" => OpCode::Acc,
            "jmp" => OpCode::Jmp,
            _ => return Err(()),
        };
        Ok(opcode)
    }
}
impl FromStr for Instruction {
    type Err = ParseIntError;
    /// Parses `"<opcode> <arg>"`, e.g. `"acc +3"`.
    ///
    /// A missing field or unknown opcode still panics (trusted puzzle input),
    /// but a malformed integer argument now surfaces as `Err(ParseIntError)`
    /// instead of panicking via `unwrap`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split_ascii_whitespace();
        let opcode = parts
            .next()
            .and_then(|s| s.parse::<OpCode>().ok())
            .expect("missing or unknown opcode");
        let arg = parts.next().expect("missing argument").parse::<i32>()?;
        Ok(Instruction { opcode, arg })
    }
}
/// Parses one instruction per line from `reader`.
///
/// I/O errors are now propagated through the `io::Result` (previously they
/// panicked via `unwrap`); a malformed line still panics, as puzzle input
/// is trusted.
fn read_input<R: Read>(reader: BufReader<R>) -> io::Result<Input> {
    reader
        .lines()
        .map(|line| Ok(line?.parse::<Instruction>().expect("invalid instruction")))
        .collect()
}
/// Opens the file named by the first CLI argument and parses the program.
fn input() -> io::Result<Input> {
    // `nth(1)` replaces the former `skip(1).next()` (clippy: iter_skip_next).
    let path = env::args().nth(1).expect("No input file given");
    read_input(BufReader::new(File::open(path)?))
}
#[cfg(test)]
mod tests {
    use super::*;
    // Example program from the puzzle statement.
    const INPUT: &'static str =
"nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6";
    // Trims per-line indentation so the raw literal above parses cleanly.
    fn as_input(s: &str) -> Input {
        read_input(BufReader::new(s.split('\n').map(|s| s.trim()).collect::<Vec<_>>().join("\n").as_bytes())).unwrap()
    }
    #[test]
    fn test_part1() {
        assert_eq!(part1(&as_input(INPUT)), 5);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(&as_input(INPUT)), 8);
    }
}
|
use std::collections::HashSet;
use std::iter::FromIterator;
use general::TryReader;
use sourcecode::Code;
use sourcecode::Span;
use token::Operator;
use token::Token;
use parse::SyntaxTree;
use parse::BinaryOperation;
use parse::Add;
/// A chain of additive operands joined by comparison operators
/// (`<`, `<=`, `>`, `>=`); see `Relational::operators`.
pub struct Relational {
    binary_operation: BinaryOperation<Add>,
}
impl Relational {
    /// First additive operand of the chain.
    pub fn head(&self) -> &Add {
        self.binary_operation.head()
    }
    /// Remaining `(operator, operand)` pairs, in source order.
    pub fn tail(&self) -> impl Iterator<Item = (&Code<Operator>, &Add)> {
        self.binary_operation.tail()
    }
    /// The comparison operators accepted at this grammar level.
    fn operators() -> HashSet<Operator> {
        HashSet::from_iter(vec![
            Operator::Less,
            Operator::LessEq,
            Operator::Greater,
            Operator::GreaterEq,
        ].into_iter())
    }
}
impl SyntaxTree for Relational {
    /// Parses a relational chain from the token stream, advancing the reader
    /// past consumed tokens.
    fn parse(token_reader: &mut TryReader<Code<Token>>)
    -> Result<Relational, (Option<Span>, String)> {
        // `token_reader` is already a mutable reference; the former
        // `mut token_reader` binding plus `&mut token_reader` re-borrow
        // was redundant.
        BinaryOperation::parse(token_reader, &Self::operators())
            .map(|binary_operation| Relational {binary_operation})
    }
    /// Source span covered by the whole chain.
    fn span(&self) -> Span {
        self.binary_operation.span()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use token::tokenize;
    #[test]
    fn test_parse_relational() {
        // "3 < 5 <= 1": head is `3`, tail yields (`<`, 5) then (`<=`, 1).
        let src = "3 < 5 <= 1";
        let tokens = tokenize(&src.to_string()).unwrap();
        let mut token_reader = TryReader::new(&tokens);
        let relational = Relational::parse(&mut token_reader).unwrap();
        let mut tail = relational.tail();
        assert_eq!(tail.next().unwrap().0.value, Operator::Less);
        assert_eq!(tail.next().unwrap().0.value, Operator::LessEq);
    }
} |
// Crate module map; names suggest an FTP utility: client session logic,
// command definitions, FTP I/O helpers, and misc utilities
// (NOTE(review): inferred from names — confirm against module contents).
pub mod client;
pub mod cmd;
pub mod ftpio;
pub mod util; |
// svd2rust-style generated register accessors for WAIT_CTL; edit the SVD,
// not this file, if fields change.
#[doc = "Reader of register WAIT_CTL"]
pub type R = crate::R<u32, super::WAIT_CTL>;
#[doc = "Writer for register WAIT_CTL"]
pub type W = crate::W<u32, super::WAIT_CTL>;
#[doc = "Register WAIT_CTL `reset()`'s with value 0x0003_0b09"]
impl crate::ResetValue for super::WAIT_CTL {
    type Type = u32;
    // Hardware reset: WAIT_FM_MEM_RD=0x9, WAIT_FM_HV_RD=0xb, WAIT_FM_HV_WR=0x3.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0003_0b09
    }
}
#[doc = "Reader of field `WAIT_FM_MEM_RD`"]
pub type WAIT_FM_MEM_RD_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WAIT_FM_MEM_RD`"]
pub struct WAIT_FM_MEM_RD_W<'a> {
    w: &'a mut W,
}
impl<'a> WAIT_FM_MEM_RD_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 0:3 — clear, then merge the masked value.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
#[doc = "Reader of field `WAIT_FM_HV_RD`"]
pub type WAIT_FM_HV_RD_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WAIT_FM_HV_RD`"]
pub struct WAIT_FM_HV_RD_W<'a> {
    w: &'a mut W,
}
impl<'a> WAIT_FM_HV_RD_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 8:11.
        self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);
        self.w
    }
}
#[doc = "Reader of field `WAIT_FM_HV_WR`"]
pub type WAIT_FM_HV_WR_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WAIT_FM_HV_WR`"]
pub struct WAIT_FM_HV_WR_W<'a> {
    w: &'a mut W,
}
impl<'a> WAIT_FM_HV_WR_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Field occupies bits 16:18 (only 3 bits wide).
        self.w.bits = (self.w.bits & !(0x07 << 16)) | (((value as u32) & 0x07) << 16);
        self.w
    }
}
// Field read accessors: each extracts and right-aligns its bits from the
// captured register value.
impl R {
    #[doc = "Bits 0:3 - Number of C interface wait cycles (on 'clk_c') for a read from the memory"]
    #[inline(always)]
    pub fn wait_fm_mem_rd(&self) -> WAIT_FM_MEM_RD_R {
        WAIT_FM_MEM_RD_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 8:11 - Number of C interface wait cycles (on 'clk_c') for a read from the high Voltage page latches. Common for reading HV Page Latches and the DATA_COMP_RESULT bit"]
    #[inline(always)]
    pub fn wait_fm_hv_rd(&self) -> WAIT_FM_HV_RD_R {
        WAIT_FM_HV_RD_R::new(((self.bits >> 8) & 0x0f) as u8)
    }
    #[doc = "Bits 16:18 - Number of C interface wait cycles (on 'clk_c') for a write to the high Voltage page latches."]
    #[inline(always)]
    pub fn wait_fm_hv_wr(&self) -> WAIT_FM_HV_WR_R {
        WAIT_FM_HV_WR_R::new(((self.bits >> 16) & 0x07) as u8)
    }
}
// Field write accessors: each returns a proxy that masks its value into the
// correct bit position of the register.
impl W {
    #[doc = "Bits 0:3 - Number of C interface wait cycles (on 'clk_c') for a read from the memory"]
    #[inline(always)]
    pub fn wait_fm_mem_rd(&mut self) -> WAIT_FM_MEM_RD_W {
        WAIT_FM_MEM_RD_W { w: self }
    }
    #[doc = "Bits 8:11 - Number of C interface wait cycles (on 'clk_c') for a read from the high Voltage page latches. Common for reading HV Page Latches and the DATA_COMP_RESULT bit"]
    #[inline(always)]
    pub fn wait_fm_hv_rd(&mut self) -> WAIT_FM_HV_RD_W {
        WAIT_FM_HV_RD_W { w: self }
    }
    #[doc = "Bits 16:18 - Number of C interface wait cycles (on 'clk_c') for a write to the high Voltage page latches."]
    #[inline(always)]
    pub fn wait_fm_hv_wr(&mut self) -> WAIT_FM_HV_WR_W {
        WAIT_FM_HV_WR_W { w: self }
    }
}
|
extern crate docker_rs;
use docker_rs::api::containers::Containers;
use docker_rs::api::version::Version;
use docker_rs::client::DockerClient;
use std::process::exit;
// Integration smoke test: requires a reachable local Docker daemon.
#[test]
fn test() {
    // Exit(1) rather than panic if the daemon socket cannot be opened.
    let client = match DockerClient::new("unix:///var/run/docker.sock") {
        Ok(a) => a,
        Err(err) => {
            println!("{}", err);
            exit(1);
        }
    };
    // The client is clonable; the clone is unused beyond demonstrating it.
    let _new_client = client.clone();
    let info = client.get_version_info();
    println!("{:?}", info);
    let all_containers = client.list_all_containers(None).unwrap();
    println!("{:?}", all_containers);
    let running_cont = client.list_running_containers(None).unwrap();
    println!("{:?}", running_cont);
    let mut cmd: Vec<String> = Vec::new();
    cmd.push("ls".to_string());
    // Create (but do not start) a minimal container that would run `ls`.
    let res = client
        .create_container_minimal("kk", "debian:jessie", cmd)
        .unwrap();
    println!("{:?}", res);
}
|
// order of operations defined at
// https://www.lua.org/manual/5.3/manual.html#3.4.8
// ^ (exponent)
// % // / * (multop)
// - + (plusop)
// .. (concat)
// >> << (bitshift)
// & (bitand)
// ~ (bitnot)
// | (bitor)
// == ~= >= <= > < (compareop)
// and (logicand)
// or (logicor)
use super::ast;
use lex::IStream;
use parse::expr::expr_consume;
use parse::{alt, kwd, many0, pair};
use parse::nom::IResult;
/// Outcome of a binary-operation parse: remaining stream plus expression.
type EResult<'a, 'b> = IResult<&'b IStream<'a>, ast::Expr<'a>>;
/// Entry point for parsing binary operations: start at the
/// lowest-precedence level (`or`); each level parses the next-tighter
/// level for its operands, walking the cascade listed in the header above.
pub fn parse_binops<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    logicor(i)
}
/// `^` level — the tightest-binding binary operator in the cascade.
pub fn exponent<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = expr_consume(i)?;
    let (i, binops) = many0(pair(
        kwd("^"),
        // this one feels pretty wrong...
        maybe_unaryop,
    ))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// Consumes one unary-operator keyword (`#`, `-`, `not`, `~`) and returns
/// its keyword text.
fn unary_op<'a, 'b>(i: &'b IStream<'a>) -> IResult<&'b IStream<'a>, String> {
    // helper function
    let (i, result) = alt((kwd("#"), kwd("-"), kwd("not"), kwd("~")))(i)?;
    use lex::LexValue::Keyword;
    match result {
        Lex {
            val: Keyword(k), ..
        } => {
            return Ok((i, k));
        }
        _ => {
            // `kwd` only ever yields Keyword lexemes
            panic!("Impossible code path");
        }
    }
}
/// Parses zero or more unary operators applied to an exponent-level operand.
fn maybe_unaryop<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    // not # - ~
    let (i, mut unops) = many0(unary_op)(i)?;
    let (i, inner_expr) = exponent(i)?;
    let mut return_result = inner_expr;
    // we need to apply unary ops in reverse order from their
    // parsing
    unops.reverse();
    for unop in unops {
        return_result = ast::Expr::UnOp(unop, Box::new(return_result));
    }
    return Ok((i, return_result));
}
/// `%  //  /  *` level.
fn multop<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = maybe_unaryop(i)?;
    let (i, binops) = many0(pair(
        alt((kwd("%"), kwd("//"), kwd("/"), kwd("*"))),
        maybe_unaryop,
    ))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// `-  +` level.
fn plusop<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = multop(i)?;
    let (i, binops) = many0(pair(alt((kwd("-"), kwd("+"))), multop))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// `..` level. Note: the right operand recurses into `concat` itself, but
/// `fold_binops` still combines left-associatively.
fn concat<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = plusop(i)?;
    let (i, binops) = many0(pair(kwd(".."), concat))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// `>>  <<` level.
fn bitshift<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = concat(i)?;
    let (i, binops) = many0(pair(alt((kwd(">>"), kwd("<<"))), bitshift))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// `&` level.
fn bitand<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = bitshift(i)?;
    let (i, binops) = many0(pair(kwd("&"), bitand))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// binary `~` level.
fn bitnot<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = bitand(i)?;
    let (i, binops) = many0(pair(kwd("~"), bitnot))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// `|` level.
fn bitor<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = bitnot(i)?;
    let (i, binops) = many0(pair(kwd("|"), bitor))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// `==  ~=  >=  <=  >  <` level.
fn compareop<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = bitor(i)?;
    let (i, binops) = many0(pair(
        alt((
            kwd("=="),
            kwd("~="),
            kwd(">="),
            kwd("<="),
            kwd(">"),
            kwd("<"),
        )),
        compareop,
    ))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// `and` level.
fn logicand<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = compareop(i)?;
    let (i, binops) = many0(pair(kwd("and"), logicand))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
/// `or` level — the loosest-binding operator; `parse_binops` starts here.
fn logicor<'a, 'b>(i: &'b IStream<'a>) -> EResult<'a, 'b> {
    let (i, first_expr) = logicand(i)?;
    let (i, binops) = many0(pair(kwd("or"), logicor))(i)?;
    return Ok((i, fold_binops(first_expr, binops)));
}
// `Lex` is only needed from here on, for `fold_binops`' signature.
use lex::Lex;
/// Combines a head expression with its `(operator, operand)` tail into a
/// left-associative tree: `((first op1 e1) op2 e2) ...`.
fn fold_binops<'a>(
    first_expr: ast::Expr<'a>,
    binops: Vec<(Lex<'a>, ast::Expr<'a>)>,
) -> ast::Expr<'a> {
    // A fold expresses the left-associative build-up directly and removes
    // the former non-idiomatic `binops.len() == 0` emptiness check.
    binops.into_iter().fold(first_expr, |acc, (operator, right_expr)| {
        ast::Expr::BinOp(
            Box::new(acc),
            operator.val,
            Box::new(right_expr),
        )
    })
}
#[cfg(test)]
mod test {
    use super::*;
    use ast::Expr;
    use nom_locate::LocatedSpan;
    use parse::expr;
    use parse::try_specific_parse;
    use pretty_assertions::assert_eq;
    // Ignored: full-pipeline parse of "a+b+c" / "-a-b-c".
    #[ignore]
    #[test]
    fn test_parse_binop() {
        let result_expr = try_specific_parse(expr, "a+b+c").unwrap();
        let expected = Expr::binop(
            Expr::binop(Expr::name("a"), "+", Expr::name("b")),
            "+",
            Expr::name("c"),
        );
        assert_eq!(expected, result_expr);
        // now try with minuses
        let minus_expr = try_specific_parse(expr, "-a-b-c").unwrap();
        let minus_expected = Expr::binop(
            Expr::binop(Expr::unary("-", Expr::name("a")), "-", Expr::name("b")),
            "-",
            Expr::name("c"),
        );
        assert_eq!(minus_expected, minus_expr);
    }
    #[test]
    fn test_fold_binops() {
        // I want to test here that when folding we're being left
        // associative rather than right associative
        // expression is a + b + c
        // we want (a + b) + c
        // not a + (b + c)
        let a = Expr::new_name("a", LocatedSpan::new(""));
        let b = Expr::new_name("b", LocatedSpan::new(""));
        let c = Expr::new_name("c", LocatedSpan::new(""));
        let result = fold_binops(
            a.clone(),
            vec![
                (Lex::kwd("+".to_string(), None), b.clone()),
                (Lex::kwd("+".to_string(), None), c.clone()),
            ],
        );
        let expected = Expr::binop(Expr::binop(a, "+", b), "+", c);
        assert_eq!(result, expected);
    }
}
|
extern crate crossbeam;
use std::cell::{Cell, RefCell};
use std::sync::Mutex;
use std::sync::{Arc, RwLock};
use std::thread;
use std::borrow::BorrowMut;
/// An example implementation of the type notsync.
/// The std::cell::Cell type does not provide data security when shared across
/// multiple threads, because it allows mutation of its contents even through a
/// permanent shared reference.
mod notsync {
    use super::*;
    /// Struct with Cell data.
    #[derive(Debug, Clone)]
    pub struct Point {
        x: Cell<i32>,
        y: Cell<i32>,
    }
    /// Point methods.
    impl Point {
        /// New Point object.
        pub fn new(x: Cell<i32>, y: Cell<i32>) -> Point {
            // field-init shorthand replaces the redundant `x: x, y: y`
            Point { x, y }
        }
        /// Set method for value x.
        pub fn set_x(&mut self, x: i32) {
            self.x.set(x);
        }
        /// Set method for value y.
        pub fn set_y(&mut self, y: i32) {
            self.y.set(y);
        }
        /// Return value point x.
        pub fn get_x(&self) -> i32 {
            self.x.get()
        }
    }
    #[cfg(test)]
    mod test {
        use notsync::*;
        #[test]
        fn test() {
            let mut point: Point = Point::new(Cell::new(3), Cell::new(3));
            {
                // `ref_point` itself is never reassigned: no `mut` binding needed.
                let ref_point: &mut Point = &mut point;
                crossbeam::scope(|scope_| {
                    scope_
                        .spawn(move || {
                            ref_point.set_x(0);
                        })
                        .join();
                });
            }
            // `thread::sleep_ms` is deprecated; use `sleep` with a Duration.
            thread::sleep(std::time::Duration::from_millis(50));
            assert_eq!(point.get_x(), 0);
        }
    }
}
/// An example implementation of type SyncAndSend.
/// For which it is safe to move the value to a stream and exchange a reference to the data.
mod sync_and_send {
    use super::*;
    /// Struct Point data.
    #[derive(Debug, Clone)]
    pub struct Point {
        pub x: i32,
        pub y: i32,
    }
    /// Point methods.
    impl Point {
        /// New Point object, pre-wrapped for cross-thread sharing.
        pub fn new(x: i32, y: i32) -> Arc<Mutex<Point>> {
            Arc::new(Mutex::new(Point { x, y }))
        }
    }
    #[cfg(test)]
    mod test {
        #[test]
        fn test() {
            use sync_and_send::*;
            // `point` is never rebound; the former `mut` binding was unused.
            let point: Arc<Mutex<Point>> = Point::new(3, 3);
            let clone_point = Arc::clone(&point);
            crossbeam::scope(|scope_| {
                scope_
                    .spawn(move || {
                        // A poisoned lock is silently ignored, as before.
                        if let Ok(mut guard) = clone_point.lock() {
                            guard.x = 0;
                        }
                    })
                    .join();
            });
            // `thread::sleep_ms` is deprecated; use `sleep` with a Duration.
            thread::sleep(std::time::Duration::from_millis(50));
            assert_eq!(point.lock().unwrap().x, 0);
        }
    }
}
/// An example of an implementation of the type onlysync.
/// Not safe operation with raw pointers does not implement
/// the Send and Sync traits by default.
pub mod only_sync {
use super::*;
#[derive(Debug)]
pub struct OnlySync {
pub field: *mut i32,
}
/// Implements Sync trait.
unsafe impl Sync for OnlySync {}
/// OnlySync methods.
impl OnlySync {
/// New OnlySync object.
pub fn new() -> Arc<Mutex<OnlySync>> {
Arc::new(Mutex::new(OnlySync { field: &mut 1 }))
}
}
/// Implements Drop trait.
impl Drop for OnlySync {
fn drop(&mut self) {}
}
}
fn main() {
use only_sync::*;
let mut onlySync: Arc<Mutex<OnlySync>> = OnlySync::new();
use notsync::{self, Point as Point_send};
let mut point: Point_send = Point_send::new(Cell::new(3), Cell::new(3));
{
let mut ref_point: &mut Point_send = &mut point;
crossbeam::scope(|scope_| {
scope_
.spawn(move || {
//ref_point.x.set(0);
//point_clone.x.set(0);
ref_point.set_x(0);
// println!("point_clone={:#?}",point_clone);
})
.join();
});
}
thread::sleep_ms(50);
assert_eq!(point.get_x(), 0);
}
|
use std::cmp;
use vec::{ToVec2, Vec2};
use super::{View, ViewWrapper};
/// `BoxView` is a wrapper around an other view, with a given minimum size.
pub struct BoxView<T: View> {
    // Requested size; a zero component means "no constraint on that axis"
    // (see `wrap_get_min_size`).
    size: Vec2,
    // The wrapped child view.
    view: T,
}
impl<T: View> BoxView<T> {
    /// Creates a new `BoxView` with the given minimum size and content
    ///
    /// # Example
    ///
    /// ```
    /// # use cursive::view::{BoxView,TextView};
    /// // Creates a 20x4 BoxView with a TextView content.
    /// let view = BoxView::new((20,4), TextView::new("Hello!"));
    /// ```
    pub fn new<S: ToVec2>(size: S, view: T) -> Self {
        BoxView {
            size: size.to_vec2(),
            // field-init shorthand replaces the redundant `view: view`
            view,
        }
    }
}
impl<T: View> ViewWrapper for BoxView<T> {
    wrap_impl!(&self.view);
    /// Clamps the size request to our configured size on each fixed
    /// (non-zero) axis, asks the child for its minimum, then overrides the
    /// fixed axes with our own dimensions.
    fn wrap_get_min_size(&mut self, mut req: Vec2) -> Vec2 {
        if self.size.x > 0 {
            req.x = cmp::min(self.size.x, req.x);
        }
        if self.size.y > 0 {
            req.y = cmp::min(self.size.y, req.y);
        }
        let mut size = self.view.get_min_size(req);
        // Did he think he got to decide?
        // Of course we have the last word here.
        if self.size.x > 0 {
            size.x = self.size.x;
        }
        if self.size.y > 0 {
            size.y = self.size.y;
        }
        size
    }
}
|
use std::iter::successors;
use std::time::Instant;
use itertools::Itertools;
// Puzzle input embedded at compile time.
const INPUT: &str = include_str!("../input.txt");
/// One look-and-say round: each maximal run of equal characters becomes
/// `<count><char>` (e.g. "111221" -> "312211"). Empty input stays empty.
///
/// Hand-rolled run-length encoding replaces itertools' `group_by`, which is
/// deprecated (renamed `chunk_by`) and was this function's only itertools use.
fn look_and_say(input: &str) -> String {
    let mut s = String::with_capacity(input.len() * 2);
    let mut chars = input.chars();
    let mut current = match chars.next() {
        Some(c) => c,
        None => return s,
    };
    let mut run = 1usize;
    for c in chars {
        if c == current {
            run += 1;
        } else {
            s.push_str(&run.to_string());
            s.push(current);
            current = c;
            run = 1;
        }
    }
    // flush the final run
    s.push_str(&run.to_string());
    s.push(current);
    s
}
/// Length of the sequence after `n` look-and-say rounds on the puzzle input
/// (shared by both parts; the two functions previously duplicated this
/// pipeline verbatim).
fn nth_length(n: usize) -> usize {
    successors(Some(INPUT.to_string()), |s| Some(look_and_say(s)))
        .nth(n)
        .unwrap()
        .len()
}
/// Part 1: 40 rounds.
fn part1() -> usize {
    nth_length(40)
}
/// Part 2: 50 rounds.
fn part2() -> usize {
    nth_length(50)
}
/// Prints both answers with per-part wall-clock timings.
fn main() {
    let start = Instant::now();
    println!("part 1: {}", part1());
    // `Instant::elapsed` is the idiomatic form of `Instant::now() - start`.
    println!("part 1 took {}ms", start.elapsed().as_millis());
    let start = Instant::now();
    println!("part 2: {}", part2());
    println!("part 2 took {}ms", start.elapsed().as_millis());
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expected values are specific to this puzzle's input.txt.
    #[test]
    fn test_part1() {
        assert_eq!(part1(), 492982);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(), 6989950);
    }
}
|
/// Serialize a value into the front of a caller-provided byte buffer,
/// little-endian. `scratch` must be at least the value's wire size or the
/// slice write panics.
pub trait Marshal {
    fn marshal(&self, scratch: &mut [u8]);
}
impl Marshal for u8 {
    fn marshal(&self, scratch: &mut [u8]) {
        scratch[0] = *self;
    }
}
impl Marshal for u16 {
    fn marshal(&self, scratch: &mut [u8]) {
        // std to_le_bytes replaces the hand-rolled shift-and-mask encoding;
        // byte order is identical, the intent is just explicit.
        scratch[..2].copy_from_slice(&self.to_le_bytes());
    }
}
impl Marshal for u32 {
    fn marshal(&self, scratch: &mut [u8]) {
        scratch[..4].copy_from_slice(&self.to_le_bytes());
    }
}
impl Marshal for u64 {
    fn marshal(&self, scratch: &mut [u8]) {
        scratch[..8].copy_from_slice(&self.to_le_bytes());
    }
}
impl Marshal for i8 {
    fn marshal(&self, scratch: &mut [u8]) {
        scratch[0] = *self as u8;
    }
}
impl Marshal for i16 {
    fn marshal(&self, scratch: &mut [u8]) {
        scratch[..2].copy_from_slice(&self.to_le_bytes());
    }
}
impl Marshal for i32 {
    fn marshal(&self, scratch: &mut [u8]) {
        scratch[..4].copy_from_slice(&self.to_le_bytes());
    }
}
impl Marshal for i64 {
    fn marshal(&self, scratch: &mut [u8]) {
        scratch[..8].copy_from_slice(&self.to_le_bytes());
    }
}
impl Marshal for f32 {
    fn marshal(&self, scratch: &mut [u8]) {
        // IEEE-754 bit pattern, little-endian — same bytes the old
        // to_bits + manual shifts produced.
        scratch[..4].copy_from_slice(&self.to_bits().to_le_bytes());
    }
}
impl Marshal for f64 {
    fn marshal(&self, scratch: &mut [u8]) {
        scratch[..8].copy_from_slice(&self.to_bits().to_le_bytes());
    }
}
impl Marshal for bool {
    fn marshal(&self, scratch: &mut [u8]) {
        // false -> 0, true -> 1
        scratch[0] = *self as u8;
    }
}
#[cfg(test)]
mod test {
    use std::fmt;
    use crate::types::{Marshal, StatBuffer, Unmarshal};
    use rand::distributions::{Distribution, Standard};
    use rand::random;
    // Property test: marshal followed by unmarshal must be the identity,
    // checked on 100 random values per primitive type.
    fn test_some<T>()
    where
        T: Copy + fmt::Debug + StatBuffer + Marshal + Unmarshal<T> + PartialEq,
        Standard: Distribution<T>,
    {
        for _ in 0..100 {
            let mut buffer = T::buffer();
            let v = random::<T>();
            v.marshal(buffer.as_mut());
            let u = T::unmarshal(buffer.as_ref());
            assert_eq!(v, u);
        }
    }
    #[test]
    fn test_u8() {
        test_some::<u8>()
    }
    #[test]
    fn test_u16() {
        test_some::<u16>()
    }
    #[test]
    fn test_u32() {
        test_some::<u32>()
    }
    #[test]
    fn test_u64() {
        test_some::<u64>()
    }
    #[test]
    fn test_i8() {
        test_some::<i8>()
    }
    #[test]
    fn test_i16() {
        test_some::<i16>()
    }
    #[test]
    fn test_i32() {
        test_some::<i32>()
    }
    #[test]
    fn test_i64() {
        test_some::<i64>()
    }
    #[test]
    fn test_f32() {
        test_some::<f32>()
    }
    #[test]
    fn test_f64() {
        test_some::<f64>()
    }
    #[test]
    fn test_bool() {
        test_some::<bool>()
    }
}
|
#![deny(warnings)]
extern crate orbclient;
use std::fs::File;
use std::env;
use std::io::{Read, Write};
use wav::WavFile;
use orbclient::{EventOption, Window, K_ESC};
mod wav;
/// Minimal WAV player: reads the file named by the first CLI argument,
/// pushes its PCM data to the `audio:` scheme, then waits for Esc or quit.
fn main() {
    // First CLI argument is the file to play; fall back to a non-existent URL.
    let url = match env::args().nth(1) {
        Some(arg) => arg,
        None => "none:".to_string(),
    };
    // Best-effort read: on failure we simply end up with an empty buffer
    // (and therefore play nothing).
    let mut vec: Vec<u8> = Vec::new();
    if let Ok(mut file) = File::open(&url) {
        file.read_to_end(&mut vec).unwrap();
    }
    let mut window = Window::new(-1, -1, 320, 32, &("Player (".to_string() + &url + ")")).unwrap();
    window.sync();
    let wav = WavFile::from_data(&vec);
    if !wav.data.is_empty() {
        // NOTE(review): opening with File::open (read-only flags) and then
        // writing relies on Redox scheme semantics — confirm this is intended.
        if let Ok(mut audio) = File::open("audio://") {
            // write_all instead of write: a bare write may perform a partial
            // write and silently drop the tail of the sample data.
            audio.write_all(&wav.data).unwrap();
        }
    }
    // Block on window events until Esc is pressed or the window is closed.
    loop {
        for event in window.events() {
            if let EventOption::Key(key_event) = event.to_option() {
                if key_event.pressed && key_event.scancode == K_ESC {
                    return;
                }
            }
            if let EventOption::Quit(_) = event.to_option() {
                return;
            }
        }
    }
}
|
use std::str::FromStr;
use std::fmt;
/// Store roll parameters
///
/// **Parameters:**
/// - `count`: number of dice you want to roll
/// - `sides`: number of sides on each die
#[derive(Debug, Eq, PartialEq)]
pub struct RollCmd {
    count: u32,
    sides: u32,
}
impl RollCmd {
    /// Construct a new `RollCmd`. Count first, then sides.
    pub fn new(c: u32, s: u32) -> RollCmd {
        RollCmd { count: c, sides: s }
    }
    /// Generates a new RollResult based on a RollCmd.
    ///
    /// Each RollCmd can be used repeatedly; this function will generate new
    /// RollResults each time.
    /// Because this is a higher order function it's up to the caller to provide
    /// an appropriate 'random value of range' function.
    ///
    /// # Examples
    ///
    /// Here we provide result with a max function, returning the highest
    /// possible value for each roll.
    /// ```
    /// use rcmd::RollCmd;
    /// let cmd = RollCmd::new(2, 6);
    /// let result = cmd.result(|max| max);
    /// assert!([6, 6] == result.values());
    /// ```
    pub fn result<F: FnMut(u32) -> u32>(&self, mut f: F) -> RollResult {
        RollResult((0..self.count).map(|_| f(self.sides)).collect())
    }
}
impl FromStr for RollCmd {
    type Err = String;
    /// Convert a string such as "2d6" (count `d` sides) or "6" (one die)
    /// into a `RollCmd`.
    ///
    /// Parsing is strict: every component must be a valid `u32`. The previous
    /// implementation silently discarded unparsable components, so malformed
    /// input like "2d" was misread as "roll one d2" instead of an error.
    fn from_str(s: &str) -> Result<RollCmd, <RollCmd as FromStr>::Err> {
        let invalid = || format!("Invalid RollCmd: {}", s);
        let parts: Vec<&str> = s.split('d').collect();
        match parts.len() {
            2 => {
                let count = parts[0].parse().map_err(|_| invalid())?;
                let sides = parts[1].parse().map_err(|_| invalid())?;
                Ok(RollCmd::new(count, sides))
            }
            1 => {
                // Bare "M" form: count defaults to 1.
                let sides = parts[0].parse().map_err(|_| invalid())?;
                Ok(RollCmd::new(1, sides))
            }
            _ => Err(invalid()),
        }
    }
}
/// A vector of u32 representing the result of a RollCmd.
///
/// RollResult allows us to provide specialized function implementations for
/// dealing with roll results.
pub struct RollResult(Vec<u32>);
impl RollResult {
    /// Returns an iterator over the individual rolls of a result.
    pub fn iter<'a>(&'a self) -> std::slice::Iter<'a, u32> {
        self.0.iter()
    }
    /// Sum of all individual rolls.
    // TODO: consider widening to u64 to avoid overflow on huge rolls.
    pub fn total(&self) -> u32 {
        self.0.iter().sum()
    }
    /// Returns the individual rolls as a slice.
    pub fn values(&self) -> &[u32] {
        &self.0
    }
}
impl fmt::Display for RollResult {
    /// Format as "r1, r2, ... (Total: t)".
    ///
    /// # Examples
    /// ```
    /// use rcmd::RollCmd;
    /// let result = RollCmd::new(3, 2).result(|max| max);
    /// assert!(result.to_string() == "2, 2, 2 (Total: 6)");
    /// ```
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let as_strings: Vec<_> = self.iter().map(|n| n.to_string()).collect();
        write!(f, "{} (Total: {})", as_strings.join(", "), self.total())
    }
}
#[cfg(test)]
mod rollcmd_tests {
    use super::*;
    // FromStr tests
    #[test]
    fn can_parse_full_rollcmds() {
        // "NdM" form: explicit count and sides.
        let cmd = RollCmd::new(2, 6);
        assert!(cmd == "2d6".parse().unwrap());
    }
    #[test]
    fn can_parse_short_rollcmds() {
        // Bare "M" form: count defaults to 1.
        let cmd = RollCmd::new(1, 6);
        assert!(cmd == "6".parse().unwrap());
    }
}
|
use crate::provider::storage::{ClientStorage, StoreError};
use crate::provider::ClientLedger;
use crypto::identity::{DummyMixIdentityPrivateKey, MixnetIdentityPrivateKey};
use futures::lock::Mutex as FMutex;
use hmac::{Hmac, Mac};
use sfw_provider_requests::requests::{
ProviderRequestError, ProviderRequests, PullRequest, RegisterRequest,
};
use sfw_provider_requests::responses::{ProviderResponse, PullResponse, RegisterResponse};
use sfw_provider_requests::AuthToken;
use sha2::Sha256;
use std::io;
use std::path::{Path, PathBuf};
use std::sync::Arc;
type HmacSha256 = Hmac<Sha256>;
/// Unified error type for everything that can go wrong while servicing a
/// client request; the From impls below collapse lower-level errors into it.
#[derive(Debug)]
pub enum ClientProcessingError {
    // Request referenced a client unknown to the storage layer.
    ClientDoesntExistError,
    // Any other client-storage failure.
    StoreError,
    // Request bytes could not be parsed into a ProviderRequest.
    InvalidRequest,
    // Presented auth token does not match the ledger entry.
    WrongToken,
    // Wrapper for any std::io::Error.
    IOError,
}
impl From<ProviderRequestError> for ClientProcessingError {
fn from(_: ProviderRequestError) -> Self {
use ClientProcessingError::*;
InvalidRequest
}
}
impl From<StoreError> for ClientProcessingError {
    fn from(e: StoreError) -> Self {
        // Preserve the "unknown client" case; every other storage failure is
        // collapsed into the generic StoreError variant.
        match e {
            StoreError::ClientDoesntExistError => ClientProcessingError::ClientDoesntExistError,
            _ => ClientProcessingError::StoreError,
        }
    }
}
impl From<io::Error> for ClientProcessingError {
fn from(_: io::Error) -> Self {
use ClientProcessingError::*;
IOError
}
}
/// Shared context handed to every request handler.
#[derive(Debug)]
pub(crate) struct ClientProcessingData {
    // Root directory under which each client gets its own message store.
    store_dir: PathBuf,
    // Ledger of registered clients, shared across tasks behind a futures Mutex.
    registered_clients_ledger: Arc<FMutex<ClientLedger>>,
    // Provider identity key, used as the HMAC key for client auth tokens.
    secret_key: DummyMixIdentityPrivateKey,
}
impl ClientProcessingData {
pub(crate) fn new(
store_dir: PathBuf,
registered_clients_ledger: Arc<FMutex<ClientLedger>>,
secret_key: DummyMixIdentityPrivateKey,
) -> Self {
ClientProcessingData {
store_dir,
registered_clients_ledger,
secret_key,
}
}
pub(crate) fn add_arc(self) -> Arc<Self> {
Arc::new(self)
}
}
/// Stateless processor dispatching raw client requests to their handlers.
pub(crate) struct ClientRequestProcessor;
impl ClientRequestProcessor {
    /// Parse raw request bytes, dispatch to the matching handler and return
    /// the serialized response.
    ///
    /// # Errors
    /// `InvalidRequest` if the bytes don't parse; otherwise whatever the
    /// individual handler reports.
    pub(crate) async fn process_client_request(
        data: &[u8],
        processing_data: Arc<ClientProcessingData>,
    ) -> Result<Vec<u8>, ClientProcessingError> {
        // `data` is already a slice; the previous `&data` double-reference
        // only compiled thanks to deref coercion.
        let client_request = ProviderRequests::from_bytes(data)?;
        println!("Received the following request: {:?}", client_request);
        match client_request {
            ProviderRequests::Register(req) => Ok(ClientRequestProcessor::register_new_client(
                req,
                processing_data,
            )
            .await?
            .to_bytes()),
            ProviderRequests::PullMessages(req) => Ok(
                ClientRequestProcessor::process_pull_messages_request(req, processing_data)
                    .await?
                    .to_bytes(),
            ),
        }
    }

    /// Hand over all stored messages for the client, provided its auth token
    /// matches the ledger entry; otherwise `WrongToken`.
    async fn process_pull_messages_request(
        req: PullRequest,
        processing_data: Arc<ClientProcessingData>,
    ) -> Result<PullResponse, ClientProcessingError> {
        // TODO: this lock is completely unnecessary as we're only reading the data.
        // Wait for https://github.com/nymtech/nym-sfw-provider/issues/19 to resolve.
        let unlocked_ledger = processing_data.registered_clients_ledger.lock().await;
        println!("Processing pull!");
        if unlocked_ledger.has_token(req.auth_token) {
            // drop the mutex so that we could do IO without blocking others wanting to get the lock
            drop(unlocked_ledger);
            let retrieved_messages = ClientStorage::retrieve_client_files(
                req.destination_address,
                processing_data.store_dir.as_path(),
            )?;
            Ok(PullResponse::new(retrieved_messages))
        } else {
            Err(ClientProcessingError::WrongToken)
        }
    }

    /// Derive the client's auth token and, for first-time registrations,
    /// record it in the ledger and create the client's storage directory.
    /// Registration is idempotent: re-registering returns the same token.
    async fn register_new_client(
        req: RegisterRequest,
        processing_data: Arc<ClientProcessingData>,
    ) -> Result<RegisterResponse, ClientProcessingError> {
        println!("Processing register new client request!");
        let mut unlocked_ledger = processing_data.registered_clients_ledger.lock().await;
        let auth_token = ClientRequestProcessor::generate_new_auth_token(
            req.destination_address.to_vec(),
            processing_data.secret_key,
        );
        if !unlocked_ledger.has_token(auth_token) {
            unlocked_ledger.insert_token(auth_token, req.destination_address);
            ClientRequestProcessor::create_storage_dir(
                req.destination_address,
                processing_data.store_dir.as_path(),
            )?;
        }
        Ok(RegisterResponse::new(auth_token))
    }

    /// Create `store_dir/<hex(client_address)>/` (and parents) if missing.
    fn create_storage_dir(
        client_address: sphinx::route::DestinationAddressBytes,
        store_dir: &Path,
    ) -> io::Result<()> {
        let client_dir_name = hex::encode(client_address);
        let full_store_dir = store_dir.join(client_dir_name);
        std::fs::create_dir_all(full_store_dir)
    }

    /// Auth token = HMAC-SHA256(provider secret key, client address bytes).
    fn generate_new_auth_token(data: Vec<u8>, key: DummyMixIdentityPrivateKey) -> AuthToken {
        let mut auth_token_raw =
            HmacSha256::new_varkey(&key.to_bytes()).expect("HMAC can take key of any size");
        auth_token_raw.input(&data);
        let mut auth_token = [0u8; 32];
        // Copy the MAC straight out of the GenericArray; the previous
        // `.code().to_vec()` allocated an intermediate Vec for nothing.
        auth_token.copy_from_slice(auth_token_raw.result().code().as_slice());
        auth_token
    }
}
#[cfg(test)]
mod register_new_client {
    // NOTE(review): this whole suite is commented out pending an
    // async-capable test harness; kept verbatim for reference.
    // use super::*;
    // TODO: those tests require being called in async context. we need to research how to test this stuff...
    // #[test]
    // fn registers_new_auth_token_for_each_new_client() {
    //     let req1 = RegisterRequest {
    //         destination_address: [1u8; 32],
    //     };
    //     let registered_client_ledger = ClientLedger::new();
    //     let store_dir = PathBuf::from("./foo/");
    //     let key = Scalar::from_bytes_mod_order([1u8; 32]);
    //     let client_processing_data = ClientProcessingData::new(store_dir, registered_client_ledger, key).add_arc_futures_mutex();
    //
    //
    //     // need to do async....
    //     client_processing_data.lock().await;
    //     assert_eq!(0, registered_client_ledger.0.len());
    //     ClientRequestProcessor::register_new_client(
    //         req1,
    //         client_processing_data.clone(),
    //     );
    //
    //     assert_eq!(1, registered_client_ledger.0.len());
    //
    //     let req2 = RegisterRequest {
    //         destination_address: [2u8; 32],
    //     };
    //     ClientRequestProcessor::register_new_client(
    //         req2,
    //         client_processing_data,
    //     );
    //     assert_eq!(2, registered_client_ledger.0.len());
    // }
    //
    // #[test]
    // fn registers_given_token_only_once() {
    //     let req1 = RegisterRequest {
    //         destination_address: [1u8; 32],
    //     };
    //     let registered_client_ledger = ClientLedger::new();
    //     let store_dir = PathBuf::from("./foo/");
    //     let key = Scalar::from_bytes_mod_order([1u8; 32]);
    //     let client_processing_data = ClientProcessingData::new(store_dir, registered_client_ledger, key).add_arc_futures_mutex();
    //
    //     ClientRequestProcessor::register_new_client(
    //         req1,
    //         client_processing_data.clone(),
    //     );
    //     let req2 = RegisterRequest {
    //         destination_address: [1u8; 32],
    //     };
    //     ClientRequestProcessor::register_new_client(
    //         req2,
    //         client_processing_data.clone(),
    //     );
    //
    //     client_processing_data.lock().await;
    //
    //     assert_eq!(1, registered_client_ledger.0.len())
    // }
}
#[cfg(test)]
mod create_storage_dir {
    use super::*;
    use sphinx::route::DestinationAddressBytes;
    #[test]
    fn it_creates_a_correct_storage_directory() {
        // Smoke test: only asserts that create_dir_all succeeds under /tmp,
        // not that the resulting layout matches any expectation.
        let client_address: DestinationAddressBytes = [1u8; 32];
        let store_dir = Path::new("/tmp/");
        ClientRequestProcessor::create_storage_dir(client_address, store_dir).unwrap();
    }
}
#[cfg(test)]
mod generating_new_auth_token {
    use super::*;
    #[test]
    fn for_the_same_input_generates_the_same_auth_token() {
        // HMAC is deterministic: same key + same data => same token.
        let data1 = vec![1u8; 55];
        let data2 = vec![1u8; 55];
        let key = DummyMixIdentityPrivateKey::from_bytes(&[1u8; 32]);
        let token1 = ClientRequestProcessor::generate_new_auth_token(data1, key);
        let token2 = ClientRequestProcessor::generate_new_auth_token(data2, key);
        assert_eq!(token1, token2);
    }
    #[test]
    fn for_different_inputs_generates_different_auth_tokens() {
        // Differing content must change the token...
        let data1 = vec![1u8; 55];
        let data2 = vec![2u8; 55];
        let key = DummyMixIdentityPrivateKey::from_bytes(&[1u8; 32]);
        let token1 = ClientRequestProcessor::generate_new_auth_token(data1, key);
        let token2 = ClientRequestProcessor::generate_new_auth_token(data2, key);
        assert_ne!(token1, token2);
        // ...and so must differing length.
        let data1 = vec![1u8; 50];
        let data2 = vec![2u8; 55];
        let key = DummyMixIdentityPrivateKey::from_bytes(&[1u8; 32]);
        let token1 = ClientRequestProcessor::generate_new_auth_token(data1, key);
        let token2 = ClientRequestProcessor::generate_new_auth_token(data2, key);
        assert_ne!(token1, token2);
    }
}
|
use crate::color::{color, Color};
use crate::hittable::HitRecord;
use crate::material::{
dielectric::Dielectric, diffuse::Diffuse, isotropic::Isotropic, lambertian::Lambertian,
metal::Metal,
};
use crate::pdf::PdfType;
use crate::ray::Ray;
use crate::texture::{solidcolor::SolidColor, Texture};
use crate::vec::Vec3;
use enum_dispatch::enum_dispatch;
use rand::rngs::SmallRng;
pub mod dielectric;
pub mod diffuse;
pub mod isotropic;
pub mod lambertian;
pub mod metal;
/// Outcome of a material scattering a ray: the scattered ray, the colour
/// attenuation to apply, and an optional sampling PDF.
pub struct Scatter {
    pub ray: Ray,
    pub attenuation: Color,
    // NOTE(review): presumably None for materials that don't importance-sample
    // (e.g. specular) — confirm against the individual material impls.
    pub pdf: Option<PdfType>,
}
/// Behaviour shared by all materials; dispatched statically through
/// `MaterialType` via enum_dispatch (no boxing).
#[enum_dispatch]
pub trait Material: Clone {
    /// How an incoming ray scatters at the hit; default: no scatter (absorbed).
    fn scatter(&self, _rayin: &Ray, _hit: &HitRecord, _rng: &mut SmallRng) -> Option<Scatter> {
        None
    }
    /// Weight for the scattered direction; default 1.0.
    fn scattering_pdf(&self, _rayin: &Ray, _hit: &HitRecord, _scattered: &Ray) -> f64 {
        1.0
    }
    /// Emitted radiance at (u, v, p); default black (non-emissive).
    fn emitted(&self, _rayin: &Ray, _hit: &HitRecord, _u: f64, _v: f64, _p: Vec3) -> Color {
        color(0.0, 0.0, 0.0)
    }
}
/// Closed set of material implementations; enum_dispatch generates the
/// `Material` forwarding impl for this enum.
#[enum_dispatch(Material)]
#[derive(Debug, Clone)]
pub enum MaterialType {
    Isotropic,
    Lambertian,
    Metal,
    Dielectric,
    Diffuse,
}
impl Default for MaterialType {
    /// Default material: Lambertian with a solid (0, 1, 1) cyan albedo —
    /// a visually obvious placeholder for objects without an explicit material.
    fn default() -> MaterialType {
        MaterialType::from(Lambertian {
            albedo: Texture::from(SolidColor {
                color: color(0.0, 1.0, 1.0),
            }),
        })
    }
}
|
use guion::{EventResp, render::link::RenderLink, event::compound::EventCompound};
use super::*;
// Forwarding GHandler implementation: `Handler<S, E>` delegates every hook
// verbatim to the inner handler `S`, making it a transparent layer that can
// later intercept individual callbacks (see the remaining TODOs).
impl<S,E> GHandler<E> for Handler<S,E> where S: GHandler<E>, E: Env + Sync {
    #[inline]
    fn _render(l: Link<E>, r: &mut RenderLink<E>) {
        S::_render(l,r)
        //todo!() //TODO impl everything
    }
    #[inline]
    fn _event_direct(l: Link<E>, e: &EventCompound<E>) -> EventResp {
        S::_event_direct(l,e)
    }
    #[inline]
    fn _event_root(l: Link<E>, e: &EventCompound<E>) -> EventResp {
        S::_event_root(l,e)
    }
    #[inline]
    fn _size(l: Link<E>, e: &EStyle<E>) -> ESize<E> {
        //todo!();
        S::_size(l,e)
    }
    #[inline]
    fn _send_event(l: Link<E>, e: &EventCompound<E>, child: E::WidgetPath) -> Result<EventResp,E::Error> {
        S::_send_event(l,e,child)
    }
}
|
use crate::consts::*;
use std::io;
// todo: we should use overlapped api (async await)
/// Send a plain two-byte ST-Link command and read back `resp_len` bytes.
pub(crate) fn command(handle: &nihao_usb::Handle, cmd0: u8, cmd1: u8, resp_len: usize) -> io::Result<Vec<u8>> {
    // Command packets are zero-padded to the fixed v2 size.
    let mut request = vec![0u8; STLINK_CMD_SIZE_V2];
    request[0] = cmd0;
    request[1] = cmd1;
    let mut response = vec![0u8; resp_len];
    handle.write_pipe(STLINK_TX_EP, &request)?;
    handle.read_pipe(STLINK_RX_EP, &mut response)?;
    Ok(response)
}
/// Send a debug-class ST-Link command (prefixed with STLINK_DEBUG_COMMAND)
/// and read back `resp_len` bytes.
pub(crate) fn debug_command(handle: &nihao_usb::Handle, cmd0: u8, cmd1: u8, resp_len: usize) -> io::Result<Vec<u8>> {
    let mut request = vec![0u8; STLINK_CMD_SIZE_V2];
    request[0] = STLINK_DEBUG_COMMAND;
    request[1] = cmd0;
    request[2] = cmd1;
    let mut response = vec![0u8; resp_len];
    handle.write_pipe(STLINK_TX_EP, &request)?;
    handle.read_pipe(STLINK_RX_EP, &mut response)?;
    Ok(response)
}
|
use arrayvec::ArrayString;
/// A process command line held in fixed-capacity buffers (no heap needed).
#[derive(Clone)]
pub struct Cmdline {
    // Full command line: argv elements joined by single spaces (max 128 bytes).
    cmdline: ArrayString<128>,
    // First argv element (the program name/path).
    argv0: ArrayString<128>,
}
impl Cmdline {
    /// Empty command line.
    pub fn new() -> Cmdline {
        Cmdline {
            cmdline: ArrayString::new(),
            argv0: ArrayString::new(),
        }
    }
    /// Build a command line directly from an argv-style slice of byte strings.
    pub fn from_argv(argv: &[&[u8]]) -> Cmdline {
        let mut cmdline = Cmdline::new();
        cmdline.set_by_argv(argv);
        cmdline
    }
    /// The whole command line, arguments joined by single spaces.
    pub fn as_str(&self) -> &str {
        &self.cmdline
    }
    /// The first argument (program name/path); empty if argv was empty.
    pub fn argv0(&self) -> &str {
        &self.argv0
    }
    /// Rebuild the stored command line from `argv`.
    ///
    /// Non-UTF-8 arguments are replaced with "[invalid utf-8]". The backing
    /// buffers have a fixed 128-byte capacity; the previous implementation
    /// used `push_str`, which PANICS on overflow, so an oversized argv could
    /// crash the caller. We now stop appending (truncate) instead.
    pub fn set_by_argv(&mut self, argv: &[&[u8]]) {
        self.cmdline.clear();
        for (i, arg) in argv.iter().enumerate() {
            let arg = core::str::from_utf8(arg).unwrap_or("[invalid utf-8]");
            if self.cmdline.try_push_str(arg).is_err() {
                // Capacity exhausted: keep what already fits, drop the rest.
                break;
            }
            if i != argv.len() - 1 {
                let _ = self.cmdline.try_push(' ');
            }
        }
        self.argv0.clear();
        // split(' ') always yields at least one (possibly empty) element, and
        // the first token cannot exceed argv0's capacity (equal to cmdline's).
        let _ = self.argv0.try_push_str(self.cmdline.split(' ').next().unwrap());
    }
}
|
use chrono::{NaiveDate, NaiveTime, Utc};
use serde::{Serialize, Deserialize, Deserializer, Serializer, de::{Error, Unexpected, Visitor}};
use uuid::Uuid;
/// YAML front matter of a note: id, optional title, and creation date/time.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FrontMatter {
    // Random simple-format UUID generated when absent from the input.
    #[serde(default = "default_id")]
    pub id: String,
    pub title: Option<String>,
    // Serialized as "YYYY-MM-DD"; defaults to the current date.
    #[serde(deserialize_with = "from_date_string", serialize_with = "to_date_string", default = "default_date")]
    pub date: NaiveDate,
    // Serialized as "HH:MM:SS"; defaults to the current time.
    #[serde(deserialize_with = "from_time_string", serialize_with = "to_time_string", default = "default_time")]
    pub time: NaiveTime,
}
// Fresh v4 UUID rendered without hyphens ("simple" format).
fn default_id() -> String {
    Uuid::new_v4().to_simple().to_string()
}
/// Current date derived from `Utc::now()` as a naive (offset-free) value.
fn default_date() -> NaiveDate {
    Utc::now().naive_local().date()
}
/// Current time derived from `Utc::now()` as a naive (offset-free) value.
fn default_time() -> NaiveTime {
    Utc::now().naive_local().time()
}
impl Default for FrontMatter {
    /// Fresh front matter: random id, no title, current date and time.
    fn default() -> Self {
        Self {
            id: default_id(),
            title: None,
            date: default_date(),
            time: default_time()
        }
    }
}
struct DateVisitor;
impl<'de> Visitor<'de> for DateVisitor {
type Value = NaiveDate;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a string representing a date in the format YYYY-MM-DD")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: Error, {
match NaiveDate::parse_from_str(v, "%Y-%m-%d") {
Ok(date) => {
Ok(date)
}
Err(_e) => {
Err(Error::invalid_value(Unexpected::Str(v), &self))
}
}
}
}
// serde glue: deserialize a "YYYY-MM-DD" string into a NaiveDate.
fn from_date_string<'de, D>(d: D) -> Result<NaiveDate, D::Error> where D: Deserializer<'de> {
    d.deserialize_str(DateVisitor)
}
/// serde glue: serialize a NaiveDate as a "YYYY-MM-DD" string.
fn to_date_string<S>(date: &NaiveDate, s: S) -> Result<S::Ok, S::Error> where S: Serializer {
    s.serialize_str(&date.format("%Y-%m-%d").to_string())
}
struct TimeVisitor;
impl<'de> Visitor<'de> for TimeVisitor {
type Value = NaiveTime;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("a string representing a time in the format HH:MM:SS")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: Error, {
match NaiveTime::parse_from_str(v, "%H:%M:%S") {
Ok(time) => {
Ok(time)
}
Err(_e) => {
Err(Error::invalid_value(Unexpected::Str(v), &self))
}
}
}
}
// serde glue: deserialize an "HH:MM:SS" string into a NaiveTime.
fn from_time_string<'de, D>(d: D) -> Result<NaiveTime, D::Error> where D: Deserializer<'de> {
    d.deserialize_str(TimeVisitor)
}
fn to_time_string<S>(date: &NaiveTime, s: S) -> Result<S::Ok, S::Error> where S: Serializer {
let str = date.format("%H:%M:%S").to_string();
s.serialize_str(&str)
}
#[cfg(test)]
mod tests {
    use chrono::{NaiveDate, NaiveTime};
    use super::FrontMatter;
    #[test]
    fn serializes_date() {
        // Smoke test: only checks serialization doesn't panic; the output
        // is printed, not asserted.
        let id = "123e4567-e89b-12d3-a456-426614174000".to_string();
        let title = Some("test_note".to_string());
        let date = NaiveDate::from_ymd(2021, 4, 7);
        let time = NaiveTime::from_hms(23, 08, 15);
        let front_matter = FrontMatter {
            id,
            title,
            date,
            time
        };
        let serialized = serde_yaml::to_string(&front_matter).unwrap();
        println!("{}", serialized);
    }
    #[test]
    fn deserialize_date() {
        // Missing id/time fields exercise the serde `default` fallbacks.
        let dt = NaiveDate::from_ymd(2021, 5, 1);
        let fm = r#"title: serialized note
date: 2021-05-01"#;
        let front_matter: FrontMatter = serde_yaml::from_str(fm).unwrap();
        assert_eq!(Some("serialized note".to_string()), front_matter.title);
        assert_eq!(dt.clone(), front_matter.date);
    }
}
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::time::Instant;
const INPUT: &str = include_str!("../input.txt");
/// Returns true for the five ASCII vowels.
fn is_vowel(c: u8) -> bool {
    matches!(c, b'a' | b'e' | b'i' | b'o' | b'u')
}
/// Part 1 niceness: at least three vowels, at least one doubled letter, and
/// none of the forbidden substrings "ab", "cd", "pq", "xy".
fn is_nice_part_1(s: &str) -> bool {
    let bytes = s.as_bytes();
    // Robustness fix: the original indexed bytes[0] unconditionally and
    // panicked on an empty line.
    if bytes.is_empty() {
        return false;
    }
    let mut vowel_count = 0;
    let mut has_double_letter = false;
    if is_vowel(bytes[0]) {
        vowel_count += 1;
    }
    for i in 1..bytes.len() {
        let previous = bytes[i - 1];
        let current = bytes[i];
        // A forbidden pair disqualifies the string immediately.
        if matches!(
            (previous, current),
            (b'a', b'b') | (b'c', b'd') | (b'p', b'q') | (b'x', b'y')
        ) {
            return false;
        }
        if previous == current {
            has_double_letter = true;
        }
        if is_vowel(current) {
            vowel_count += 1;
        }
    }
    vowel_count >= 3 && has_double_letter
}
/// Part 2 niceness: some letter pair appears twice without overlapping, AND
/// some letter repeats with exactly one letter between the occurrences.
fn is_nice_part_2(s: &str) -> bool {
    let bytes = s.as_bytes();
    // Index (of the pair's second byte) at which each pair was first seen.
    let mut first_seen: HashMap<(u8, u8), usize> = HashMap::new();
    let mut pair_twice = false;
    let mut repeat_with_gap = false;
    for i in 1..bytes.len() {
        if pair_twice && repeat_with_gap {
            break;
        }
        let pair = (bytes[i - 1], bytes[i]);
        match first_seen.entry(pair) {
            // A strictly earlier, non-overlapping occurrence exists.
            Entry::Occupied(seen) if *seen.get() < i - 1 => pair_twice = true,
            // Overlapping repeat (e.g. "aaa"): doesn't count, and we keep
            // the original first-seen index.
            Entry::Occupied(_) => {}
            Entry::Vacant(slot) => {
                slot.insert(i);
            }
        }
        // xyx pattern: same letter one position apart.
        if i + 1 < bytes.len() && bytes[i + 1] == bytes[i - 1] {
            repeat_with_gap = true;
        }
    }
    pair_twice && repeat_with_gap
}
fn part1() -> usize {
INPUT.lines().filter(|s| is_nice_part_1(s)).count()
}
fn part2() -> usize {
INPUT.lines().filter(|s| is_nice_part_2(s)).count()
}
/// Run both parts, printing each answer and its wall-clock duration.
fn main() {
    let t1 = Instant::now();
    println!("part 1: {}", part1());
    println!("part 1 took {}ms", t1.elapsed().as_millis());
    let t2 = Instant::now();
    println!("part 2: {}", part2());
    println!("part 2 took {}ms", t2.elapsed().as_millis());
}
#[cfg(test)]
mod tests {
    use super::*;
    // Expected values are specific to this puzzle's input.txt.
    #[test]
    fn test_part1() {
        assert_eq!(part1(), 236);
    }
    #[test]
    fn test_part2() {
        assert_eq!(part2(), 51);
    }
}
|
#[doc = "Reader of register PIDR0"]
pub type R = crate::R<u32, super::PIDR0>;
#[doc = "Reader of field `PIDR0`"]
pub type PIDR0_R = crate::R<u32, u32>;
// svd2rust-generated register reader. The 0xffff_ffff mask is a no-op on a
// full 32-bit field but is emitted uniformly by the generator.
impl R {
    #[doc = "Bits 0:31 - peripheral ID0"]
    #[inline(always)]
    pub fn pidr0(&self) -> PIDR0_R {
        PIDR0_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
|
use alquitran::issues::Hint;
use alquitran::issues::Issue;
use alquitran::lint::lint_string_field;
#[test]
fn conforming_string_field() {
    // A fully conforming, NUL-padded field parses cleanly: no hints, no
    // issues, and the value excludes the padding.
    let bytes = b"Portable_String.123\0\0";
    let result = lint_string_field(&bytes[..]);
    assert!(result.hints.is_empty());
    assert!(result.issues.is_empty());
    assert_eq!("Portable_String.123".as_bytes(), result.value.unwrap());
}
#[test]
fn empty_string_field() {
    // A lone NUL terminator is a valid, empty string value.
    let bytes = b"\0";
    let result = lint_string_field(&bytes[..]);
    assert!(result.hints.is_empty());
    assert!(result.issues.is_empty());
    assert_eq!("".as_bytes(), result.value.unwrap());
}
#[test]
fn no_string_field() {
    // Zero-length input yields no value at all, but also no findings.
    let bytes = b"";
    let result = lint_string_field(&bytes[..]);
    assert!(result.hints.is_empty());
    assert!(result.issues.is_empty());
    assert!(result.value.is_none());
}
#[test]
fn unportable_char_in_string_field() {
    // '@' at offset 4 is flagged as a hint (not an issue); the value is
    // still returned.
    let bytes = b"user@host\0";
    let result = lint_string_field(&bytes[..]);
    assert_eq!(1, result.hints.len());
    assert!(result.hints.contains(&(Hint::UnportableCharInString, 4)));
    assert!(result.issues.is_empty());
    assert_eq!("user@host".as_bytes(), result.value.unwrap());
}
#[test]
fn unterminated_string_field() {
    // Missing NUL terminator is a hard issue (reported at the last byte's
    // offset) and no value is produced.
    let bytes = b"string";
    let result = lint_string_field(&bytes[..]);
    assert!(result.hints.is_empty());
    assert_eq!(1, result.issues.len());
    assert!(result.issues.contains(&(Issue::UnterminatedString, 5)));
    assert!(result.value.is_none());
}
#[test]
fn unused_byte_not_nul_in_string_field() {
    // Non-NUL padding after the terminator is an issue at that byte's
    // offset, but the value itself is still usable.
    let bytes = b"string\0x";
    let result = lint_string_field(&bytes[..]);
    assert!(result.hints.is_empty());
    assert_eq!(1, result.issues.len());
    assert!(result.issues.contains(&(Issue::UnusedByteNotNul, 7)));
    assert_eq!("string".as_bytes(), result.value.unwrap());
}
|
use std::fs;
use sdl2::{event::Event, rect::Rect, pixels::Color, render::{Canvas, Texture}, video::{self}};
mod sdl;
/// Logical window dimensions in pixels.
#[derive(Debug, Clone, Copy)]
struct Window {
    width: i32,
    height: i32,
}
/// Render a bitmap glyph described as lines of '1'/'0' characters, one
/// filled 1x1 rect per cell, anchored at (100, 100): '1' paints black,
/// anything else white.
fn draw_word(canvas: &mut Canvas<video::Window>, word: &str) {
    // Parse each trimmed line into a row of on/off pixels.
    let fonts = word.lines()
        .map(|line| line.trim()).map(|line| line.chars()
        .map(|c| c == '1').collect::<Vec<bool>>());
    for (j, line) in fonts.enumerate() {
        for (i, pixel) in line.iter().enumerate() {
            if *pixel {
                canvas.set_draw_color(Color::RGBA(0, 0, 0, 255));
            } else {
                canvas.set_draw_color(Color::RGBA(255, 255, 255, 255));
            }
            // NOTE(review): the `* 1` factors look like a placeholder for a
            // future pixel-scale setting — confirm.
            canvas.fill_rect(Rect::new(i as i32 * 1 + 100, j as i32 * 1 + 100, 1, 1)).unwrap();
        }
    }
}
/// Drain the SDL event queue; terminates the whole process on a Quit event.
fn handle_events(event_pump: &mut sdl2::EventPump){
    for event in event_pump.poll_iter() {
        if let Event::Quit { .. } = event {
            ::std::process::exit(0);
        }
    }
}
/// Map a printable ASCII char to its source cell in the one-row font atlas.
///
/// The atlas is assumed to start at '!' (ASCII 33); chars below that (e.g.
/// ' ') would produce a negative x.
/// NOTE(review): assumes a single-row atlas laid out in ASCII order — confirm.
fn char_position_in_atlas(c: char, letter_width: u32, letter_height: u32) -> Rect {
    // Fix: `letter_width` is already u32, the old `as u32` cast was redundant.
    Rect::new(letter_width as i32 * (c as i32 - 33), 0, letter_width, letter_height)
}
/// Advance the blit target rect one glyph cell to the right.
fn move_right_one_char(target: &mut Rect, letter_width: u32) {
    target.set_x(target.x + letter_width as i32);
}
/// Blit `text` one glyph at a time from the font atlas, advancing `target`.
///
/// NOTE(review): the target rect is advanced BEFORE each copy, so the first
/// glyph lands one cell right of `target`'s initial position — confirm this
/// offset is intended.
fn draw_string(canvas: &mut Canvas<video::Window>, font_texture: &Texture, target: &mut Rect,letter_width: usize, letter_height: usize, text: &str) -> Result<(), String> {
    for char in text.chars() {
        move_right_one_char(target, letter_width as u32);
        canvas.copy(font_texture, char_position_in_atlas(char, letter_width as u32, letter_height as u32), *target)?
    }
    Ok(())
}
// Idea. Either 128x128 or 256x256.
// Make it so that each pixel can be addressed by a single number
// or a 2d coordinate. Then I can merely increment a single number
// to write anywhere on the screen.
// Might make some of the initial stuff way easier.
/// Demo entry point: every frame draws the 16-colour palette swatches, a
/// white square and a text string, looping forever (the process exits from
/// inside `handle_events` on Quit).
fn main() -> Result<(), String> {
    let window = Window {
        width: 1200,
        height: 800,
    };
    // The 16 PICO-8 palette colours.
    let palette = vec![
        Color::RGB(0x00, 0x00, 0x00),
        Color::RGB(0x1D, 0x2B, 0x53),
        Color::RGB(0x7E, 0x25, 0x53),
        Color::RGB(0x00, 0x87, 0x51),
        Color::RGB(0xAB, 0x52, 0x36),
        Color::RGB(0x5F, 0x57, 0x4F),
        Color::RGB(0xC2, 0xC3, 0xC7),
        Color::RGB(0xFF, 0xF1, 0xE8),
        Color::RGB(0xFF, 0x00, 0x4D),
        Color::RGB(0xFF, 0xA3, 0x00),
        Color::RGB(0xFF, 0xEC, 0x27),
        Color::RGB(0x00, 0xE4, 0x36),
        Color::RGB(0x29, 0xAD, 0xFF),
        Color::RGB(0x83, 0x76, 0x9C),
        Color::RGB(0xFF, 0x77, 0xA8),
        Color::RGB(0xFF, 0xCC, 0xAA),
    ];
    let sdl::SdlContext {
        mut event_pump,
        mut canvas,
        texture_creator,
        ttf_context,
        video: _,
    } = sdl::setup_sdl(window.width as usize, window.height as usize)?;
    let (mut texture, letter_width, letter_height) = sdl::draw_font_texture(&texture_creator, ttf_context)?;
    loop {
        canvas.set_draw_color(Color::BLACK);
        canvas.clear();
        canvas.set_draw_color(Color::WHITE);
        // Palette swatches: white 1px border with the colour inset by 1px.
        for (i, color) in palette.iter().enumerate() {
            let offset = 2;
            let size = 40;
            let x = i as i32 / size;
            let y = i as i32 % size;
            canvas.set_draw_color(Color::WHITE);
            canvas.fill_rect(Rect::new(x * 10, y * size + offset, size as u32, size as u32))?;
            canvas.set_draw_color(*color);
            canvas.fill_rect(Rect::new(x * 10 + 1, y * size + 1 + offset, size as u32 - 2, size as u32 - 2))?;
        }
        canvas.set_draw_color(Color::WHITE);
        // 128x128 white square near the top-right corner.
        canvas.fill_rect(Rect::new(window.width - 140, 20, 128, 128)).unwrap();
        let mut target = Rect::new(200, 200, letter_width as u32, letter_height as u32);
        draw_string(&mut canvas, &texture, &mut target, letter_width, letter_height, "Add x 1")?;
        canvas.present();
        // Polls the SDL queue; exits the process on a Quit event.
        handle_events(&mut event_pump);
    }
}
|
#[doc = "Register `ISR` reader"]
pub type R = crate::R<ISR_SPEC>;
// svd2rust-generated read-only view of the DFSDM filter ISR register; each
// status flag is exposed as a two-variant enum plus accessor helpers.
#[doc = "Field `JEOCF` reader - End of injected conversion flag"]
pub type JEOCF_R = crate::BitReader<JEOCF_A>;
#[doc = "End of injected conversion flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JEOCF_A {
    #[doc = "0: No injected conversion has completed"]
    Clear = 0,
    #[doc = "1: An injected conversion has completed and its data may be read"]
    Set = 1,
}
impl From<JEOCF_A> for bool {
    #[inline(always)]
    fn from(variant: JEOCF_A) -> Self {
        // Clear = 0 -> false, Set = 1 -> true.
        variant as u8 != 0
    }
}
impl JEOCF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> JEOCF_A {
        match self.bits {
            false => JEOCF_A::Clear,
            true => JEOCF_A::Set,
        }
    }
    #[doc = "No injected conversion has completed"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == JEOCF_A::Clear
    }
    #[doc = "An injected conversion has completed and its data may be read"]
    #[inline(always)]
    pub fn is_set(&self) -> bool {
        *self == JEOCF_A::Set
    }
}
// End-of-regular-conversion flag (same generated shape as JEOCF above).
#[doc = "Field `REOCF` reader - End of regular conversion flag"]
pub type REOCF_R = crate::BitReader<REOCF_A>;
#[doc = "End of regular conversion flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum REOCF_A {
    #[doc = "0: No regular conversion has completed"]
    Clear = 0,
    #[doc = "1: A regular conversion has completed and its data may be read"]
    Set = 1,
}
impl From<REOCF_A> for bool {
    #[inline(always)]
    fn from(variant: REOCF_A) -> Self {
        variant as u8 != 0
    }
}
impl REOCF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> REOCF_A {
        match self.bits {
            false => REOCF_A::Clear,
            true => REOCF_A::Set,
        }
    }
    #[doc = "No regular conversion has completed"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == REOCF_A::Clear
    }
    #[doc = "A regular conversion has completed and its data may be read"]
    #[inline(always)]
    pub fn is_set(&self) -> bool {
        *self == REOCF_A::Set
    }
}
// Injected-conversion overrun flag (same generated shape as JEOCF above).
#[doc = "Field `JOVRF` reader - Injected conversion overrun flag"]
pub type JOVRF_R = crate::BitReader<JOVRF_A>;
#[doc = "Injected conversion overrun flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JOVRF_A {
    #[doc = "0: No injected conversion overrun has occurred"]
    Clear = 0,
    #[doc = "1: An injected conversion overrun has occurred, which means that an injected conversion finished while JEOCF was already ‘1’. JDATAR is not affected by overruns"]
    Set = 1,
}
impl From<JOVRF_A> for bool {
    #[inline(always)]
    fn from(variant: JOVRF_A) -> Self {
        variant as u8 != 0
    }
}
impl JOVRF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> JOVRF_A {
        match self.bits {
            false => JOVRF_A::Clear,
            true => JOVRF_A::Set,
        }
    }
    #[doc = "No injected conversion overrun has occurred"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == JOVRF_A::Clear
    }
    #[doc = "An injected conversion overrun has occurred, which means that an injected conversion finished while JEOCF was already ‘1’. JDATAR is not affected by overruns"]
    #[inline(always)]
    pub fn is_set(&self) -> bool {
        *self == JOVRF_A::Set
    }
}
// Generated reader support for the ROVRF bit (ISR bit 3).
#[doc = "Field `ROVRF` reader - Regular conversion overrun flag"]
pub type ROVRF_R = crate::BitReader<ROVRF_A>;
#[doc = "Regular conversion overrun flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ROVRF_A {
    #[doc = "0: No regular conversion overrun has occurred"]
    Clear = 0,
    #[doc = "1: A regular conversion overrun has occurred, which means that a regular conversion finished while REOCF was already ‘1’. RDATAR is not affected by overruns"]
    Set = 1,
}
// Variant -> raw bit value, used by the bit-reader plumbing.
impl From<ROVRF_A> for bool {
    #[inline(always)]
    fn from(variant: ROVRF_A) -> Self {
        variant as u8 != 0
    }
}
impl ROVRF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ROVRF_A {
        match self.bits {
            false => ROVRF_A::Clear,
            true => ROVRF_A::Set,
        }
    }
    #[doc = "No regular conversion overrun has occurred"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == ROVRF_A::Clear
    }
    #[doc = "A regular conversion overrun has occurred, which means that a regular conversion finished while REOCF was already ‘1’. RDATAR is not affected by overruns"]
    #[inline(always)]
    pub fn is_set(&self) -> bool {
        *self == ROVRF_A::Set
    }
}
// Generated reader support for the AWDF bit (ISR bit 4).
#[doc = "Field `AWDF` reader - Analog watchdog"]
pub type AWDF_R = crate::BitReader<AWDF_A>;
#[doc = "Analog watchdog\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AWDF_A {
    #[doc = "0: No Analog watchdog event occurred"]
    Clear = 0,
    #[doc = "1: The analog watchdog block detected voltage which crosses the value programmed in the DFSDM_FLTxAWLTR or DFSDM_FLTxAWHTR registers"]
    Set = 1,
}
// Variant -> raw bit value, used by the bit-reader plumbing.
impl From<AWDF_A> for bool {
    #[inline(always)]
    fn from(variant: AWDF_A) -> Self {
        variant as u8 != 0
    }
}
impl AWDF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> AWDF_A {
        match self.bits {
            false => AWDF_A::Clear,
            true => AWDF_A::Set,
        }
    }
    #[doc = "No Analog watchdog event occurred"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == AWDF_A::Clear
    }
    #[doc = "The analog watchdog block detected voltage which crosses the value programmed in the DFSDM_FLTxAWLTR or DFSDM_FLTxAWHTR registers"]
    #[inline(always)]
    pub fn is_set(&self) -> bool {
        *self == AWDF_A::Set
    }
}
// Generated reader support for the JCIP bit (ISR bit 13).
#[doc = "Field `JCIP` reader - Injected conversion in progress status"]
pub type JCIP_R = crate::BitReader<JCIP_A>;
#[doc = "Injected conversion in progress status\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JCIP_A {
    #[doc = "0: No request to convert the injected channel group (neither by software nor by trigger) has been issued"]
    NotInProgress = 0,
    #[doc = "1: The conversion of the injected channel group is in progress or a request for a injected conversion is pending, due either to ‘1’ being written to JSWSTART or to a trigger detection"]
    InProgress = 1,
}
// Variant -> raw bit value, used by the bit-reader plumbing.
impl From<JCIP_A> for bool {
    #[inline(always)]
    fn from(variant: JCIP_A) -> Self {
        variant as u8 != 0
    }
}
impl JCIP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> JCIP_A {
        match self.bits {
            false => JCIP_A::NotInProgress,
            true => JCIP_A::InProgress,
        }
    }
    #[doc = "No request to convert the injected channel group (neither by software nor by trigger) has been issued"]
    #[inline(always)]
    pub fn is_not_in_progress(&self) -> bool {
        *self == JCIP_A::NotInProgress
    }
    #[doc = "The conversion of the injected channel group is in progress or a request for a injected conversion is pending, due either to ‘1’ being written to JSWSTART or to a trigger detection"]
    #[inline(always)]
    pub fn is_in_progress(&self) -> bool {
        *self == JCIP_A::InProgress
    }
}
// Generated reader support for the RCIP bit (ISR bit 14).
#[doc = "Field `RCIP` reader - Regular conversion in progress status"]
pub type RCIP_R = crate::BitReader<RCIP_A>;
#[doc = "Regular conversion in progress status\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RCIP_A {
    #[doc = "0: No request to convert the regular channel has been issued"]
    NotInProgress = 0,
    #[doc = "1: The conversion of the regular channel is in progress or a request for a regular conversion is pending"]
    InProgress = 1,
}
// Variant -> raw bit value, used by the bit-reader plumbing.
impl From<RCIP_A> for bool {
    #[inline(always)]
    fn from(variant: RCIP_A) -> Self {
        variant as u8 != 0
    }
}
impl RCIP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RCIP_A {
        match self.bits {
            false => RCIP_A::NotInProgress,
            true => RCIP_A::InProgress,
        }
    }
    #[doc = "No request to convert the regular channel has been issued"]
    #[inline(always)]
    pub fn is_not_in_progress(&self) -> bool {
        *self == RCIP_A::NotInProgress
    }
    #[doc = "The conversion of the regular channel is in progress or a request for a regular conversion is pending"]
    #[inline(always)]
    pub fn is_in_progress(&self) -> bool {
        *self == RCIP_A::InProgress
    }
}
// Generated reader support for the CKABF field (ISR bits 16:23, one flag bit
// per channel y). Unlike the single-bit flags above this is an 8-bit field,
// so `variant()` is partial: only the values 0 and 1 have named variants.
#[doc = "Field `CKABF` reader - Clock absence flag"]
pub type CKABF_R = crate::FieldReader<CKABF_A>;
#[doc = "Clock absence flag\n\nValue on reset: 255"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum CKABF_A {
    #[doc = "0: Clock signal on channel y is present."]
    Clear = 0,
    #[doc = "1: Clock signal on channel y is not present"]
    Set = 1,
}
// Variant -> raw field value, used by the field-reader plumbing.
impl From<CKABF_A> for u8 {
    #[inline(always)]
    fn from(variant: CKABF_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for CKABF_A {
    type Ux = u8;
}
impl CKABF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<CKABF_A> {
        // Field values with more than one bit set have no enum variant.
        match self.bits {
            0 => Some(CKABF_A::Clear),
            1 => Some(CKABF_A::Set),
            _ => None,
        }
    }
    #[doc = "Clock signal on channel y is present."]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == CKABF_A::Clear
    }
    #[doc = "Clock signal on channel y is not present"]
    #[inline(always)]
    pub fn is_set(&self) -> bool {
        *self == CKABF_A::Set
    }
}
// Generated reader support for the SCDF field (ISR bits 24:31, one flag bit
// per channel y); like CKABF, `variant()` only names the values 0 and 1.
#[doc = "Field `SCDF` reader - short-circuit detector flag"]
pub type SCDF_R = crate::FieldReader<SCDF_A>;
#[doc = "short-circuit detector flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum SCDF_A {
    #[doc = "0: No short-circuit detector event occurred on channel y"]
    Clear = 0,
    #[doc = "1: The short-circuit detector counter reaches, on channel y, the value programmed in the DFSDM_CHyAWSCDR registers"]
    Set = 1,
}
// Variant -> raw field value, used by the field-reader plumbing.
impl From<SCDF_A> for u8 {
    #[inline(always)]
    fn from(variant: SCDF_A) -> Self {
        variant as _
    }
}
impl crate::FieldSpec for SCDF_A {
    type Ux = u8;
}
impl SCDF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SCDF_A> {
        // Field values with more than one bit set have no enum variant.
        match self.bits {
            0 => Some(SCDF_A::Clear),
            1 => Some(SCDF_A::Set),
            _ => None,
        }
    }
    #[doc = "No short-circuit detector event occurred on channel y"]
    #[inline(always)]
    pub fn is_clear(&self) -> bool {
        *self == SCDF_A::Clear
    }
    #[doc = "The short-circuit detector counter reaches, on channel y, the value programmed in the DFSDM_CHyAWSCDR registers"]
    #[inline(always)]
    pub fn is_set(&self) -> bool {
        *self == SCDF_A::Set
    }
}
// Typed accessors that extract each field from the raw 32-bit ISR value;
// the shift/mask pairs mirror the bit positions documented on each method.
impl R {
    #[doc = "Bit 0 - End of injected conversion flag"]
    #[inline(always)]
    pub fn jeocf(&self) -> JEOCF_R {
        JEOCF_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - End of regular conversion flag"]
    #[inline(always)]
    pub fn reocf(&self) -> REOCF_R {
        REOCF_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Injected conversion overrun flag"]
    #[inline(always)]
    pub fn jovrf(&self) -> JOVRF_R {
        JOVRF_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Regular conversion overrun flag"]
    #[inline(always)]
    pub fn rovrf(&self) -> ROVRF_R {
        ROVRF_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Analog watchdog"]
    #[inline(always)]
    pub fn awdf(&self) -> AWDF_R {
        AWDF_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 13 - Injected conversion in progress status"]
    #[inline(always)]
    pub fn jcip(&self) -> JCIP_R {
        JCIP_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - Regular conversion in progress status"]
    #[inline(always)]
    pub fn rcip(&self) -> RCIP_R {
        RCIP_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bits 16:23 - Clock absence flag"]
    #[inline(always)]
    pub fn ckabf(&self) -> CKABF_R {
        CKABF_R::new(((self.bits >> 16) & 0xff) as u8)
    }
    #[doc = "Bits 24:31 - short-circuit detector flag"]
    #[inline(always)]
    pub fn scdf(&self) -> SCDF_R {
        SCDF_R::new(((self.bits >> 24) & 0xff) as u8)
    }
}
#[doc = "interrupt and status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ISR_SPEC;
impl crate::RegisterSpec for ISR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`isr::R`](R) reader structure"]
impl crate::Readable for ISR_SPEC {}
#[doc = "`reset()` method sets ISR to value 0x00ff_0000"]
impl crate::Resettable for ISR_SPEC {
    // 0x00ff_0000: the CKABF field (bits 16:23) resets to all-ones — clock
    // reported absent on every channel — while every other flag resets to 0.
    const RESET_VALUE: Self::Ux = 0x00ff_0000;
}
|
use crossbeam;
use petgraph;
use rayon::prelude::*;
use std::io;
use wasm_bindgen::prelude::*;
mod builder;
mod model;
mod parser;
mod scanner;
// Binding to the browser's `console.log`, imported through wasm-bindgen.
#[wasm_bindgen]
extern "C" {
    #[wasm_bindgen(js_namespace = console)]
    fn log(s: &str);
}
/// Wasm entry point: builds the demo dependency graph (serial mode, fixed
/// paths), logs the raw result to the browser console, and returns it as JSON.
#[wasm_bindgen]
pub async fn graph() -> String {
    let result = run(
        "develop".into(),
        "example".into(),
        "sns_topic.tf".into(),
        "sns_subscription.tf".into(),
        false,
    );
    // Echo the debug form (including any error) to the console first, so a
    // failure is still visible before the unwrap below panics.
    let rendered = format!("{:?}", result);
    unsafe {
        log(rendered.as_ref());
    }
    serde_json::to_string(&result.unwrap()).unwrap()
}
/// Builds the SNS topic/subscription dependency graph.
///
/// `is_concurrent` selects between the threaded pipeline and the serial one;
/// both produce the same graph. (Fixes the misspelled `is_conccurent`
/// parameter name; Rust call sites are positional, so callers are unaffected.)
pub fn run(
    environment: String,
    base_file_path: String,
    topic_file_path: String,
    subscription_file_path: String,
    is_concurrent: bool,
) -> Result<petgraph::graph::Graph<String, String>, io::Error> {
    if is_concurrent {
        run_concurrent(
            environment,
            base_file_path,
            topic_file_path,
            subscription_file_path,
        )
    } else {
        run_serial(
            environment,
            base_file_path,
            topic_file_path,
            subscription_file_path,
        )
    }
}
/// Serial pipeline: scan and parse the topic files, then the subscription
/// files, then build the graph from the two service lists.
fn run_serial(
    environment: String,
    base_file_path: String,
    topic_file_path: String,
    subscription_file_path: String,
) -> Result<petgraph::graph::Graph<String, String>, io::Error> {
    // Both scans share the environment and base path; only the target differs.
    let scan = |target: &String| scanner::scan(&environment, &base_file_path, target);
    let (topic_files, topic_contents) = scan(&topic_file_path)?;
    let (subscription_files, subscription_contents) = scan(&subscription_file_path)?;
    Ok(builder::build(
        parse_services(topic_files, topic_contents),
        parse_services(subscription_files, subscription_contents),
    ))
}
/// Pairs each scanned file with its parsed resource, producing one
/// `Service` per (file, content) pair.
fn parse_services(files: Vec<String>, contents: Vec<String>) -> Vec<model::Service> {
    let mut services = Vec::with_capacity(files.len());
    for (file, content) in files.into_iter().zip(contents) {
        // Keep only the final path component, stripped of surrounding quotes;
        // an empty path yields an empty name.
        let file_name = file
            .split('/')
            .last()
            .map_or("", |segment| segment.trim_matches('"'))
            .to_owned();
        services.push(model::Service::new(file_name, parser::parse(content)));
    }
    services
}
fn run_concurrent(
environment: String,
base_file_path: String,
topic_file_path: String,
subscription_file_path: String,
) -> Result<petgraph::graph::Graph<String, String>, io::Error> {
let topic_parse_handle = crossbeam::scope(|scope| {
let handle: crossbeam::thread::ScopedJoinHandle<Result<Vec<model::Service>, io::Error>> =
scope.spawn(|_| {
let (topic_files, topic_contents) =
scanner::scan_concurrent(&environment, &base_file_path, &topic_file_path)?;
Ok(parse_services_concurrent(topic_files, topic_contents))
});
handle.join().unwrap()
});
let subscription_parse_handle = crossbeam::scope(|scope| {
let handle: crossbeam::thread::ScopedJoinHandle<Result<Vec<model::Service>, io::Error>> =
scope.spawn(|_| {
let (subscription_files, subscription_contents) = scanner::scan_concurrent(
&environment,
&base_file_path,
&subscription_file_path,
)?;
Ok(parse_services_concurrent(
subscription_files,
subscription_contents,
))
});
handle.join().unwrap()
});
let topic_services = topic_parse_handle.unwrap()?;
let subscription_services = subscription_parse_handle.unwrap()?;
let graph = builder::build_concurrent(topic_services, subscription_services);
Ok(graph)
}
/// Parallel counterpart of `parse_services`: rayon parses the files
/// concurrently while producing the same `Service` list.
fn parse_services_concurrent(files: Vec<String>, contents: Vec<String>) -> Vec<model::Service> {
    files
        .into_par_iter()
        .zip(contents)
        .map(|(file, content)| {
            // Final path component with surrounding quotes removed.
            let file_name = match file.split('/').last() {
                Some(segment) => segment.trim_matches('"'),
                None => "",
            }
            .to_owned();
            model::Service::new(file_name, parser::parse(content))
        })
        .collect()
}
|
use sp_ipld::Ipld;
use std::fmt;
use crate::{
ipld_error::IpldError,
literal::Literal,
term::Term,
yatima,
};
/// Primitive operations on booleans: comparisons, binary connectives, and
/// negation. `Not` is unary; every other variant is binary (see `arity`).
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum BoolOp {
    Eql, // equality
    Lte, // less than or equal
    Lth, // less than
    Gte, // greater than or equal
    Gth, // greater than
    And, // conjunction
    Or,  // disjunction
    Xor, // exclusive or
    Not, // negation (unary)
}
impl BoolOp {
    /// The operation's lower-case textual symbol.
    pub fn symbol(self) -> String {
        match self {
            Self::Eql => "eql".to_owned(),
            Self::Lte => "lte".to_owned(),
            Self::Lth => "lth".to_owned(),
            Self::Gte => "gte".to_owned(),
            Self::Gth => "gth".to_owned(),
            Self::And => "and".to_owned(),
            Self::Or => "or".to_owned(),
            Self::Xor => "xor".to_owned(),
            Self::Not => "not".to_owned(),
        }
    }
    /// Inverse of [`BoolOp::symbol`]; `None` for unknown symbols.
    pub fn from_symbol(x: &str) -> Option<Self> {
        match x {
            "eql" => Some(Self::Eql),
            "lte" => Some(Self::Lte),
            "lth" => Some(Self::Lth),
            "gte" => Some(Self::Gte),
            "gth" => Some(Self::Gth),
            "and" => Some(Self::And),
            "or" => Some(Self::Or),
            "xor" => Some(Self::Xor),
            "not" => Some(Self::Not),
            _ => None,
        }
    }
    /// The yatima type of the operation: `#Bool -> #Bool` for `Not`,
    /// `#Bool #Bool -> #Bool` for everything else.
    pub fn type_of(self) -> Term {
        match self {
            Self::Eql => yatima!("∀ #Bool #Bool -> #Bool"),
            Self::Lte => yatima!("∀ #Bool #Bool -> #Bool"),
            Self::Lth => yatima!("∀ #Bool #Bool -> #Bool"),
            Self::Gte => yatima!("∀ #Bool #Bool -> #Bool"),
            Self::Gth => yatima!("∀ #Bool #Bool -> #Bool"),
            Self::And => yatima!("∀ #Bool #Bool -> #Bool"),
            Self::Or => yatima!("∀ #Bool #Bool -> #Bool"),
            Self::Xor => yatima!("∀ #Bool #Bool -> #Bool"),
            Self::Not => yatima!("∀ #Bool -> #Bool"),
        }
    }
    /// Serialize to an IPLD integer tag (0..=8, in declaration order).
    pub fn to_ipld(self) -> Ipld {
        match self {
            Self::Eql => Ipld::Integer(0),
            Self::Lte => Ipld::Integer(1),
            Self::Lth => Ipld::Integer(2),
            Self::Gte => Ipld::Integer(3),
            Self::Gth => Ipld::Integer(4),
            Self::And => Ipld::Integer(5),
            Self::Or => Ipld::Integer(6),
            Self::Xor => Ipld::Integer(7),
            Self::Not => Ipld::Integer(8),
        }
    }
    /// Deserialize from the IPLD integer tag; any other IPLD value is an error.
    pub fn from_ipld(ipld: &Ipld) -> Result<Self, IpldError> {
        match ipld {
            Ipld::Integer(0) => Ok(Self::Eql),
            Ipld::Integer(1) => Ok(Self::Lte),
            Ipld::Integer(2) => Ok(Self::Lth),
            Ipld::Integer(3) => Ok(Self::Gte),
            Ipld::Integer(4) => Ok(Self::Gth),
            Ipld::Integer(5) => Ok(Self::And),
            Ipld::Integer(6) => Ok(Self::Or),
            Ipld::Integer(7) => Ok(Self::Xor),
            Ipld::Integer(8) => Ok(Self::Not),
            xs => Err(IpldError::BoolOp(xs.to_owned())),
        }
    }
    /// Number of arguments the operation consumes.
    pub fn arity(self) -> u64 {
        match self {
            Self::Eql => 2,
            Self::Lth => 2,
            Self::Lte => 2,
            Self::Gth => 2,
            Self::Gte => 2,
            Self::And => 2,
            Self::Or => 2,
            Self::Xor => 2,
            Self::Not => 1,
        }
    }
    /// Apply a unary operation; `None` when the op/argument don't match.
    pub fn apply1(self, x: &Literal) -> Option<Literal> {
        use Literal::*;
        match (self, x) {
            (Self::Not, Bool(x)) => Some(Bool(!x)),
            _ => None,
        }
    }
    /// Apply a binary operation; `None` when the op/arguments don't match.
    pub fn apply2(self, x: &Literal, y: &Literal) -> Option<Literal> {
        use Literal::*;
        match (self, x, y) {
            (Self::Eql, Bool(x), Bool(y)) => Some(Bool(x == y)),
            (Self::Lth, Bool(x), Bool(y)) => Some(Bool(x < y)),
            (Self::Lte, Bool(x), Bool(y)) => Some(Bool(x <= y)),
            (Self::Gth, Bool(x), Bool(y)) => Some(Bool(x > y)),
            // Bug fix: the `Gte` arm was missing, so a binary `gte`
            // application returned `None` even though `arity()` reports 2.
            (Self::Gte, Bool(x), Bool(y)) => Some(Bool(x >= y)),
            (Self::And, Bool(x), Bool(y)) => Some(Bool(x & y)),
            (Self::Or, Bool(x), Bool(y)) => Some(Bool(x | y)),
            (Self::Xor, Bool(x), Bool(y)) => Some(Bool(x ^ y)),
            _ => None,
        }
    }
}
impl fmt::Display for BoolOp {
    /// Displays the operation as its textual symbol (e.g. `and`, `not`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.symbol())
    }
}
#[cfg(test)]
pub mod tests {
    use super::*;
    use quickcheck::{
        Arbitrary,
        Gen,
    };
    use rand::Rng;
    impl Arbitrary for BoolOp {
        fn arbitrary(_g: &mut Gen) -> Self {
            let mut rng = rand::thread_rng();
            // Bug fix: `gen_range(0..8)` only yields 0..=7, so the fallback
            // `_ => Self::Not` arm was unreachable and `Not` was never
            // generated. The half-open range must span all nine variants.
            let gen: u32 = rng.gen_range(0..9);
            match gen {
                0 => Self::Eql,
                1 => Self::Lte,
                2 => Self::Lth,
                3 => Self::Gte,
                4 => Self::Gth,
                5 => Self::And,
                6 => Self::Or,
                7 => Self::Xor,
                _ => Self::Not,
            }
        }
    }
    // Round trip: every op survives to_ipld / from_ipld unchanged.
    #[quickcheck]
    fn nat_op_ipld(x: BoolOp) -> bool {
        match BoolOp::from_ipld(&x.to_ipld()) {
            Ok(y) => x == y,
            _ => false,
        }
    }
    //#[test]
    // fn test_apply_bin_op() {
    //     assert_eq!(
    //         Some(Literal::Text(ropey::Rope::from_str("foo"))),
    //         apply_bin_op(
    //             PrimOp::TextCons,
    //             Literal::Char('f'),
    //             Literal::Text(ropey::Rope::from_str("oo"))
    //         )
    //     )
    //}
}
|
#![no_std]
#![feature(abi_x86_interrupt)]
use lazy_static::lazy_static;
use core::ffi::c_void;
use core::mem;
use game::{Direction, Game};
use spin::Mutex;
use pic8259_simple::ChainedPics;
mod game;
mod screens;
// VGA text-mode attribute words: the color attribute lives in the high byte
// of a cell, the character byte is OR-ed into the low byte.
const WHITE: u16 = 0x0F00;
const BLUE: u16 = 0x0100;
const RED: u16 = 0x0400;
lazy_static! {
    // Interrupt descriptor table. Only three gates are populated: the CPU
    // double-fault exception (vector 8) and the remapped PIC lines for the
    // timer (0x20) and keyboard (0x21).
    static ref IDT: [IDTEntry; 256] = {
        let mut idt = [IDTEntry::unused(); 256];
        idt[0x08] = IDTEntry::new(double_fault as _);
        idt[0x20] = IDTEntry::new(timer_interrupt as _);
        idt[0x21] = IDTEntry::new(keyboard_interrupt as _);
        idt
    };
    // Global game state, shared between the timer and keyboard handlers.
    static ref GAME: Mutex<Game> = Mutex::new(Game::new());
    // The VGA text buffer: 25 rows of 80 u16 cells mapped at 0xB8000.
    static ref SCREEN: Mutex<&'static mut [[u16; 80]; 25]> = Mutex::new(unsafe {
        &mut *(0xB8000 as *mut [[u16; 80]; 25])
    });
}
// Chained 8259 PICs, remapped to vectors 0x20-0x2F so hardware IRQs don't
// collide with CPU exception vectors.
static PICS: Mutex<ChainedPics> = Mutex::new(unsafe {
    ChainedPics::new(0x20, 0x28)
});
/// Writes `s` into the VGA buffer starting at row `y0`, column `x0`, with
/// the given color attribute; `\n` moves to the start column of the next row.
fn print(color: u16, y0: usize, x0: usize, s: &str) {
    let mut screen = SCREEN.lock();
    let (mut y, mut x) = (y0, x0);
    for byte in s.bytes() {
        if byte == b'\n' {
            x = x0;
            y += 1;
        } else {
            // Cell = attribute in the high byte | ASCII code in the low byte.
            screen[y][x] = color | byte as u16;
            x += 1;
        }
    }
}
/// Renders `score` right-aligned in a 10-cell field ending at column `x`,
/// space-padding the unused cells. A u32 has at most 10 decimal digits, so
/// the digits never run past the field's left edge at `start`.
fn print_score(y: usize, mut x: usize, mut score: u32) {
    let mut screen = SCREEN.lock();
    // Left edge of the field (exclusive; `x` walks down toward it).
    let start = x - 10;
    if score == 0 {
        // 0x30 is ASCII '0'; digit cells are attribute | ASCII code.
        screen[y][x] = WHITE | 0x30;
        x -= 1;
    } else {
        // Emit digits least-significant first, moving leftwards.
        while score != 0 {
            screen[y][x] = WHITE | 0x30 | (score % 10) as u16;
            x -= 1;
            score /= 10;
        }
    }
    // Blank (0x20 = space) whatever is left of the field.
    while x != start {
        screen[y][x] = WHITE | 0x20;
        x -= 1;
    }
}
/// Kernel entry point: hides the VGA cursor, shows the instructions screen,
/// and enables hardware interrupts (which then drive the whole game).
#[no_mangle]
pub extern fn start() {
    // Disable the cursor.
    unsafe {
        // CRTC index 0x0A is the cursor-start register; writing bit 5 (0x20)
        // disables the hardware cursor.
        x86::io::outb(0x3D4, 0x0A);
        x86::io::outb(0x3D5, 0x20);
    }
    // Print the instructions.
    print(WHITE, 0, 50, screens::INSTRUCTIONS);
    // Set things up so that we start receiving interrupts.
    unsafe {
        enable_interrupts();
    }
}
/// Loads the IDT, initializes the PICs, and enables interrupts.
///
/// # Safety
/// Must only be called once the IDT entries point at valid handlers; after
/// this returns, interrupts fire at any time.
unsafe fn enable_interrupts() {
    // Tell the computer where to find the interrupt table.
    x86::dtables::lidt(&x86::dtables::DescriptorTablePointer {
        // The limit is the table's byte size minus one.
        limit: (256 * mem::size_of::<IDTEntry>() - 1) as u16,
        base: IDT.as_ptr(),
    });
    // Initialize the PICs.
    PICS.lock().initialize();
    // Enable interrupts.
    x86::irq::enable();
}
// Double-fault handler: nothing sensible to recover here, so just return.
extern "x86-interrupt" fn double_fault(_: *const c_void) {}
/// Timer (IRQ0) handler: advances the game one tick and redraws the screen
/// according to the resulting game state.
extern "x86-interrupt" fn timer_interrupt(_: *const c_void) {
    let mut game = GAME.lock();
    game.tick();
    match game.state {
        game::State::Title => {
            print(WHITE, 0, 0, screens::TITLE);
        }
        game::State::Countdown(n) => {
            // Countdown art for 3-2-1-0 before the round starts.
            let screen = match n {
                0 => screens::ZERO,
                1 => screens::ONE,
                2 => screens::TWO,
                _ => screens::THREE,
            };
            print(WHITE, 0, 0, screen);
        }
        game::State::Main => {
            // Redraw the 25x50 playing field; cells are colored spaces
            // (0x1020 blue, 0x4020 red, 0x0220 empty/green background).
            let mut screen = SCREEN.lock();
            for y in 0..25 {
                for x in 0..50 {
                    screen[y][x] = match game.board[[y, x]] {
                        game::Cell::Blue => 0x1020,
                        game::Cell::Red => 0x4020,
                        game::Cell::Empty => 0x0220,
                    };
                }
            }
            // Draw both player heads on top of the board.
            screen[game.blue.pos[0]][game.blue.pos[1]] = 0x1020;
            screen[game.red.pos[0]][game.red.pos[1]] = 0x4020;
        },
        game::State::Death => {
            // The surviving player's color wins; both dead is a tie.
            let (screen, color) = match (game.blue.alive, game.red.alive) {
                (false, false) => (screens::TIE, WHITE),
                (false, true) => (screens::WIN, RED),
                (true, false) => (screens::WIN, BLUE),
                (true, true) => unreachable!(),
            };
            print(color, 0, 0, screen);
        }
        game::State::Paused => {
            print(WHITE, 0, 0, screens::PAUSED);
        }
    }
    print_score(7, 78, game.blue.score);
    print_score(8, 78, game.red.score);
    // Acknowledge IRQ0 so the PIC keeps delivering timer interrupts.
    unsafe {
        PICS.lock().notify_end_of_interrupt(0x20);
    }
}
/// Keyboard (IRQ1) handler: reads the scancode from port 0x60 and maps it to
/// a game action. Break (key-release) codes fall through the `_` arm.
extern "x86-interrupt" fn keyboard_interrupt(_: *const c_void) {
    let code = unsafe { x86::io::inb(0x60) };
    let mut game = GAME.lock();
    match code {
        57 => game.start(), // Space
        25 => game.pause(), // P
        16 => game.quit(), // Q
        17 => game.input(false, Direction::Up), // W
        31 => game.input(false, Direction::Down), // S
        30 => game.input(false, Direction::Left), // A
        32 => game.input(false, Direction::Right), // D
        72 => game.input(true, Direction::Up), // Up
        80 => game.input(true, Direction::Down), // Down
        75 => game.input(true, Direction::Left), // Left
        77 => game.input(true, Direction::Right), // Right
        _ => {}
    }
    // Acknowledge IRQ1 so the PIC keeps delivering keyboard interrupts.
    unsafe {
        PICS.lock().notify_end_of_interrupt(0x21);
    }
}
/// A single 32-bit protected-mode IDT gate descriptor.
#[derive(Clone, Copy, Default)]
#[repr(C)]
struct IDTEntry {
    /// The address of the handler's entrypoint.
    offset_lo: u16,
    /// The segment (always 0x08).
    selector: u16,
    /// Flags.
    flags: u16,
    /// The rest of the offset.
    offset_hi: u16,
}

impl IDTEntry {
    /// A present, ring-0, 32-bit interrupt gate (flags 0x8E00) pointing at
    /// `offset` within code segment 0x08.
    const fn new(offset: u32) -> Self {
        Self {
            offset_lo: (offset & 0xFFFF) as u16,
            selector: 0x08,
            flags: 0x8E00,
            offset_hi: (offset >> 16) as u16,
        }
    }
    /// An all-zero gate (present bit clear), used for unpopulated vectors.
    const fn unused() -> Self {
        Self { offset_lo: 0, selector: 0, flags: 0, offset_hi: 0 }
    }
}
// Required in `no_std`: on panic there is nothing to unwind to, so spin.
#[panic_handler]
fn panic(_: &core::panic::PanicInfo) -> ! {
    loop {}
}
|
use serde::{Deserialize, Serialize};
/// Scene metadata; currently just a display name.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
pub struct Metadata {
    pub name: String,
}

impl Default for Metadata {
    /// New scenes start out named "Untitled scene".
    fn default() -> Self {
        Metadata { name: String::from("Untitled scene") }
    }
}
|
// -------------------------------------------------------------------------------//
// Cryptopals, Set 2, Challenge 9: https://cryptopals.com/sets/2/challenges/9
// Impl by Frodo45127
// -------------------------------------------------------------------------------//
use crate::utils::padd_to_end;
/// Cryptopals 2.9: padding "YELLOW SUBMARINE" (16 bytes) out to 20 bytes
/// must append four 0x04 bytes.
pub fn challenge() {
    let source_data = b"YELLOW SUBMARINE";
    let expected = b"YELLOW SUBMARINE\x04\x04\x04\x04".to_vec();
    assert_eq!(expected, padd_to_end(source_data, 20));
}
|
use core::ops::{Bound, RangeBounds};
/// Represents a `value` at a given `span` of an input.
#[derive(Clone, Debug, PartialEq)]
pub struct Span<T> {
    start: usize,
    end: usize,
    value: T,
}

impl<T> Span<T> {
    /// Construct a new `Span` from a range and a value.
    ///
    /// The range is normalized to half-open `[start, end)` indices; unbounded
    /// ranges panic, since a span must have concrete endpoints.
    pub fn new<R: RangeBounds<usize>>(range: R, value: T) -> Self {
        let start = match range.start_bound() {
            Bound::Included(&n) => n,
            Bound::Excluded(&n) => n + 1,
            Bound::Unbounded => panic!("Span cannot be constructed with unbounded ranges"),
        };
        let end = match range.end_bound() {
            Bound::Included(&n) => n + 1,
            Bound::Excluded(&n) => n,
            Bound::Unbounded => panic!("Span cannot be constructed with unbounded ranges"),
        };
        Span { start, end, value }
    }

    /// Get the (inclusive) start index of the span.
    pub fn start(&self) -> usize {
        self.start
    }

    /// Get the (exclusive) end index of the span.
    pub fn end(&self) -> usize {
        self.end
    }

    /// Retrieve the value from the span, discarding the range information.
    pub fn take(self) -> T {
        self.value
    }

    /// Transform the value in a span using the given function.
    pub fn map<U>(self, transform: impl FnOnce(T) -> U) -> Span<U> {
        let Span { start, end, value } = self;
        Span { start, end, value: transform(value) }
    }

    /// Offset the range of the span relative to the given end index.
    ///
    /// This just adds `end` to the span's range `start` and `end`.
    pub fn relative_to(self, end: usize) -> Self {
        Span { start: self.start + end, end: self.end + end, value: self.value }
    }
}
#[cfg(test)]
mod tests {
    use super::Span;

    // A zero-sized marker value so accessor tests don't depend on a payload.
    #[derive(Debug, PartialEq)]
    struct Value;

    #[test]
    fn test_start() {
        assert_eq!(Span::new(0..1, Value).start(), 0);
    }

    #[test]
    fn test_end() {
        assert_eq!(Span::new(0..1, Value).end(), 1);
    }

    #[test]
    fn test_take() {
        assert_eq!(Span::new(0..1, Value).take(), Value);
    }

    #[test]
    fn test_map() {
        assert_eq!(Span::new(0..1, Value).map(|_| 2), Span::new(0..1, 2));
    }

    #[test]
    fn test_relative_to() {
        assert_eq!(Span::new(0..10, ()).relative_to(5), Span::new(5..15, ()));
    }
}
|
use std::{fmt, hash};
use crate::{
Context, ContextHandle, ExclusivelyContextual, Polarity, AtomId, ForkId, JoinId, AcesError,
AcesErrorKind,
domain::{Dotset, DotId, DotsetId},
};
/// A common type of one-to-many and many-to-one elements of the fuset
/// representation of c-e structures.
///
/// A wide edge represents a monomial attached to a dot. There are
/// two variants of a `Wedge`: [`Join`] represents causes and [`Fork`]
/// represents effects.
#[derive(Clone, Eq)]
pub struct Wedge {
    // `None` until the wedge is registered and assigned an atom id
    // (see `get_atom_id`, which panics on an uninitialized wedge).
    pub(crate) atom_id: Option<AtomId>,
    // `Tx` for forks (effects), `Rx` for joins (causes).
    polarity: Polarity,
    // The single dot on the "one" side of the wide edge.
    tip_id: DotId,
    // The dotset forming the "many" side (the arms).
    pit_id: DotsetId,
}
impl Wedge {
    // Internal constructor; the atom id is assigned later, when the wedge is
    // shared through a `Context`.
    pub(crate) fn new(polarity: Polarity, tip_id: DotId, pit_id: DotsetId) -> Self {
        Wedge { atom_id: None, polarity, tip_id, pit_id }
    }
    /// [`Fork`]'s constructor.
    ///
    /// See also [`Wedge::new_fork_unchecked()`].
    pub fn new_fork<I>(ctx: &ContextHandle, tip_id: DotId, arm_ids: I) -> ForkId
    where
        I: IntoIterator<Item = DotId>,
    {
        // `Dotset::new` (unlike `new_unchecked` below) normalizes the arms.
        let pit = Dotset::new(arm_ids);
        trace!("New fork: {:?} -> {:?}", tip_id, pit);
        ctx.lock().unwrap().share_fork_from_tip_and_pit(tip_id, pit)
    }
    /// [`Join`]'s constructor.
    ///
    /// See also [`Wedge::new_join_unchecked()`].
    pub fn new_join<I>(ctx: &ContextHandle, tip_id: DotId, arm_ids: I) -> JoinId
    where
        I: IntoIterator<Item = DotId>,
    {
        let pit = Dotset::new(arm_ids);
        trace!("New join: {:?} <- {:?}", tip_id, pit);
        ctx.lock().unwrap().share_join_from_tip_and_pit(tip_id, pit)
    }
    /// A more efficient variant of [`Wedge::new_fork()`].
    ///
    /// Note: new [`Fork`] is created under the assumption that
    /// `arm_ids` are nonempty and listed in ascending order. If the
    /// caller fails to provide an ordered pit, the library may panic
    /// in some other call (the constructor itself panics immediately
    /// in debug mode).
    #[inline]
    pub fn new_fork_unchecked<I>(ctx: &ContextHandle, tip_id: DotId, arm_ids: I) -> ForkId
    where
        I: IntoIterator<Item = DotId>,
    {
        let pit = Dotset::new_unchecked(arm_ids);
        trace!("New fork: {:?} -> {:?}", tip_id, pit);
        ctx.lock().unwrap().share_fork_from_tip_and_pit(tip_id, pit)
    }
    /// A more efficient variant of [`Wedge::new_join()`].
    ///
    /// Note: new [`Join`] is created under the assumption that
    /// `arm_ids` are nonempty and listed in ascending order. If the
    /// caller fails to provide an ordered pit, the library may panic
    /// in some other call (the constructor itself panics immediately
    /// in debug mode).
    #[inline]
    pub fn new_join_unchecked<I>(ctx: &ContextHandle, tip_id: DotId, arm_ids: I) -> JoinId
    where
        I: IntoIterator<Item = DotId>,
    {
        let pit = Dotset::new_unchecked(arm_ids);
        trace!("New join: {:?} <- {:?}", tip_id, pit);
        ctx.lock().unwrap().share_join_from_tip_and_pit(tip_id, pit)
    }
    /// The registered atom id.
    ///
    /// Panics if the wedge hasn't been registered yet (`atom_id` is `None`);
    /// the message names the variant for easier diagnosis.
    #[inline]
    pub fn get_atom_id(&self) -> AtomId {
        match self.polarity {
            Polarity::Tx => self.atom_id.expect("Attempt to access an uninitialized fork"),
            Polarity::Rx => self.atom_id.expect("Attempt to access an uninitialized join"),
        }
    }
    /// The atom id as a [`ForkId`], or `None` if this wedge is a join.
    #[inline]
    pub fn get_fork_id(&self) -> Option<ForkId> {
        match self.polarity {
            Polarity::Tx => Some(ForkId(self.get_atom_id())),
            Polarity::Rx => None,
        }
    }
    /// The atom id as a [`JoinId`], or `None` if this wedge is a fork.
    #[inline]
    pub fn get_join_id(&self) -> Option<JoinId> {
        match self.polarity {
            Polarity::Tx => None,
            Polarity::Rx => Some(JoinId(self.get_atom_id())),
        }
    }
    #[inline]
    pub fn get_polarity(&self) -> Polarity {
        self.polarity
    }
    #[inline]
    pub fn get_tip_id(&self) -> DotId {
        self.tip_id
    }
    #[inline]
    pub fn get_pit_id(&self) -> DotsetId {
        self.pit_id
    }
}
impl fmt::Debug for Wedge {
    // Manual impl so the struct name in the output reflects the polarity
    // ("Fork" or "Join") instead of the literal type name `Wedge`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{} {{ atom_id: ",
            match self.polarity {
                Polarity::Tx => "Fork",
                Polarity::Rx => "Join",
            }
        )?;
        self.atom_id.fmt(f)?;
        write!(f, ", tip_id: ")?;
        self.tip_id.fmt(f)?;
        write!(f, ", pit_id: ")?;
        self.pit_id.fmt(f)?;
        write!(f, " }}")
    }
}
impl PartialEq for Wedge {
    // `atom_id` is intentionally not compared: registered and unregistered
    // wedges with the same shape are equal. Kept in sync with `Hash` below,
    // preserving the `a == b => hash(a) == hash(b)` invariant.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.polarity == other.polarity
            && self.tip_id == other.tip_id
            && self.pit_id == other.pit_id
    }
}
impl hash::Hash for Wedge {
    // Hashes exactly the fields compared by `PartialEq` (atom_id excluded).
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.polarity.hash(state);
        self.tip_id.hash(state);
        self.pit_id.hash(state);
    }
}
impl ExclusivelyContextual for Wedge {
    // Renders the wedge using dot names resolved through the (already locked)
    // context: forks as `(tip > [arms])`, joins as `([arms] > tip)`.
    fn format_locked(&self, ctx: &Context) -> Result<String, AcesError> {
        let tip_name = ctx.get_dot_name(self.tip_id).ok_or(match self.polarity {
            Polarity::Tx => AcesError::from(AcesErrorKind::DotMissingForFork(Polarity::Tx)),
            Polarity::Rx => AcesError::from(AcesErrorKind::DotMissingForJoin(Polarity::Rx)),
        })?;
        let pit = ctx
            .get_dotset(self.pit_id)
            .ok_or_else(|| AcesError::from(AcesErrorKind::DotsetMissingForId(self.pit_id)))?;
        let arm_names: Result<Vec<_>, AcesError> = pit
            .get_dot_ids()
            .iter()
            .map(|&dot_id| {
                // NOTE(review): for a missing arm, the error kind carries the
                // polarity opposite to the tip's (Tx fork -> Rx, Rx join -> Tx),
                // unlike the tip error above — presumably marking the arm side
                // of the edge; confirm this asymmetry is intended.
                ctx.get_dot_name(dot_id).ok_or(match self.polarity {
                    Polarity::Tx => AcesError::from(AcesErrorKind::DotMissingForFork(Polarity::Rx)),
                    Polarity::Rx => AcesError::from(AcesErrorKind::DotMissingForJoin(Polarity::Tx)),
                })
            })
            .collect();
        match self.polarity {
            Polarity::Tx => Ok(format!("({} > {:?})", tip_name, arm_names?)),
            Polarity::Rx => Ok(format!("({:?} > {})", arm_names?, tip_name)),
        }
    }
}
/// Forward wide edge: representation of effects (polarity `Tx`).
pub type Fork = Wedge;
/// Backward wide edge: representation of causes (polarity `Rx`).
pub type Join = Wedge;
|
use std::convert::TryFrom;
use anyhow::{anyhow, Result};
use crate::day12::InstructionType::Rotate;
use crate::Challenge;
/// Advent of Code 2020, day 12: steer a ship from a list of instructions.
pub struct Day12;

impl Challenge for Day12 {
    const DAY_NUMBER: u32 = 12;
    type InputType = Vec<Instruction>;
    type OutputType = u32;
    /// Part 1: instructions act on the ship directly; the answer is the
    /// Manhattan distance from the origin.
    fn part1(input: &Self::InputType) -> Result<Self::OutputType> {
        let mut state = State::default();
        input.iter().for_each(|i| i.apply_ship(&mut state));
        // `unsigned_abs` cannot overflow, unlike `abs() as u32` on i32::MIN.
        Ok(state.coords.0.unsigned_abs() + state.coords.1.unsigned_abs())
    }
    /// Part 2: instructions move a waypoint; `F` moves the ship toward it.
    fn part2(input: &Self::InputType) -> Result<Self::OutputType> {
        let mut state = State::default();
        let mut waypoint = Waypoint(10, -1);
        input
            .iter()
            .for_each(|i| i.apply_waypoint(&mut state, &mut waypoint));
        Ok(state.coords.0.unsigned_abs() + state.coords.1.unsigned_abs())
    }
    /// Parses one instruction per input line.
    fn parse(content: &str) -> Result<Self::InputType> {
        crate::utils::parse_line_separated_list(content)
    }
}
/// The ship's position and current heading.
#[derive(Debug)]
pub struct State {
    pub coords: (i32, i32),
    pub orientation: Direction,
}

impl Default for State {
    /// Ships start at the origin facing east.
    fn default() -> Self {
        State { coords: (0, 0), orientation: Direction::East }
    }
}

impl State {
    /// Translate the ship by an (x, y) offset.
    pub fn move_by(&mut self, offset: (i32, i32)) {
        let (dx, dy) = offset;
        self.coords = (self.coords.0 + dx, self.coords.1 + dy)
    }
    /// Move `amount` units in `direction`.
    pub fn move_toward(&mut self, direction: Direction, amount: i32) {
        let (dx, dy) = direction.get_offset();
        self.move_by((dx * amount, dy * amount));
    }
    /// Turn the ship by `amount` degrees in the given turn direction.
    pub fn rotate(&mut self, rotation: Rotation, amount: i32) {
        self.orientation = self.orientation.rotated(rotation, amount);
    }
}
/// A waypoint position relative to the ship (x grows east, y grows south —
/// matching `Direction::get_offset`).
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Waypoint(pub i32, pub i32);

impl Waypoint {
    /// Shift the waypoint `amount` units in `direction`.
    pub fn move_toward(&mut self, direction: Direction, amount: i32) {
        let (dx, dy) = direction.get_offset();
        self.0 += dx * amount;
        self.1 += dy * amount;
    }
    /// Rotate the waypoint about the ship by `amount` degrees.
    pub fn rotate(&mut self, rotation: Rotation, amount: i32) {
        // Normalization leaves only 0, 90, or 180 degrees to handle.
        let (rotation, amount) = normalize_rotation(rotation, amount);
        if amount == 0 {
            return;
        }
        *self = if amount == 180 {
            Waypoint(-self.0, -self.1)
        } else if amount == 90 {
            match rotation {
                Rotation::Right => Waypoint(-self.1, self.0),
                Rotation::Left => Waypoint(self.1, -self.0),
            }
        } else {
            panic!("Unexpected rotation amount {}", amount)
        };
    }
}
/// One parsed navigation instruction: an action plus an integer magnitude.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Instruction {
    typ: InstructionType,
    amount: i32,
}

impl Instruction {
    /// Part-1 semantics: the instruction acts on the ship itself.
    pub fn apply_ship(&self, state: &mut State) {
        match self.typ {
            InstructionType::Forward => state.move_toward(state.orientation, self.amount),
            InstructionType::Move(direction) => state.move_toward(direction, self.amount),
            InstructionType::Rotate(rotation) => state.rotate(rotation, self.amount),
        }
    }
    /// Part-2 semantics: `Move`/`Rotate` act on the waypoint, while `Forward`
    /// moves the ship toward the waypoint `amount` times.
    pub fn apply_waypoint(&self, state: &mut State, waypoint: &mut Waypoint) {
        match self.typ {
            InstructionType::Forward => {
                state.move_by((waypoint.0 * self.amount, waypoint.1 * self.amount))
            }
            InstructionType::Move(direction) => waypoint.move_toward(direction, self.amount),
            InstructionType::Rotate(rotation) => waypoint.rotate(rotation, self.amount),
        }
    }
}

impl std::str::FromStr for Instruction {
    type Err = anyhow::Error;
    /// Parses strings like `F10` or `R90`: a one-letter action followed by
    /// an integer amount.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let head = s
            .chars()
            .next()
            .ok_or_else(|| anyhow!("Empty instruction"))?;
        let typ = InstructionType::try_from(head)?;
        let amount: i32 = s[1..].parse()?;
        Ok(Self { typ, amount })
    }
}
/// The action part of an instruction.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum InstructionType {
    /// Move in a compass direction without changing heading.
    Move(Direction),
    /// Turn left or right.
    Rotate(Rotation),
    /// Move along the current heading (part 1) or toward the waypoint (part 2).
    Forward,
}

impl TryFrom<char> for InstructionType {
    type Error = anyhow::Error;
    /// Maps the instruction letter to its action; unknown letters are errors.
    fn try_from(s: char) -> Result<Self, Self::Error> {
        // Consistency fix: use `Self::...` everywhere instead of mixing the
        // glob-imported `Rotate` with fully qualified variant paths.
        match s {
            'N' => Ok(Self::Move(Direction::North)),
            'S' => Ok(Self::Move(Direction::South)),
            'E' => Ok(Self::Move(Direction::East)),
            'W' => Ok(Self::Move(Direction::West)),
            'L' => Ok(Self::Rotate(Rotation::Left)),
            'R' => Ok(Self::Rotate(Rotation::Right)),
            'F' => Ok(Self::Forward),
            _ => Err(anyhow!("Unknown instruction type {}", s)),
        }
    }
}
/// A compass direction on the grid.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Direction {
    North,
    South,
    East,
    West,
}
impl Direction {
    /// Unit (dx, dy) offset for one step in this direction.
    /// Note the y-axis points down: North is (0, -1).
    pub fn get_offset(self) -> (i32, i32) {
        match self {
            Direction::North => (0, -1),
            Direction::South => (0, 1),
            Direction::East => (1, 0),
            Direction::West => (-1, 0),
        }
    }
    /// The direction obtained by turning `amount` degrees toward `rotation`.
    pub fn rotated(self, rotation: Rotation, amount: i32) -> Direction {
        // Canonicalize to (rotation, 0..=180) so only 0/90/180 remain.
        let (rotation, amount) = normalize_rotation(rotation, amount);
        match (self, rotation, amount) {
            (_, _, 0) => self,
            (_, _, 180) => self.opposite(),
            (Direction::West, Rotation::Right, 90) | (Direction::East, Rotation::Left, 90) => {
                Direction::North
            }
            (Direction::North, Rotation::Right, 90) | (Direction::South, Rotation::Left, 90) => {
                Direction::East
            }
            (Direction::East, Rotation::Right, 90) | (Direction::West, Rotation::Left, 90) => {
                Direction::South
            }
            (Direction::South, Rotation::Right, 90) | (Direction::North, Rotation::Left, 90) => {
                Direction::West
            }
            // normalize_rotation only yields 0/90/180 after the % 360 fold.
            _ => panic!("Unexpected rotation amount {}", amount),
        }
    }
    /// The direction 180 degrees away.
    pub fn opposite(self) -> Direction {
        match self {
            Direction::North => Direction::South,
            Direction::South => Direction::North,
            Direction::East => Direction::West,
            Direction::West => Direction::East,
        }
    }
}
/// A turning direction.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Rotation {
    Left,
    Right,
}
impl Rotation {
    /// The opposite turning direction.
    pub fn opposite(&self) -> Rotation {
        match self {
            Rotation::Right => Rotation::Left,
            Rotation::Left => Rotation::Right,
        }
    }
}
/// Clamp `amount` between 0 and 180 degrees, by modifying `rotation` accordingly.
///
/// ```ignore
/// assert_eq!(normalize_rotation(Rotation::Left, -90), (Rotation::Right, 90));
/// assert_eq!(normalize_rotation(Rotation::Right, 270), (Rotation::Left, 90));
/// ```
fn normalize_rotation(rotation: Rotation, amount: i32) -> (Rotation, u32) {
    let (rotation, amount) = if amount < 0 {
        // Widen to i64 before taking the absolute value: `i32::MIN.abs()`
        // would panic in debug builds (and wrap in release builds).
        (rotation.opposite(), (amount as i64).abs() as u32)
    } else {
        (rotation, amount as u32)
    };
    let amount = amount % 360;
    if amount > 180 {
        // e.g. turning 270 right is the same as turning 90 left.
        (rotation.opposite(), 360 - amount)
    } else {
        (rotation, amount)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    const EXAMPLE: &str = "F10
N3
F7
R90
F11";
    #[test]
    fn test_part1() {
        assert_eq!(Day12::solve1(EXAMPLE).unwrap(), 25);
    }
    #[test]
    fn test_part2() {
        // Fixed: this called `solve1` (re-testing part 1); 286 is the
        // expected part-2 (waypoint) answer for the example input.
        assert_eq!(Day12::solve2(EXAMPLE).unwrap(), 286);
    }
}
crate::benchmark_challenge!(crate::day12::Day12);
|
/*!
this crate serves as the installer for the artifice network, however it can also be used as a simple task scheduler
# Example
```
use manager::{ArtificeDB, Database};
use installer::installation::*;
use std::time::Duration;
use manager::database::ArtificePeers;
use std::io::{Read, Write};
use networking::ArtificeConfig;
fn main(){
let database = ArtificeDB::create("/home/user/.artifice").unwrap();
let password = "hello_world".to_string();
let mut installer = Installer::new(InstallationSrc::NewCompiled, database, 4, Duration::from_secs(5000000));
let first_task = Task::<std::io::Error, ArtificeDB>::new(1, "create", move |database, schedule|{
let peers: ArtificePeers = database.create_table("peers".to_string(), &password.clone().into_bytes())?;
let config: ArtificeConfig = database.load_entry("config".to_string(), &password.clone().into_bytes())?;
Ok(())
});
installer.add_task(first_task);
installer.run();
}
```
*/
#[macro_use]extern crate serde_derive;
///
/// this module is where the primary functionality of this crate is located
///
pub mod installation;
/// create and manage tasks (stored in Installer)
pub use installation::TaskSchedule;
/// installer for the artifice Network
pub use installation::Installer;
/// represents a task to be executed
pub use installation::Task; |
use std::cmp;
use rand::Rng;
use rand::distributions::{Weighted, WeightedChoice, IndependentSample};
use super::data::{
Object,
Tile,
Map,
Rect,
Transition,
Fighter,
DeathCallback,
Ai,
Item,
Game,
Tcod,
MessageLog,
};
use super::util;
use crate::{PLAYER, MAP_HEIGHT, MAP_WIDTH, ROOM_MAX_SIZE, ROOM_MIN_SIZE, MAX_ROOMS};
use super::rendering;
use crate::app::equipment::{Equipment, Slot};
// TODO: Refactor out map to game
/// Carve the interior of `room` out of `map`, leaving a one-tile wall border
/// so adjacent rooms keep a wall between them.
pub fn create_room(room: Rect, map: &mut Map) {
    for col in (room.x1 + 1)..room.x2 {
        for row in (room.y1 + 1)..room.y2 {
            map[col as usize][row as usize] = Tile::empty();
        }
    }
}
// TODO: Refactor out map to game
/// Carve a horizontal tunnel at row `y` spanning from `x1` to `x2`
/// (inclusive, in either order).
pub fn create_h_tunnel(x1: i32, x2: i32, y: i32, map: &mut Map) {
    // Inclusive range replaces the manual `..(max + 1)` construction.
    for x in cmp::min(x1, x2)..=cmp::max(x1, x2) {
        map[x as usize][y as usize] = Tile::empty();
    }
}
// TODO: Refactor out map to game
/// Carve a vertical tunnel at column `x` spanning from `y1` to `y2`
/// (inclusive, in either order).
pub fn create_v_tunnel(y1: i32, y2: i32, x: i32, map: &mut Map) {
    // Inclusive range replaces the manual `..(max + 1)` construction.
    for y in cmp::min(y1, y2)..=cmp::max(y1, y2) {
        map[x as usize][y as usize] = Tile::empty();
    }
}
// TODO: Refactor out map to game
/// Returns true when the tile at (x, y) cannot be entered, either because the
/// map marks it as blocked or because a blocking object currently stands there.
pub fn is_blocked(x: i32, y: i32, map: &Map, objects: &[Object]) -> bool {
    let tile_blocked = map[x as usize][y as usize].blocked;
    tile_blocked
        || objects
            .iter()
            .any(|object| object.blocks && object.pos() == (x, y))
}
// TODO: Refactor out map to game
/// Populate `room` with randomly chosen monsters and items.
///
/// Spawn counts and per-kind weights scale with the dungeon `level` through
/// the `Transition` tables. A spawn attempt on an already-blocked tile is
/// simply skipped (the attempt is lost, not retried).
pub fn place_objects(room: Rect, map: &Map, objects: &mut Vec<Object>, level: u32) {
    let max_monsters = util::vec_from_dungeon_level(
        Transition::max_monsters(),
        level,
    );
    // gen_range's upper bound is exclusive, hence `+ 1` to allow the maximum.
    let num_monsters = rand::thread_rng().gen_range(0, max_monsters + 1);
    let troll_chance = util::vec_from_dungeon_level(
        Transition::troll_chance(),
        level,
    );
    let draco_chance = util::vec_from_dungeon_level(
        Transition::draco_chance(),
        level,
    );
    // A random table of monsters to potentially create
    let monster_chances = &mut [
        Weighted {
            weight: 80,
            item: "orc",
        },
        Weighted {
            weight: troll_chance,
            item: "troll",
        },
        Weighted {
            weight: draco_chance,
            item: "draco",
        },
    ];
    let monster_choice = WeightedChoice::new(monster_chances);
    for _ in 0..num_monsters {
        // Pick a random tile strictly inside the room's walls.
        let x = rand::thread_rng().gen_range(room.x1 + 1, room.x2);
        let y = rand::thread_rng().gen_range(room.y1 + 1, room.y2);
        if !is_blocked(x, y, map, objects) {
            let mut monster = match monster_choice.ind_sample(&mut rand::thread_rng()) {
                "orc" => {
                    let mut orc = Object::new(x, y, 'o', tcod::colors::DESATURATED_GREEN, "orc", true);
                    orc.fighter = Some(Fighter {
                        base_max_hp: 20,
                        hp: 20,
                        base_power: 4,
                        base_defense: 0,
                        on_death: DeathCallback::Monster,
                        xp: 35,
                    });
                    orc.ai = Some(Ai::Basic);
                    orc
                }
                "troll" => {
                    let mut troll = Object::new(x, y, 'T', tcod::colors::DARKER_GREEN, "troll", true);
                    troll.fighter = Some(Fighter {
                        base_max_hp: 30,
                        hp: 16,
                        base_power: 8,
                        base_defense: 2,
                        on_death: DeathCallback::Monster,
                        xp: 100,
                    });
                    troll.ai = Some(Ai::Basic);
                    troll
                }
                "draco" => {
                    let mut draco = Object::new(x, y, 'd', tcod::colors::DARK_BLUE, "draco", true);
                    draco.fighter = Some(Fighter {
                        base_max_hp: 25,
                        hp: 25,
                        base_power: 5,
                        base_defense: 1,
                        on_death: DeathCallback::Monster,
                        xp: 65,
                    });
                    draco.ai = Some(Ai::Basic);
                    draco
                }
                // The weighted table only contains the three names above.
                _ => unreachable!(),
            };
            monster.alive = true;
            objects.push(monster);
        }
    }
    let max_items = util::vec_from_dungeon_level(
        Transition::max_items(),
        level,
    );
    // A random table of items to create
    let item_chances = &mut [
        Weighted {
            weight: 35,
            item: Item::Heal,
        },
        Weighted {
            weight: util::from_dungeon_level(
                &[Transition::new(4, 25)],
                level
            ),
            item: Item::Lightning,
        },
        Weighted {
            weight: util::from_dungeon_level(
                &[Transition::new(4, 25)],
                level
            ),
            item: Item::Fireball,
        },
        Weighted {
            weight: util::from_dungeon_level(
                &[Transition::new(2,10)],
                level,
            ),
            item: Item::Confuse,
        },
        Weighted {
            weight: util::from_dungeon_level(
                &[Transition::new(4,5)],
                level
            ),
            item: Item::Sword },
        Weighted {
            weight: util::from_dungeon_level(
                &[Transition::new(8, 15)],
                level
            ),
            item: Item::Shield,
        }
    ];
    let num_items = rand::thread_rng().gen_range(0, max_items + 1);
    let item_choice = WeightedChoice::new(item_chances);
    for _ in 0..num_items {
        let x = rand::thread_rng().gen_range(room.x1 + 1, room.x2);
        let y = rand::thread_rng().gen_range(room.y1 + 1, room.y2);
        if !is_blocked(x, y, map, objects) {
            // let dice = rand::random::<f32>();
            let mut item = match item_choice.ind_sample(&mut rand::thread_rng()) {
                Item::Heal => {
                    let mut object = Object::new(x, y, '!', tcod::colors::VIOLET, "healing potion", false);
                    object.item = Some(Item::Heal);
                    object
                }
                Item::Lightning => {
                    let mut object = Object::new(
                        x,
                        y,
                        '#',
                        tcod::colors::LIGHT_YELLOW,
                        "scroll of lightning bolt",
                        false,
                    );
                    object.item = Some(Item::Lightning);
                    object
                }
                Item::Fireball => {
                    let mut object =
                        Object::new(x, y, '#', tcod::colors::ORANGE, "scroll of fireball", false);
                    object.item = Some(Item::Fireball);
                    object
                }
                Item::Confuse => {
                    let mut object = Object::new(
                        x,
                        y,
                        '#',
                        tcod::colors::LIGHT_AZURE,
                        "scroll of confusion",
                        false
                    );
                    object.item = Some(Item::Confuse);
                    object
                }
                Item::Sword => {
                    let mut object = Object::new(x, y, '/',tcod::colors::SKY,"sword",false);
                    object.item = Some(Item::Sword);
                    object.equipment = Some(Equipment{
                        equipped: false,
                        slot: Slot::RightHand,
                        power_bonus: 3,
                        defense_bonus: 0,
                        max_hp_bonus: 0,
                    });
                    object
                }
                Item::Shield => {
                    let mut object = Object::new(x, y, '[', tcod::colors::DARKER_ORANGE, "shield", false);
                    object.item = Some(Item::Shield);
                    object.equipment = Some(Equipment {
                        equipped: false,
                        slot: Slot::LeftHand,
                        power_bonus: 0,
                        defense_bonus: 1,
                        max_hp_bonus: 0,
                    });
                    object
                }
            };
            // Items stay visible on explored tiles even outside current FOV.
            item.always_visible = true;
            objects.push(item);
        }
    }
}
// Make Map
/// Generate a fresh dungeon level: carve up to MAX_ROOMS non-overlapping
/// rooms, connect consecutive rooms with L-shaped tunnels, populate each room,
/// and place down-stairs in the center of the last room. The player
/// (objects[0]) is moved to the first room's center; all other objects from
/// the previous level are discarded.
pub fn make_map(objects: &mut Vec<Object>, level: u32) -> Map {
    // Start from a solid map and carve rooms/tunnels out of it.
    let mut map = vec![vec![Tile::wall(); MAP_HEIGHT as usize]; MAP_WIDTH as usize];
    // The player must be the first object; everything else is regenerated.
    assert_eq!(&objects[PLAYER] as *const _, &objects[0] as *const _);
    objects.truncate(1);
    let mut rooms= vec![];
    for _ in 0..MAX_ROOMS {
        // Random room width and height
        let w = rand::thread_rng().gen_range(ROOM_MIN_SIZE, ROOM_MAX_SIZE + 1);
        let h = rand::thread_rng().gen_range(ROOM_MIN_SIZE, ROOM_MAX_SIZE + 1);
        // Random room position within map boundaries
        let x = rand::thread_rng().gen_range(0, MAP_WIDTH - w);
        let y = rand::thread_rng().gen_range(0, MAP_HEIGHT - h);
        let new_room = Rect::new(x, y, w, h);
        // Reject rooms that would overlap any already-placed room.
        let failed = rooms
            .iter()
            .any(|other_room| new_room.intersects_with(other_room));
        if !failed {
            create_room(new_room, &mut map);
            place_objects(new_room, &map, objects, level);
            let (new_x, new_y) = new_room.center();
            if rooms.is_empty() {
                objects[PLAYER].set_pos(new_x, new_y);
            } else {
                // Connect to the previous room, choosing at random whether the
                // horizontal or the vertical leg of the tunnel comes first.
                let (prev_x, prev_y) = rooms[rooms.len() - 1].center();
                if rand::random() {
                    create_h_tunnel(prev_x, new_x, prev_y, &mut map);
                    create_v_tunnel(prev_y, new_y, new_x, &mut map);
                } else {
                    create_v_tunnel(prev_y, new_y, prev_x, &mut map);
                    create_h_tunnel(prev_x, new_x, new_y, &mut map);
                }
            }
            rooms.push(new_room);
        }
    }
    // Create stairs at the center of the last room generated.
    let (last_room_x, last_room_y) = rooms[rooms.len() - 1].center();
    let mut stairs = Object::new(
        last_room_x,
        last_room_y,
        '<',
        tcod::colors::WHITE,
        "stairs",
        false,
    );
    stairs.always_visible = true;
    objects.push(stairs);
    map
}
/// Advance to the next dungeon level: heal the player by half of their max hp,
/// bump the level counter, regenerate the map, and rebuild the FOV data.
pub fn next_level(objects: &mut Vec<Object>, game: &mut Game, tcod: &mut Tcod) {
    game.log.add(
        // Fixed typo in the player-facing message: "rst" -> "rest".
        "You take a moment to rest, and recover your strength.",
        tcod::colors::VIOLET,
    );
    let heal_hp = objects[PLAYER].max_hp(game) / 2;
    objects[PLAYER].heal(heal_hp, game);
    game.log.add(
        "After a rare moment of peace, you descend deeper into the catacombs...",
        tcod::colors::RED,
    );
    game.dungeon_level += 1;
    game.map = make_map(objects, game.dungeon_level);
    rendering::init_fov(&game.map, tcod);
}
|
#[path = "lib/block.rs"] pub mod block;
#[path = "lib/transaction.rs"] pub mod transaction;
#[path = "lib/network.rs"] pub mod network;
use sha2::{ Sha256, Digest };
fn main() {
    // Hash a fixed seed string to serve as the previous-hash of the genesis block.
    let mut hasher = Sha256::default();
    hasher.update("SIMPLE DUMB BLOCK CHAIN");
    let first_hash = Digest::finalize(hasher);
    // NOTE(review): `iter().cloned().collect()` turns the digest into a Vec<u8>;
    // presumably `new_block` takes `&Vec<u8>` — confirm against lib/block.rs.
    let mut gen_block = block::Block::new_block(10, &first_hash.iter().cloned().collect());
    println!("{}", gen_block);
    let n_transaction = transaction::Transaction::create_new_transaction(String::from("Saad"), String::from("Ahmed"), 8);
    gen_block.add_transaction(n_transaction);
    println!("{}", gen_block);
}
pub fn add_new_transaction(curr_block: &mut block::Block, transaction: transaction::Transaction) {
if curr_block.get_transactions_count() > 50 {
// Some logic
} else {
curr_block.add_transaction(transaction);
}
} |
use super::*;
#[test]
fn STX_test() {
    // STX stores the X register at the address given by the opeland.
    let mut registers = Registers::new();
    let mut bus = BusMock::new();
    let opeland = 0x90;
    registers.X = 0x89;
    Calculator::STX(&registers, &mut bus, opeland);
    // Reading the target address back should yield the value of X.
    let actual = bus.read(opeland as u16);
    assert_eq!(actual, registers.X);
}
/// A function that does nothing; exists to demonstrate fn-pointer coercion.
fn bare() {}
/// Invokes the supplied function pointer exactly once.
fn likes_block(f: fn()) {
    f()
}
fn main() {
    // A named function coerces to a `fn()` pointer automatically.
    likes_block(bare);
}
use serde::Deserialize;
/// JSON payload containing a user-agent string, deserialized from a response
/// body (presumably an httpbin-style `/user-agent` endpoint — confirm caller).
#[derive(Deserialize)]
pub struct UserAgent {
    /// The raw `User-Agent` header value echoed back by the service.
    pub user_agent: String,
}
|
mod query;
mod search_term;
mod source_item;
pub use self::query::Query;
pub use self::search_term::{
ExactTerm, ExactTermType, FuzzyTerm, FuzzyTermType, InverseTerm, InverseTermType, SearchTerm,
TermType,
};
pub use self::source_item::{FilteredItem, FuzzyText, MatchType, MatchingText, SourceItem};
|
use error::RunningResult;
pub mod converter;
pub(crate) mod error;
pub mod parser;
pub mod pb_item;
pub mod utils;
/// Parse protobuf wire-format `data` and render it as pretty-printed JSON,
/// terminated by a newline. The `sif` flag is forwarded to the parser.
///
/// # Errors
/// Propagates parsing and conversion failures; JSON serialization failure
/// degrades to an empty string rather than an error.
pub fn parse_to_pretty(data: &[u8], sif: bool) -> RunningResult<String> {
    let pb_items = self::parser::parse_pb_data(data, sif)?;
    let obj = self::converter::build_json(pb_items)?;
    // `unwrap_or_default` is the idiomatic form of `unwrap_or(String::new())`
    // and avoids constructing the fallback eagerly.
    let mut result = serde_json::to_string_pretty(&obj).unwrap_or_default();
    result.push('\n');
    Ok(result)
}
|
//! TODO: mod-level docs
use core::marker::PhantomData;
use core::sync::atomic::Ordering;
use conquer_pointer::MarkedPtr;
use crate::retired::Retired;
use crate::traits::{Protect, Reclaim, ReclaimBase, ReclaimRef, ReclaimThreadState};
use crate::NotEqual;
/// A specialization of the [`Atomic`](crate::atomic::Atomic) type using
/// [`Leaking`] as reclaimer.
pub type Atomic<T, const N: usize> = crate::atomic::Atomic<T, Leaking, N>;
/// A specialization of the [`Owned`](crate::Owned) type using [`Leaking`] as
/// reclaimer.
pub type Owned<T, const N: usize> = crate::Owned<T, Leaking, N>;
/// A specialization of the [`Protected`](crate::Protected) type using
/// [`Leaking`] as reclaimer.
pub type Protected<'g, T, const N: usize> = crate::Protected<'g, T, Leaking, N>;
/// A specialization of the [`Shared`](crate::Shared) type using [`Leaking`] as
/// reclaimer.
pub type Shared<'g, T, const N: usize> = crate::Shared<'g, T, Leaking, N>;
/// A specialization of the [`Unlinked`](crate::Unlinked) type using [`Leaking`]
/// as reclaimer.
pub type Unlinked<T, const N: usize> = crate::Unlinked<T, Leaking, N>;
/// A specialization of the [`Unprotected`](crate::Unprotected) type using
/// [`Leaking`] as reclaimer.
pub type Unprotected<T, const N: usize> = crate::Unprotected<T, Leaking, N>;
// *************************************************************************************************
// Leaking
// *************************************************************************************************
/// A no-op "reclaimer" that never frees retired records, i.e. it leaks them.
#[derive(Debug, Default, Hash, Eq, Ord, PartialEq, PartialOrd)]
pub struct Leaking;
/********** impl ReclaimBase **********************************************************************/
// With no record header and `()` as the retired type there is nothing to
// drop or free, so the base contract is trivially upheld.
unsafe impl ReclaimBase for Leaking {
    type Header = ();
    type Retired = ();
}
/********** impl Reclaim **************************************************************************/
unsafe impl<T> Reclaim<T> for Leaking {
    #[inline(always)]
    unsafe fn retire(ptr: *mut T) -> *mut () {
        // "Retiring" merely erases the pointer's type; the record itself is
        // intentionally never freed by this reclaimer.
        ptr.cast()
    }
}
/********** impl ReclaimRef ***********************************************************************/
unsafe impl<T> ReclaimRef<T> for Leaking {
    type Reclaim = Self;
    type ThreadState = Self;
    #[inline(always)]
    fn alloc_owned<const N: usize>(&self, value: T) -> Owned<T, N> {
        Owned::new(value)
    }
    #[inline(always)]
    unsafe fn build_thread_state_unchecked(&self) -> Self::ThreadState {
        // Leaking keeps no per-thread bookkeeping: the state is the ZST itself.
        Leaking
    }
}
/********** impl ReclaimThreadState ***************************************************************/
unsafe impl<T> ReclaimThreadState<T> for Leaking {
    type Reclaim = Self;
    type Guard = Guard;
    #[inline(always)]
    fn derived_from(&self, _: &impl ReclaimRef<T, Reclaim = Self::Reclaim>) -> bool {
        // All `Leaking` handles are interchangeable; any ref counts as parent.
        true
    }
    #[inline(always)]
    fn build_guard(&self) -> Self::Guard {
        Guard
    }
    #[inline(always)]
    fn alloc_owned<const N: usize>(&self, value: T) -> Owned<T, N> {
        Owned::new(value)
    }
    // Deliberately a no-op: retired records are leaked, never reclaimed.
    #[inline(always)]
    unsafe fn retire_record(&self, _: Retired<Leaking>) {}
}
// *************************************************************************************************
// Guard
// *************************************************************************************************
/// A zero-sized guard: since `Leaking` never reclaims records, every load is
/// trivially protected and acquiring a guard costs nothing.
#[derive(Clone, Copy, Debug, Default, Hash, Eq, Ord, PartialEq, PartialOrd)]
pub struct Guard;
// Expands to the shared `Protect` method bodies used by both the `Guard`
// and `&Guard` impls below: a plain (or compare-and-)load of the marked
// pointer, wrapped as `Protected`.
macro_rules! impl_protect {
    () => {
        type Reclaim = Leaking;
        #[inline]
        fn protect<const N: usize>(
            &mut self,
            atomic: &Atomic<T, N>,
            order: Ordering,
        ) -> Protected<T, N> {
            Protected { inner: atomic.load_raw(order), _marker: PhantomData }
        }
        #[inline]
        fn protect_if_equal<const N: usize>(
            &mut self,
            atomic: &Atomic<T, N>,
            expected: MarkedPtr<T, N>,
            order: Ordering,
        ) -> Result<Protected<T, N>, NotEqual> {
            atomic
                .load_raw_if_equal(expected, order)
                .map(|inner| Protected { inner, _marker: PhantomData })
        }
    };
}
/********** impl Protect (Guard) ******************************************************************/
// Both by-value and by-reference guards share the macro-generated bodies.
unsafe impl<T> Protect<T> for Guard {
    impl_protect!();
}
/********** impl Protect (&Guard) *****************************************************************/
unsafe impl<T> Protect<T> for &Guard {
    impl_protect!();
}
|
#[doc = "Register `PIDR4` reader"]
pub type R = crate::R<PIDR4_SPEC>;
#[doc = "Field `JEP106CON` reader - JEP106 continuation code"]
pub type JEP106CON_R = crate::FieldReader;
#[doc = "Field `SIZE` reader - register file size"]
pub type SIZE_R = crate::FieldReader;
impl R {
    #[doc = "Bits 0:3 - JEP106 continuation code"]
    #[inline(always)]
    pub fn jep106con(&self) -> JEP106CON_R {
        // Low nibble of the register value.
        JEP106CON_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - register file size"]
    #[inline(always)]
    pub fn size(&self) -> SIZE_R {
        // Bits 4..=7 shifted down into a nibble.
        SIZE_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
}
#[doc = "DBGMCU CoreSight peripheral identity register 4\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pidr4::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Zero-sized marker type identifying the PIDR4 register to the generic
// register API; never instantiated with data.
pub struct PIDR4_SPEC;
impl crate::RegisterSpec for PIDR4_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`pidr4::R`](R) reader structure"]
impl crate::Readable for PIDR4_SPEC {}
#[doc = "`reset()` method sets PIDR4 to value 0"]
impl crate::Resettable for PIDR4_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! CPUID
use volatile_register::RO;
#[cfg(not(armv6m))]
use volatile_register::RW;
#[cfg(not(armv6m))]
use crate::peripheral::CPUID;
/// Register block
///
/// On armv6m targets the optional feature registers are replaced by
/// `_reserved` padding of the same size, so the overall layout is identical
/// across variants.
#[repr(C)]
pub struct RegisterBlock {
    /// CPUID base
    pub base: RO<u32>,
    _reserved0: [u32; 15],
    /// Processor Feature (not present on Cortex-M0 variants)
    #[cfg(not(armv6m))]
    pub pfr: [RO<u32>; 2],
    #[cfg(armv6m)]
    _reserved1: [u32; 2],
    /// Debug Feature (not present on Cortex-M0 variants)
    #[cfg(not(armv6m))]
    pub dfr: RO<u32>,
    #[cfg(armv6m)]
    _reserved2: u32,
    /// Auxiliary Feature (not present on Cortex-M0 variants)
    #[cfg(not(armv6m))]
    pub afr: RO<u32>,
    #[cfg(armv6m)]
    _reserved3: u32,
    /// Memory Model Feature (not present on Cortex-M0 variants)
    #[cfg(not(armv6m))]
    pub mmfr: [RO<u32>; 4],
    #[cfg(armv6m)]
    _reserved4: [u32; 4],
    /// Instruction Set Attribute (not present on Cortex-M0 variants)
    #[cfg(not(armv6m))]
    pub isar: [RO<u32>; 5],
    #[cfg(armv6m)]
    _reserved5: [u32; 5],
    _reserved6: u32,
    /// Cache Level ID (only present on Cortex-M7)
    #[cfg(not(armv6m))]
    pub clidr: RO<u32>,
    /// Cache Type (only present on Cortex-M7)
    #[cfg(not(armv6m))]
    pub ctr: RO<u32>,
    /// Cache Size ID (only present on Cortex-M7)
    #[cfg(not(armv6m))]
    pub ccsidr: RO<u32>,
    /// Cache Size Selection (only present on Cortex-M7)
    #[cfg(not(armv6m))]
    pub csselr: RW<u32>,
}
/// Type of cache to select on CSSELR writes.
#[cfg(not(armv6m))]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CsselrCacheType {
    /// Select DCache or unified cache
    // Discriminants match the CSSELR InD field encoding.
    DataOrUnified = 0,
    /// Select ICache
    Instruction = 1,
}
#[cfg(not(armv6m))]
impl CPUID {
    /// Selects the current CCSIDR
    ///
    /// * `level`: the required cache level minus 1, e.g. 0 for L1, 1 for L2
    /// * `ind`: select instruction cache or data/unified cache
    ///
    /// `level` is masked to be between 0 and 7.
    #[inline]
    pub fn select_cache(&mut self, level: u8, ind: CsselrCacheType) {
        const CSSELR_IND_POS: u32 = 0;
        const CSSELR_IND_MASK: u32 = 1 << CSSELR_IND_POS;
        const CSSELR_LEVEL_POS: u32 = 1;
        const CSSELR_LEVEL_MASK: u32 = 0x7 << CSSELR_LEVEL_POS;
        // SAFETY-NOTE(review): volatile write to a memory-mapped register;
        // the masks keep the value within the defined CSSELR fields.
        unsafe {
            self.csselr.write(
                ((u32::from(level) << CSSELR_LEVEL_POS) & CSSELR_LEVEL_MASK)
                    | (((ind as u32) << CSSELR_IND_POS) & CSSELR_IND_MASK),
            )
        }
    }
    /// Returns the number of sets and ways in the selected cache
    #[inline]
    pub fn cache_num_sets_ways(&mut self, level: u8, ind: CsselrCacheType) -> (u16, u16) {
        const CCSIDR_NUMSETS_POS: u32 = 13;
        const CCSIDR_NUMSETS_MASK: u32 = 0x7FFF << CCSIDR_NUMSETS_POS;
        const CCSIDR_ASSOCIATIVITY_POS: u32 = 3;
        const CCSIDR_ASSOCIATIVITY_MASK: u32 = 0x3FF << CCSIDR_ASSOCIATIVITY_POS;
        self.select_cache(level, ind);
        // Barrier: make sure the CSSELR write completes before reading CCSIDR.
        crate::asm::dsb();
        let ccsidr = self.ccsidr.read();
        // Both fields are stored as (value - 1) in hardware, hence the `1 +`.
        (
            (1 + ((ccsidr & CCSIDR_NUMSETS_MASK) >> CCSIDR_NUMSETS_POS)) as u16,
            (1 + ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >> CCSIDR_ASSOCIATIVITY_POS)) as u16,
        )
    }
    /// Returns log2 of the number of words in the smallest cache line of all the data cache and
    /// unified caches that are controlled by the processor.
    ///
    /// This is the `DminLine` field of the CTR register.
    #[inline(always)]
    pub fn cache_dminline() -> u32 {
        const CTR_DMINLINE_POS: u32 = 16;
        const CTR_DMINLINE_MASK: u32 = 0xF << CTR_DMINLINE_POS;
        // SAFETY-NOTE(review): read-only access through the peripheral pointer.
        let ctr = unsafe { (*Self::PTR).ctr.read() };
        (ctr & CTR_DMINLINE_MASK) >> CTR_DMINLINE_POS
    }
    /// Returns log2 of the number of words in the smallest cache line of all the instruction
    /// caches that are controlled by the processor.
    ///
    /// This is the `IminLine` field of the CTR register.
    #[inline(always)]
    pub fn cache_iminline() -> u32 {
        const CTR_IMINLINE_POS: u32 = 0;
        const CTR_IMINLINE_MASK: u32 = 0xF << CTR_IMINLINE_POS;
        let ctr = unsafe { (*Self::PTR).ctr.read() };
        (ctr & CTR_IMINLINE_MASK) >> CTR_IMINLINE_POS
    }
}
|
use conrod::{Ui, color, Scalar, widget, Colorable, Positionable, Widget};
use conrod::widget::{Common, CommonBuilder, UpdateArgs, PointPath};
use conrod::widget::id::{Id, Generator};
use conrod::widget::line::Cap;
use conrod::widget::line::Style as LineStyle;
use conrod::position::Point;
use conrod::event;
use conrod::input::Key;
use support::id::IdPool;
use paint::paint_window::WindowAction;
/// A freehand drawing widget: collects mouse drags into polylines and renders
/// them as rounded `PointPath`s.
pub struct PaintArea {
    /// Handles some of the dirty work of rendering a GUI.
    common: CommonBuilder,
    /// See the Style struct below.
    style: Style,
}
/// Unique styling for the PaintArea widget (currently no style options).
#[derive(Copy, Clone, Debug, Default, PartialEq, WidgetStyle)]
pub struct Style {
}
// Generates the `Ids` struct holding the widget id for the in-progress
// ("active") stroke; finished strokes get ids from the IdPool instead.
widget_ids! {
    struct Ids {
        active,
    }
}
/// Tracks the left-button drag state across update calls.
#[derive(Copy, Clone, PartialEq)]
enum MouseState {
    None,
    Pressed,
    Cancelled,
}
/// Represents the unique, cached state for our PaintArea widget.
pub struct State {
    // Current drag state of the mouse.
    mouse_state: MouseState,
    // Pool of reusable widget ids for finished lines.
    id_pool: IdPool,
    ids: Ids,
    // One widget id per finished line, parallel to `lines`.
    line_ids: Vec<Id>,
    // Finished strokes, each a list of points.
    lines: Vec<Vec<[f64; 2]>>,
    // Points of the stroke currently being drawn.
    points: Vec<[f64; 2]>,
}
/// A high-level input event derived from raw keyboard/mouse state.
#[derive(PartialEq)]
enum PaintAction {
    Cancel,
    Press(Point),
    Drag(Point),
    Release,
}
impl PaintArea {
    /// Construct a new, default-styled PaintArea.
    pub fn new() -> Self {
        PaintArea {
            common: CommonBuilder::default(),
            style: Style::default(),
        }
    }
    /// Translate raw input into a high-level `PaintAction`.
    ///
    /// Escape cancels the in-progress stroke; a pressed mouse button starts
    /// (Press) or continues (Drag) a stroke depending on `mouse_state`; a
    /// released left button after Pressed/Cancelled yields Release.
    fn handle_input(&self, ui: &Ui, mouse_state: MouseState) -> Option<PaintAction> {
        if let Some(key_id) = ui.global_input().current.widget_capturing_keyboard {
            'events: for widget_event in ui.widget_input(key_id).events() {
                match widget_event {
                    event::Widget::Press(press) => match press.button {
                        event::Button::Keyboard(key) => match key {
                            Key::Escape => {
                                return Some(PaintAction::Cancel)
                            },
                            _ => ()
                        },
                        _ => (),
                    },
                    _ => ()
                }
            }
        }
        // Any pressed mouse button counts; only the first one is inspected.
        let press_option = ui.global_input().current.mouse.buttons.pressed().next();
        if press_option.is_some() {
            let xy = ui.global_input().current.mouse.xy;
            if mouse_state == MouseState::Cancelled {
                // Stroke was cancelled: ignore movement until the button is released.
                return None
            } else if mouse_state == MouseState::Pressed {
                return Some(PaintAction::Drag(xy))
            } else {
                return Some(PaintAction::Press(xy))
            }
        } else if (mouse_state == MouseState::Pressed || mouse_state == MouseState::Cancelled) &&
            ui.global_input().current.mouse.buttons.left().is_up() {
            return Some(PaintAction::Release)
        }
        None
    }
    /// Apply a `PaintAction` to the cached widget state.
    fn handle_action(&self, state: &mut widget::State<<PaintArea as Widget>::State>,
        action: PaintAction) {
        match action {
            PaintAction::Cancel => {
                state.update(|state| {
                    state.points.clear();
                    state.mouse_state = MouseState::Cancelled;
                });
            },
            PaintAction::Press(point) => {
                state.update(|state| {
                    state.points.push(point);
                    state.mouse_state = MouseState::Pressed;
                });
            },
            PaintAction::Drag(point) => {
                state.update(|state| {
                    state.points.push(point);
                });
            },
            PaintAction::Release => {
                state.update(|state| {
                    // Commit the in-progress stroke as a finished line, if a
                    // free widget id exists and there are at least two points.
                    if let Some(new_id) = state.id_pool.get() {
                        if state.points.len() > 1 && state.mouse_state == MouseState::Pressed {
                            println!("Added line! {}", state.points.len());
                            state.line_ids.push(new_id);
                            state.lines.push(state.points.clone());
                        }
                    } else {
                        println!("No ids left!");
                        // TODO -- should probably panic, or alert user
                    }
                    state.mouse_state = MouseState::None;
                    state.points.clear();
                });
            }
        }
    }
}
// Boilerplate accessors required by conrod for every widget.
impl Common for PaintArea {
    fn common(&self) -> &CommonBuilder {
        &self.common
    }
    fn common_mut(&mut self) -> &mut CommonBuilder {
        &mut self.common
    }
}
impl Widget for PaintArea {
    type State = State;
    type Style = Style;
    /// The event produced by instantiating the widget.
    ///
    /// `Some` when clicked, otherwise `None`.
    type Event = Option<WindowAction>;
    fn init_state<'b>(&self, id_gen: Generator) -> Self::State {
        State { mouse_state: MouseState::None,
            id_pool: IdPool::new(),
            ids: Ids::new(id_gen),
            line_ids: vec![],
            lines: vec![],
            points: vec![], }
    }
    fn style(&self) -> Self::Style {
        // `Style` is `Copy`; return it directly instead of `.clone()`
        // (clippy::clone_on_copy).
        self.style
    }
    /// Process input, update the cached stroke state, then (re)instantiate one
    /// `PointPath` per finished line plus one for the in-progress stroke.
    fn update(self, args: UpdateArgs<Self>) -> Self::Event {
        let UpdateArgs { id, mut state, mut ui, .. } = args;
        if let Some(action) = self.handle_input(&ui, state.mouse_state) {
            // Make sure we have enough Ids in the pool, in case a Widget is created
            state.update(|state| {
                state.id_pool.repopulate(&mut ui.widget_id_generator());
            });
            self.handle_action(&mut state, action);
        }
        const THICK: Scalar = 12.0;
        for (line, &line_id) in state.lines.iter().zip(state.line_ids.iter()) {
            PointPath::abs_styled(line.clone(), LineStyle::new().cap(Cap::Round))
                .middle_of(id)
                .color(color::BLACK)
                .thickness(THICK)
                .set(line_id, ui);
        }
        PointPath::abs_styled(state.points.clone(), LineStyle::new().cap(Cap::Round))
            .middle_of(id)
            .color(color::BLACK)
            .thickness(THICK)
            .set(state.ids.active, ui);
        None
    }
}
|
use nalgebra::{Point2, Vector2};
use serde::{Deserialize, Serialize};
/// Top-level wire message exchanged between client and server.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub enum Message {
    Ping,
    ClientMessage(ClientMessage),
    ServerMessage(ServerMessage),
}
/// Messages sent from the client to the server.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub enum ClientMessage {
    EnterGame,
    PaddleUp,
    PaddleDown,
    PaddleStop,
    TogglePause,
}
/// Messages sent from the server to clients.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub enum ServerMessage {
    SetName(String),
    PaddleUp(PaddleId),
    PaddleDown(PaddleId),
    PaddleStop(PaddleId),
    GameState(GameState),
}
/// Which side's paddle a message refers to.
#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)]
pub enum PaddleId {
    Left,
    Right,
}
/// Current movement of a paddle.
#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
pub enum PaddleState {
    Up,
    Down,
    Still,
}
impl Default for PaddleState {
    /// Paddles start out stationary.
    fn default() -> Self {
        PaddleState::Still
    }
}
/// Complete snapshot of the game world, broadcast by the server.
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct GameState {
    // Vertical positions and movement of both paddles.
    pub left_paddle_y: f64,
    pub left_paddle_state: PaddleState,
    pub right_paddle_y: f64,
    pub right_paddle_state: PaddleState,
    // Display names for the two players.
    pub left_player_name: String,
    pub right_player_name: String,
    // Ball kinematics.
    pub ball_position: Point2<f64>,
    pub ball_velocity: Vector2<f64>,
    pub paused: bool,
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Round-trip a message through serde_json: check the exact JSON shape
    /// (externally-tagged enum encoding) and that deserialization restores
    /// the original value.
    #[test]
    fn test_serde() {
        let msg = Message::ServerMessage(ServerMessage::SetName(String::from("Richo")));
        let str = serde_json::to_string(&msg).expect("serialize to json");
        let expected = r#"{"ServerMessage":{"SetName":"Richo"}}"#;
        assert_eq!(expected, str);
        let enum_again = serde_json::from_str(str.as_str()).expect("deserialize json");
        assert_eq!(msg, enum_again);
    }
}
|
use std::collections::HashMap;
/// State of the "memory game" (Van Eck style sequence): the position and value
/// of the most recently spoken number plus, for every earlier number, the turn
/// on which it was last spoken.
#[derive(Debug)]
struct MemoryGame {
    index: usize,
    last_number: usize,
    last_indeces: HashMap<usize, usize>
}
impl MemoryGame {
    /// Seed the game: all starters except the final one are recorded with the
    /// turn they were spoken; the final starter becomes the current number.
    fn new(starters: &[usize]) -> Self {
        let (&seed_last, spoken_before) = starters.split_last().unwrap();
        let last_indeces = spoken_before
            .iter()
            .enumerate()
            .map(|(turn, &n)| (n, turn))
            .collect();
        Self {
            index: starters.len() - 1,
            last_indeces,
            last_number: seed_last,
        }
    }
    /// Extend the sequence until `size` numbers have been spoken; afterwards
    /// `last_number` holds the `size`-th number.
    fn generate(&mut self, size: usize) {
        while self.index < size - 1 {
            // A number seen before is followed by the gap since its last use;
            // a brand-new number is followed by 0. `insert` both records the
            // current turn and hands back the previous one in a single lookup.
            let spoken = self
                .last_indeces
                .insert(self.last_number, self.index)
                .map_or(0, |previous_turn| self.index - previous_turn);
            self.index += 1;
            self.last_number = spoken;
        }
    }
}
fn main() {
    // Part one
    let mut game = MemoryGame::new(&[17, 1, 3, 16, 19, 0]);
    game.generate(2020);
    println!("Part one: {}", game.last_number);
    // Part two: continue the same sequence up to the 30,000,000th number.
    // (Comment previously said "Part one" by copy-paste mistake.)
    game.generate(30000000);
    println!("Part two: {}", game.last_number);
}
#[test]
fn test_part_one() {
    // Example from the puzzle: starters 0,3,6 produce 436 at turn 2020...
    let mut game = MemoryGame::new(&[0, 3, 6]);
    game.generate(2020);
    assert_eq!(game.last_number, 436);
    // ...and 175594 at turn 30,000,000 — so despite the name, this test
    // actually covers both parts.
    game.generate(30000000);
    assert_eq!(game.last_number, 175594);
}
use super::record_content::RecordData;
use super::record_header::RecordHeader;
use super::record_datum::RecordDatum;
/// A parsed data message: a record header plus the data records following it.
#[derive(Debug)]
pub struct DataMessage {
    // Header describing this message.
    header: RecordHeader,
    // Accumulated data records belonging to this message.
    data: Vec<RecordDatum>
}
impl DataMessage {
    /// Build an empty data message carrying the given record header.
    pub fn new(raw_header: RecordHeader) -> DataMessage {
        DataMessage { header: raw_header, data: Vec::new() }
    }
    /// Append one datum to the message body.
    pub fn push_datum(&mut self, d: RecordDatum) {
        self.data.push(d);
    }
}
impl RecordData for DataMessage {
    /// Returns a copy of the message's record header.
    fn get_header(&self) -> RecordHeader {
        // Tail expression instead of an explicit `return` (clippy::needless_return).
        self.header.clone()
    }
}
|
#![cfg_attr(feature = "cargo-clippy", allow(clippy::boxed_local))]
use crate::{
error::VelociError,
persistence::*,
search::{sort::check_apply_top_n_sort, *},
util::{self, StringAdd},
};
use fnv::FnvHashMap;
use itertools::Itertools;
use num::{self, ToPrimitive};
use std::{self, cmp::Ordering};
/// Convert the `(id -> count)` hit map into a vec of `(id, count)` pairs,
/// sorted by count descending and truncated to the `top` n entries.
fn get_top_facet_group<T: IndexIdToParentData>(hits: &FnvHashMap<T, usize>, top: Option<usize>) -> Vec<(T, u32)> {
    // Destructure the pair directly instead of the `|ref tupl|` anti-pattern.
    let groups: Vec<(T, u32)> = hits.iter().map(|(id, count)| (*id, count.to_u32().unwrap())).collect();
    sort_and_apply_top_skip_group(groups, top)
}
/// Sort groups by count descending, then truncate to the `top` n entries.
fn sort_and_apply_top_skip_group<T: IndexIdToParentData>(mut groups: Vec<(T, u32)>, top: Option<usize>) -> Vec<(T, u32)> {
    // Unstable sort is fine: ties between equal counts need no defined order.
    groups.sort_unstable_by(|a, b| b.1.cmp(&a.1));
    apply_top_skip(&mut groups, None, top);
    groups
}
/// Resolve each `(term_id, count)` group to the term's display text, keeping
/// the count alongside it.
fn get_groups_with_text(persistence: &Persistence, groups: &[(u32, u32)], field: &str) -> Vec<(String, usize)> {
    groups
        .iter()
        .map(|&(term_id, count)| (get_text_for_id(persistence, field, term_id), count as usize))
        .collect()
}
// TODO Check ignorecase, check duplicates in facet data
// For ignorecase, we probably need a term_ids -> lower case term id mapping index - read all texts annd aggregate may be too slow
/// Compute facet `(text, count)` groups over the anchor `ids` for the field
/// named in `req`, returning the top groups with their display texts.
pub fn get_facet(persistence: &Persistence, req: &FacetRequest, ids: &[u32]) -> Result<Vec<(String, usize)>, VelociError> {
    info_time!("facets in field {:?}", req.field);
    trace!("get_facet for ids {:?}", ids);
    let steps = util::get_steps_to_anchor(&req.field);
    info!("facet on {:?}", steps);
    // one step facet special case
    // Fast path: a single join step, or a direct anchor->text index exists,
    // so counts can be aggregated in one pass over the index.
    if steps.len() == 1 || persistence.has_index(&(steps.last().unwrap().add(ANCHOR_TO_TEXT_ID))) {
        let path = if steps.len() == 1 {
            steps.first().unwrap().add(PARENT_TO_VALUE_ID)
        } else {
            steps.last().unwrap().add(ANCHOR_TO_TEXT_ID)
        };
        let kv_store = persistence.get_valueid_to_parent(path)?;
        let hits = {
            debug_time!("facet count_values_for_ids {:?}", req.field);
            kv_store.count_values_for_ids(ids, req.top.map(|el| el as u32))
        };
        debug_time!("facet collect and get texts {:?}", req.field);
        let groups = get_top_facet_group(&hits, req.top);
        let groups_with_text = get_groups_with_text(persistence, &groups, steps.last().unwrap());
        debug!("{:?}", groups_with_text);
        return Ok(groups_with_text);
    }
    // General path: join down to the leaf level, then group equal ids by
    // sorting and run-length counting them.
    let mut next_level_ids = join_anchor_to_leaf(persistence, ids, &steps)?;
    let mut groups = vec![];
    {
        debug_time!("facet group by field {:?}", req.field);
        next_level_ids.sort_unstable();
        for (key, group) in &next_level_ids.into_iter().group_by(|el| *el) {
            groups.push((key, group.count() as u32));
        }
        groups = sort_and_apply_top_skip_group(groups, req.top);
    }
    let groups_with_text = get_groups_with_text(persistence, &groups, steps.last().unwrap());
    debug!("{:?}", groups_with_text);
    Ok(groups_with_text)
}
/// Maps anchor-level `ids` down to leaf-level value ids by joining through the
/// `PARENT_TO_VALUE_ID` index of every step in `steps`, in order.
///
/// Panics if `steps` is empty (callers always pass at least one step).
pub(crate) fn join_anchor_to_leaf(persistence: &Persistence, ids: &[u32], steps: &[String]) -> Result<Vec<u32>, VelociError> {
    let first_path = steps.first().unwrap().add(PARENT_TO_VALUE_ID);
    let mut current_ids = join_for_n_to_m(persistence, ids, &first_path)?;
    // Continue joining through the remaining steps, one level at a time.
    for step in &steps[1..] {
        trace!("facet step {:?}", step);
        current_ids = join_for_n_to_m(persistence, &current_ids, &(step.add(PARENT_TO_VALUE_ID)))?;
    }
    Ok(current_ids)
}
/// Maps `value_ids` through the value-id -> parent index at `path`, returning
/// all resulting ids (one input id may map to several outputs, hence n-to-m).
fn join_for_n_to_m(persistence: &Persistence, value_ids: &[u32], path: &str) -> Result<Vec<u32>, VelociError> {
    let kv_store = persistence.get_valueid_to_parent(path)?;
    // Allocate up front instead of `vec![]` + `reserve`. TODO reserve by statistics
    let mut hits = Vec::with_capacity(value_ids.len());
    kv_store.append_values_for_ids(value_ids, &mut hits);
    trace!("hits {:?}", hits);
    Ok(hits)
}
/// Collector used during facet aggregation: records id occurrences and
/// produces an id -> count map at the end.
pub(crate) trait AggregationCollector<T: IndexIdToParentData> {
    /// Records one occurrence of `id`.
    fn add(&mut self, id: T);
    /// Consumes the collector and returns id -> occurrence count.
    /// `top` may limit the result to the highest counts (implementation-dependent).
    fn to_map(self: Box<Self>, top: Option<u32>) -> FnvHashMap<T, usize>;
}
/// Streams `(key, score)` pairs and keeps roughly the `top_n` best-scoring
/// entries: `check_apply_top_n_sort` periodically sorts/truncates the buffer
/// and raises a cutoff score below which further entries are skipped.
fn get_top_n_sort_from_iter<T: num::Zero + std::cmp::PartialOrd + Copy + std::fmt::Debug, K: Copy + std::fmt::Debug, I: Iterator<Item = (K, T)>>(
    iter: I,
    top_n: usize,
) -> Vec<(K, T)> {
    let mut top_hits: Vec<(K, T)> = vec![];
    // Entries scoring below this cutoff cannot make the top-n anymore.
    let mut cutoff = T::zero();
    for (key, score) in iter {
        if score < cutoff {
            continue;
        }
        // May sort+shrink `top_hits` and raise `cutoff` to the worst kept score.
        check_apply_top_n_sort(
            &mut top_hits,
            top_n as u32,
            &|a, b| b.1.partial_cmp(&a.1).unwrap_or(Ordering::Equal),
            &mut |worst: &(K, T)| cutoff = worst.1,
        );
        top_hits.push((key, score));
    }
    top_hits
}
/// Dense collector: the vec is indexed by value id and stores per-id counts.
/// Used when `max_value_id` is known so every id fits inside the vec.
impl<T: IndexIdToParentData> AggregationCollector<T> for Vec<T> {
    fn to_map(self: Box<Self>, top: Option<u32>) -> FnvHashMap<T, usize> {
        debug_time!("aggregation vec to_map");
        if let Some(top) = top {
            // Keep only the `top` best counts; ids with a zero count never occurred
            // and are filtered out before the top-n selection.
            get_top_n_sort_from_iter(self.iter().enumerate().filter(|el| *el.1 != T::zero()).map(|el| (el.0, *el.1)), top as usize)
                .into_iter()
                .map(|el| (num::cast(el.0).unwrap(), num::cast(el.1).unwrap()))
                .collect()
        } else {
            // No limit: collect all non-zero counts and sort descending by count.
            let mut groups: Vec<(u32, T)> = self.iter().enumerate().filter(|el| *el.1 != T::zero()).map(|el| (el.0 as u32, *el.1)).collect();
            groups.sort_by(|a, b| b.1.cmp(&a.1));
            groups.into_iter().map(|el| (num::cast(el.0).unwrap(), num::cast(el.1).unwrap())).collect()
        }
    }
    #[inline]
    fn add(&mut self, id: T) {
        let id_usize = id.to_usize().unwrap();
        debug_assert!(self.len() > id_usize, "max_value_id metadata wrong, therefore facet vec wrong size");
        // SAFETY: the vec is sized from max_value_id so `id_usize` is in bounds;
        // the debug_assert above verifies this invariant in debug builds.
        unsafe {
            let elem = self.get_unchecked_mut(id_usize);
            *elem = *elem + T::one();
        }
    }
}
/// Sparse collector: a plain hash map from value id to occurrence count.
impl<T: IndexIdToParentData> AggregationCollector<T> for FnvHashMap<T, usize> {
    /// Already in the target representation; the `_top` limit is not applied here.
    fn to_map(self: Box<Self>, _top: Option<u32>) -> FnvHashMap<T, usize> {
        *self
    }
    #[inline]
    fn add(&mut self, id: T) {
        // Insert 0 on first sight, then bump the count.
        *self.entry(id).or_insert(0) += 1;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.