repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/script.rs | crates/cheatcodes/src/script.rs | //! Implementations of [`Scripting`](spec::Group::Scripting) cheatcodes.
use crate::{Cheatcode, CheatsCtxt, Result, Vm::*, evm::journaled_account};
use alloy_consensus::{SidecarBuilder, SimpleCoder};
use alloy_primitives::{Address, B256, U256, Uint};
use alloy_rpc_types::Authorization;
use alloy_signer::SignerSync;
use alloy_signer_local::PrivateKeySigner;
use alloy_sol_types::SolValue;
use foundry_wallets::{WalletSigner, wallet_multi::MultiWallet};
use parking_lot::Mutex;
use revm::{
bytecode::Bytecode,
context::JournalTr,
context_interface::transaction::SignedAuthorization,
primitives::{KECCAK_EMPTY, hardfork::SpecId},
};
use std::sync::Arc;
impl Cheatcode for broadcast_0Call {
    /// `vm.broadcast()`: broadcast only the next call, resolving the default sender.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        broadcast(ccx, None, /* single_call */ true)
    }
}
impl Cheatcode for broadcast_1Call {
    /// `vm.broadcast(address)`: broadcast only the next call from `signer`.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        broadcast(ccx, Some(&self.signer), /* single_call */ true)
    }
}
impl Cheatcode for broadcast_2Call {
    /// `vm.broadcast(privateKey)`: broadcast only the next call, signing with `privateKey`.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        broadcast_key(ccx, &self.privateKey, /* single_call */ true)
    }
}
impl Cheatcode for attachDelegation_0Call {
    /// Attaches a signed EIP-7702 delegation scoped to the current chain.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        attach_delegation(ccx, &self.signedDelegation, /* cross_chain */ false)
    }
}
impl Cheatcode for attachDelegation_1Call {
    /// Attaches a signed EIP-7702 delegation with caller-controlled cross-chain scope.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        attach_delegation(ccx, &self.signedDelegation, self.crossChain)
    }
}
impl Cheatcode for signDelegation_0Call {
    /// Signs a delegation without attaching it; nonce derived from the EOA, chain-scoped.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        sign_delegation(ccx, self.privateKey, self.implementation, None, false, false)
    }
}
impl Cheatcode for signDelegation_1Call {
    /// Signs a delegation without attaching it, using an explicit nonce.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        sign_delegation(ccx, self.privateKey, self.implementation, Some(self.nonce), false, false)
    }
}
impl Cheatcode for signDelegation_2Call {
    /// Signs a delegation without attaching it, with caller-controlled cross-chain scope.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        sign_delegation(ccx, self.privateKey, self.implementation, None, self.crossChain, false)
    }
}
impl Cheatcode for signAndAttachDelegation_0Call {
    /// Signs a delegation and immediately attaches it; nonce derived from the EOA.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        sign_delegation(ccx, self.privateKey, self.implementation, None, false, true)
    }
}
impl Cheatcode for signAndAttachDelegation_1Call {
    /// Signs a delegation with an explicit nonce and immediately attaches it.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        sign_delegation(ccx, self.privateKey, self.implementation, Some(self.nonce), false, true)
    }
}
impl Cheatcode for signAndAttachDelegation_2Call {
    /// Signs a delegation with caller-controlled cross-chain scope and attaches it.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        sign_delegation(ccx, self.privateKey, self.implementation, None, self.crossChain, true)
    }
}
/// Helper function to attach an EIP-7702 delegation.
///
/// Rebuilds a [`SignedAuthorization`] from the caller-supplied [`SignedDelegation`],
/// writes the delegation into the journaled state and records it for the next
/// transaction.
fn attach_delegation(
    ccx: &mut CheatsCtxt,
    delegation: &SignedDelegation,
    cross_chain: bool,
) -> Result {
    let SignedDelegation { v, r, s, nonce, implementation } = delegation;
    // Set chain id to 0 if universal deployment is preferred.
    // See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-7702.md#protection-from-malleability-cross-chain
    let chain_id = if cross_chain { U256::from(0) } else { U256::from(ccx.ecx.cfg.chain_id) };
    let auth = Authorization { address: *implementation, nonce: *nonce, chain_id };
    let r = U256::from_be_bytes(r.0);
    let s = U256::from_be_bytes(s.0);
    let signed_auth = SignedAuthorization::new_unchecked(auth, *v, r, s);
    write_delegation(ccx, signed_auth.clone())?;
    ccx.state.add_delegation(signed_auth);
    Ok(Default::default())
}
/// Helper function to sign and attach (if needed) an EIP-7702 delegation.
/// Uses the provided nonce, otherwise retrieves and increments the nonce of the EOA.
fn sign_delegation(
    ccx: &mut CheatsCtxt,
    private_key: Uint<256, 4>,
    implementation: Address,
    nonce: Option<u64>,
    cross_chain: bool,
    attach: bool,
) -> Result<Vec<u8>> {
    let signer = PrivateKeySigner::from_bytes(&B256::from(private_key))?;
    let nonce = match nonce {
        Some(nonce) => nonce,
        None => {
            let authority_acc = ccx.ecx.journaled_state.load_account(signer.address())?;
            // Calculate next nonce considering existing active delegations
            next_delegation_nonce(
                &ccx.state.active_delegations,
                signer.address(),
                &ccx.state.broadcast,
                authority_acc.data.info.nonce,
            )
        }
    };
    // chain_id = 0 opts into cross-chain validity per EIP-7702's malleability rules.
    let chain_id = if cross_chain { U256::from(0) } else { U256::from(ccx.ecx.cfg.chain_id) };
    let auth = Authorization { address: implementation, nonce, chain_id };
    let sig = signer.sign_hash_sync(&auth.signature_hash())?;
    // Extract the signature parts once; they are reused for both the attached
    // authorization and the ABI-encoded return value.
    let (v, r, s) = (sig.v() as u8, sig.r(), sig.s());
    if attach {
        let signed_auth = SignedAuthorization::new_unchecked(auth, v, r, s);
        write_delegation(ccx, signed_auth.clone())?;
        ccx.state.add_delegation(signed_auth);
    }
    Ok(SignedDelegation { v, r: r.into(), s: s.into(), nonce, implementation }.abi_encode())
}
/// Returns the next valid nonce for a delegation, considering existing active delegations.
fn next_delegation_nonce(
active_delegations: &[SignedAuthorization],
authority: Address,
broadcast: &Option<Broadcast>,
account_nonce: u64,
) -> u64 {
match active_delegations
.iter()
.rfind(|auth| auth.recover_authority().is_ok_and(|recovered| recovered == authority))
{
Some(auth) => {
// Increment nonce of last recorded delegation.
auth.nonce + 1
}
None => {
// First time a delegation is added for this authority.
if let Some(broadcast) = broadcast {
// Increment nonce if authority is the sender of transaction.
if broadcast.new_origin == authority {
return account_nonce + 1;
}
}
// Return current nonce if authority is not the sender of transaction.
account_nonce
}
}
}
/// Validates and writes an EIP-7702 delegation into the journaled state.
///
/// Recovers the authority from the signed authorization, checks that the authorization
/// nonce matches the next expected delegation nonce for that authority, and then sets
/// the authority's code to an EIP-7702 delegation designator (or clears it for the
/// zero address).
fn write_delegation(ccx: &mut CheatsCtxt, auth: SignedAuthorization) -> Result<()> {
    let authority = auth.recover_authority().map_err(|e| format!("{e}"))?;
    let authority_acc = ccx.ecx.journaled_state.load_account(authority)?;
    // Recompute the expected nonce the same way `sign_delegation` does, so
    // mismatched caller-supplied nonces are rejected early.
    let expected_nonce = next_delegation_nonce(
        &ccx.state.active_delegations,
        authority,
        &ccx.state.broadcast,
        authority_acc.data.info.nonce,
    );
    if expected_nonce != auth.nonce {
        return Err(format!(
            "invalid nonce for {authority:?}: expected {expected_nonce}, got {}",
            auth.nonce
        )
        .into());
    }
    if auth.address.is_zero() {
        // Set empty code if the delegation address of authority is 0x.
        // See https://github.com/ethereum/EIPs/blob/master/EIPS/eip-7702.md#behavior.
        ccx.ecx.journaled_state.set_code_with_hash(authority, Bytecode::default(), KECCAK_EMPTY);
    } else {
        let bytecode = Bytecode::new_eip7702(*auth.address());
        ccx.ecx.journaled_state.set_code(authority, bytecode);
    }
    Ok(())
}
impl Cheatcode for attachBlobCall {
    /// Encodes `blob` into an EIP-4844 sidecar and stores it for the next call.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { blob } = self;
        // Blob sidecars only exist from the Cancun hard fork onwards.
        ensure!(
            ccx.ecx.cfg.spec >= SpecId::CANCUN,
            "`attachBlob` is not supported before the Cancun hard fork; \
            see EIP-4844: https://eips.ethereum.org/EIPS/eip-4844"
        );
        let builder = SidecarBuilder::<SimpleCoder>::from_slice(blob);
        let sidecar = builder.build().map_err(|e| format!("{e}"))?;
        ccx.state.active_blob_sidecar = Some(sidecar);
        Ok(Default::default())
    }
}
impl Cheatcode for startBroadcast_0Call {
    /// `vm.startBroadcast()`: broadcast all subsequent calls using the default sender.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        broadcast(ccx, None, /* single_call */ false)
    }
}
impl Cheatcode for startBroadcast_1Call {
    /// `vm.startBroadcast(address)`: broadcast all subsequent calls from `signer`.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        broadcast(ccx, Some(&self.signer), /* single_call */ false)
    }
}
impl Cheatcode for startBroadcast_2Call {
    /// `vm.startBroadcast(privateKey)`: broadcast all subsequent calls, signing with
    /// `privateKey`.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        broadcast_key(ccx, &self.privateKey, /* single_call */ false)
    }
}
impl Cheatcode for stopBroadcastCall {
    /// Ends the currently active broadcast session; errors if none is active.
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        match ccx.state.broadcast.take() {
            Some(broadcast) => {
                debug!(target: "cheatcodes", ?broadcast, "stopped");
                Ok(Default::default())
            }
            None => bail!("no broadcast in progress to stop"),
        }
    }
}
impl Cheatcode for getWalletsCall {
    /// Returns the ABI-encoded addresses of all unlocked wallets (empty on failure).
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        Ok(ccx.state.wallets().signers().unwrap_or_default().abi_encode())
    }
}
/// Context for an in-flight `vm.broadcast` / `vm.startBroadcast` session.
#[derive(Clone, Debug, Default)]
pub struct Broadcast {
    /// Address of the transaction origin
    pub new_origin: Address,
    /// Original caller
    pub original_caller: Address,
    /// Original `tx.origin`
    pub original_origin: Address,
    /// Depth of the broadcast
    pub depth: usize,
    /// Whether the broadcast stops by itself after the next call
    /// (`vm.broadcast` as opposed to `vm.startBroadcast`).
    pub single_call: bool,
    /// Whether `vm.deployCode` cheatcode is used to deploy from code.
    pub deploy_from_code: bool,
}
/// Contains context for wallet management.
#[derive(Debug)]
pub struct WalletsInner {
    /// All signers in scope of the script.
    pub multi_wallet: MultiWallet,
    /// Optional sender address provided via the `--sender` flag.
    pub provided_sender: Option<Address>,
}
/// Cloneable wrapper around [`WalletsInner`].
///
/// Cloning is cheap: all clones share the same state behind an `Arc<Mutex<_>>`.
#[derive(Debug, Clone)]
pub struct Wallets {
    /// Inner data.
    pub inner: Arc<Mutex<WalletsInner>>,
}
impl Wallets {
    /// Creates a new [`Wallets`] instance from the given [`MultiWallet`] and optional
    /// `--sender` address.
    pub fn new(multi_wallet: MultiWallet, provided_sender: Option<Address>) -> Self {
        Self { inner: Arc::new(Mutex::new(WalletsInner { multi_wallet, provided_sender })) }
    }

    /// Consumes [Wallets] and returns [MultiWallet].
    ///
    /// Panics if [Wallets] is still in use.
    pub fn into_multi_wallet(self) -> MultiWallet {
        // `Arc::into_inner` returns `None` when other clones are still alive.
        Arc::into_inner(self.inner)
            .map(|m| m.into_inner().multi_wallet)
            .expect("not all instances were dropped")
    }

    /// Locks inner Mutex and adds a signer to the [MultiWallet].
    pub fn add_private_key(&self, private_key: &B256) -> Result<()> {
        self.add_local_signer(PrivateKeySigner::from_bytes(private_key)?);
        Ok(())
    }

    /// Locks inner Mutex and adds a signer to the [MultiWallet].
    pub fn add_local_signer(&self, wallet: PrivateKeySigner) {
        self.inner.lock().multi_wallet.add_signer(WalletSigner::Local(wallet));
    }

    /// Locks inner Mutex and returns all signer addresses in the [MultiWallet].
    pub fn signers(&self) -> Result<Vec<Address>> {
        Ok(self.inner.lock().multi_wallet.signers()?.keys().copied().collect())
    }

    /// Number of signers in the [MultiWallet].
    ///
    /// A signer enumeration failure is treated as "no usable signers".
    pub fn len(&self) -> usize {
        self.inner.lock().multi_wallet.signers().map_or(0, |signers| signers.len())
    }

    /// Whether the [MultiWallet] is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
/// Sets up broadcasting from a script using `new_origin` as the sender.
///
/// When `new_origin` is `None`, the sender is resolved in order from: the configured
/// `--sender` address, the single unlocked signer (only if exactly one exists), or
/// finally the current `tx.caller`.
fn broadcast(ccx: &mut CheatsCtxt, new_origin: Option<&Address>, single_call: bool) -> Result {
    let depth = ccx.ecx.journaled_state.depth();
    ensure!(
        ccx.state.get_prank(depth).is_none(),
        "you have an active prank; broadcasting and pranks are not compatible"
    );
    ensure!(ccx.state.broadcast.is_none(), "a broadcast is active already");
    let mut new_origin = new_origin.copied();
    if new_origin.is_none() {
        // The wallet lock is scoped to this block so `ccx.state` is usable again below.
        let mut wallets = ccx.state.wallets().inner.lock();
        if let Some(provided_sender) = wallets.provided_sender {
            new_origin = Some(provided_sender);
        } else {
            let signers = wallets.multi_wallet.signers()?;
            // Only auto-select a signer when the choice is unambiguous.
            if signers.len() == 1 {
                let address = signers.keys().next().unwrap();
                new_origin = Some(*address);
            }
        }
    }
    let new_origin = new_origin.unwrap_or(ccx.ecx.tx.caller);
    // Ensure new origin is loaded and touched.
    let _ = journaled_account(ccx.ecx, new_origin)?;
    let broadcast = Broadcast {
        new_origin,
        original_caller: ccx.caller,
        original_origin: ccx.ecx.tx.caller,
        depth,
        single_call,
        deploy_from_code: false,
    };
    debug!(target: "cheatcodes", ?broadcast, "started");
    ccx.state.broadcast = Some(broadcast);
    Ok(Default::default())
}
/// Sets up broadcasting from a script with the sender derived from `private_key`.
/// Adds this private key to `state`'s `wallets` vector to later be used for signing
/// if broadcast is successful.
fn broadcast_key(ccx: &mut CheatsCtxt, private_key: &U256, single_call: bool) -> Result {
    let wallet = super::crypto::parse_wallet(private_key)?;
    let new_origin = wallet.address();
    // Only register the signer when the broadcast itself succeeded.
    broadcast(ccx, Some(&new_origin), single_call).map(|ret| {
        ccx.state.wallets().add_local_signer(wallet);
        ret
    })
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/inspector.rs | crates/cheatcodes/src/inspector.rs | //! Cheatcode EVM inspector.
use crate::{
CheatsConfig, CheatsCtxt, DynCheatcode, Error, Result,
Vm::{self, AccountAccess},
evm::{
DealRecord, GasRecord, RecordAccess, journaled_account,
mock::{MockCallDataContext, MockCallReturnData},
prank::Prank,
},
inspector::utils::CommonCreateInput,
script::{Broadcast, Wallets},
test::{
assume::AssumeNoRevert,
expect::{
self, ExpectedCallData, ExpectedCallTracker, ExpectedCallType, ExpectedCreate,
ExpectedEmitTracker, ExpectedRevert, ExpectedRevertKind,
},
revert_handlers,
},
utils::IgnoredTraces,
};
use alloy_consensus::BlobTransactionSidecar;
use alloy_evm::eth::EthEvmContext;
use alloy_network::TransactionBuilder4844;
use alloy_primitives::{
Address, B256, Bytes, Log, TxKind, U256, hex,
map::{AddressHashMap, HashMap, HashSet},
};
use alloy_rpc_types::{
AccessList,
request::{TransactionInput, TransactionRequest},
};
use alloy_sol_types::{SolCall, SolInterface, SolValue};
use foundry_common::{
SELECTOR_LEN, TransactionMaybeSigned,
mapping_slots::{MappingSlots, step as mapping_step},
};
use foundry_evm_core::{
Breakpoints, ContextExt, InspectorExt,
abi::Vm::stopExpectSafeMemoryCall,
backend::{DatabaseError, DatabaseExt, RevertDiagnostic},
constants::{CHEATCODE_ADDRESS, HARDHAT_CONSOLE_ADDRESS, MAGIC_ASSUME},
evm::{FoundryEvm, new_evm_with_existing_context},
};
use foundry_evm_traces::{
TracingInspector, TracingInspectorConfig, identifier::SignaturesIdentifier,
};
use foundry_wallets::wallet_multi::MultiWallet;
use itertools::Itertools;
use proptest::test_runner::{RngAlgorithm, TestRng, TestRunner};
use rand::Rng;
use revm::{
Inspector, Journal,
bytecode::opcode as op,
context::{BlockEnv, JournalTr, LocalContext, TransactionType, result::EVMError},
context_interface::{CreateScheme, transaction::SignedAuthorization},
handler::FrameResult,
interpreter::{
CallInputs, CallOutcome, CallScheme, CreateInputs, CreateOutcome, FrameInput, Gas, Host,
InstructionResult, Interpreter, InterpreterAction, InterpreterResult,
interpreter_types::{Jumps, LoopControl, MemoryTr},
},
primitives::hardfork::SpecId,
};
use serde_json::Value;
use std::{
cmp::max,
collections::{BTreeMap, VecDeque},
fs::File,
io::BufReader,
ops::Range,
path::PathBuf,
sync::{Arc, OnceLock},
};
mod utils;
pub mod analysis;
pub use analysis::CheatcodeAnalysis;
pub type Ecx<'a, 'b, 'c> = &'a mut EthEvmContext<&'b mut (dyn DatabaseExt + 'c)>;
/// Helper trait for obtaining complete [revm::Inspector] instance from mutable reference to
/// [Cheatcodes].
///
/// This is needed for cases when inspector itself needs mutable access to [Cheatcodes] state and
/// allows us to correctly execute arbitrary EVM frames from inside cheatcode implementations.
pub trait CheatcodesExecutor {
    /// Core trait method accepting mutable reference to [Cheatcodes] and returning
    /// [revm::Inspector].
    fn get_inspector<'a>(&'a mut self, cheats: &'a mut Cheatcodes) -> Box<dyn InspectorExt + 'a>;
    /// Obtains [FoundryEvm] instance and executes the given CREATE frame.
    fn exec_create(
        &mut self,
        inputs: CreateInputs,
        ccx: &mut CheatsCtxt,
    ) -> Result<CreateOutcome, EVMError<DatabaseError>> {
        with_evm(self, ccx, |evm| {
            // The frame runs one call deeper than the current context, so the journal
            // depth is bumped manually around `run_execution` and restored after.
            evm.journaled_state.depth += 1;
            let frame = FrameInput::Create(Box::new(inputs));
            let outcome = match evm.run_execution(frame)? {
                // A CREATE frame input can only produce a CREATE frame result.
                FrameResult::Call(_) => unreachable!(),
                FrameResult::Create(create) => create,
            };
            evm.journaled_state.depth -= 1;
            Ok(outcome)
        })
    }
    /// Forwards `msg` to the inspector's `console_log`.
    fn console_log(&mut self, ccx: &mut CheatsCtxt, msg: &str) {
        self.get_inspector(ccx.state).console_log(msg);
    }
    /// Returns a mutable reference to the tracing inspector if it is available.
    fn tracing_inspector(&mut self) -> Option<&mut TracingInspector> {
        None
    }
}
/// Constructs [FoundryEvm] and runs a given closure with it.
///
/// The temporary EVM borrows the same database as the outer context but clones the
/// rest of its state; after the closure succeeds, the mutated state is written back.
fn with_evm<E, F, O>(
    executor: &mut E,
    ccx: &mut CheatsCtxt,
    f: F,
) -> Result<O, EVMError<DatabaseError>>
where
    E: CheatcodesExecutor + ?Sized,
    F: for<'a, 'b> FnOnce(
        &mut FoundryEvm<'a, &'b mut dyn InspectorExt>,
    ) -> Result<O, EVMError<DatabaseError>>,
{
    let mut inspector = executor.get_inspector(ccx.state);
    // Move the pending error out of the outer context; it travels with the temporary
    // context and is restored when the context is written back below.
    let error = std::mem::replace(&mut ccx.ecx.error, Ok(()));
    let ctx = EthEvmContext {
        block: ccx.ecx.block.clone(),
        cfg: ccx.ecx.cfg.clone(),
        tx: ccx.ecx.tx.clone(),
        journaled_state: Journal {
            inner: ccx.ecx.journaled_state.inner.clone(),
            database: &mut *ccx.ecx.journaled_state.database as &mut dyn DatabaseExt,
        },
        local: LocalContext::default(),
        chain: (),
        error,
    };
    let mut evm = new_evm_with_existing_context(ctx, &mut *inspector);
    let res = f(&mut evm)?;
    // NOTE(review): if `f` returns `Err`, the `?` above skips this write-back and the
    // outer context keeps its pre-call journal state — confirm this is intended.
    let ctx = evm.into_context();
    ccx.ecx.journaled_state.inner = ctx.journaled_state.inner;
    ccx.ecx.block = ctx.block;
    ccx.ecx.tx = ctx.tx;
    ccx.ecx.cfg = ctx.cfg;
    ccx.ecx.error = ctx.error;
    Ok(res)
}
/// Basic implementation of [CheatcodesExecutor] that simply returns the [Cheatcodes] instance as an
/// inspector.
///
/// Zero-sized; carries no state of its own.
#[derive(Debug, Default, Clone, Copy)]
struct TransparentCheatcodesExecutor;
impl CheatcodesExecutor for TransparentCheatcodesExecutor {
    /// Boxes the [Cheatcodes] reference itself as the inspector, with no extra layers.
    fn get_inspector<'a>(&'a mut self, cheats: &'a mut Cheatcodes) -> Box<dyn InspectorExt + 'a> {
        Box::new(cheats)
    }
}
/// Evaluates `$e`, unwrapping the `Ok` value; on `Err`, silently returns from the
/// enclosing function (which must return `()`).
macro_rules! try_or_return {
    ($e:expr) => {
        match $e {
            Ok(v) => v,
            Err(_) => return,
        }
    };
}
/// Contains additional, test specific resources that should be kept for the duration of the test
#[derive(Debug, Default)]
pub struct TestContext {
    /// Buffered readers for files opened for reading (path => BufReader mapping)
    pub opened_read_files: HashMap<PathBuf, BufReader<File>>,
}
/// Every time we clone [`TestContext`], we want it to be empty: the buffered file
/// readers it holds are local to a single test run and cannot be shared.
impl Clone for TestContext {
    fn clone(&self) -> Self {
        Default::default()
    }
}
impl TestContext {
    /// Clears the context, dropping all buffered file readers.
    pub fn clear(&mut self) {
        self.opened_read_files.clear();
    }
}
/// Helps collecting transactions from different forks.
#[derive(Clone, Debug)]
pub struct BroadcastableTransaction {
    /// The optional RPC URL of the fork the transaction was recorded on.
    pub rpc: Option<String>,
    /// The transaction to broadcast.
    pub transaction: TransactionMaybeSigned,
}
/// Metadata captured when debug step recording starts.
#[derive(Clone, Debug, Copy)]
pub struct RecordDebugStepInfo {
    /// The debug trace node index when the recording starts.
    pub start_node_idx: usize,
    /// The original tracer config when the recording starts.
    pub original_tracer_config: TracingInspectorConfig,
}
/// Holds gas metering state.
///
/// Tracks pause/resume/reset requests as well as per-call gas records.
#[derive(Clone, Debug, Default)]
pub struct GasMetering {
    /// True if gas metering is paused.
    pub paused: bool,
    /// True if gas metering was resumed or reset during the test.
    /// Used to reconcile gas when frame ends (if spent less than refunded).
    pub touched: bool,
    /// True if gas metering should be reset to frame limit.
    pub reset: bool,
    /// Stores paused gas frames.
    pub paused_frames: Vec<Gas>,
    /// The group and name of the active snapshot.
    pub active_gas_snapshot: Option<(String, String)>,
    /// Cache of the amount of gas used in previous call.
    /// This is used by the `lastCallGas` cheatcode.
    pub last_call_gas: Option<crate::Vm::Gas>,
    /// True if gas recording is enabled.
    pub recording: bool,
    /// The gas used in the last frame.
    pub last_gas_used: u64,
    /// Gas records for the active snapshots.
    pub gas_records: Vec<GasRecord>,
}
impl GasMetering {
    /// Start the gas recording.
    pub fn start(&mut self) {
        self.recording = true;
    }

    /// Stop the gas recording.
    pub fn stop(&mut self) {
        self.recording = false;
    }

    /// Resume paused gas metering.
    pub fn resume(&mut self) {
        // Only mark the metering as touched if it was actually paused.
        if std::mem::take(&mut self.paused) {
            self.touched = true;
        }
        self.paused_frames.clear();
    }

    /// Reset gas to limit.
    pub fn reset(&mut self) {
        self.paused_frames.clear();
        self.paused = false;
        self.touched = true;
        self.reset = true;
    }
}
/// Holds data about arbitrary storage.
#[derive(Clone, Debug, Default)]
pub struct ArbitraryStorage {
    /// Mapping of arbitrary storage addresses to generated values (slot, arbitrary value).
    /// (SLOADs return random value if storage slot wasn't accessed).
    /// Changed values are recorded and used to copy storage to different addresses.
    pub values: HashMap<Address, HashMap<U256, U256>>,
    /// Mapping of address with storage copied to arbitrary storage address source.
    /// Key is the copy target, value is the arbitrary-storage source.
    pub copies: HashMap<Address, Address>,
    /// Address with storage slots that should be overwritten even if previously set.
    pub overwrites: HashSet<Address>,
}
impl ArbitraryStorage {
    /// Marks an address with arbitrary storage.
    ///
    /// Re-marking an address resets its cached values; `overwrite` toggles membership
    /// in the `overwrites` set.
    pub fn mark_arbitrary(&mut self, address: &Address, overwrite: bool) {
        self.values.insert(*address, HashMap::default());
        if overwrite {
            self.overwrites.insert(*address);
        } else {
            self.overwrites.remove(address);
        }
    }
    /// Maps an address that copies storage with the arbitrary storage address.
    ///
    /// No-op unless `from` was previously marked arbitrary.
    pub fn mark_copy(&mut self, from: &Address, to: &Address) {
        if self.values.contains_key(from) {
            self.copies.insert(*to, *from);
        }
    }
    /// Saves arbitrary storage value for a given address:
    /// - store value in changed values cache.
    /// - update account's storage with given value.
    pub fn save(&mut self, ecx: Ecx, address: Address, slot: U256, data: U256) {
        self.values.get_mut(&address).expect("missing arbitrary address entry").insert(slot, data);
        let (db, journal, _) = ecx.as_db_env_and_journal();
        // Only write through to the journal if the account can be loaded; the cache
        // above remains authoritative either way.
        if journal.load_account(db, address).is_ok() {
            journal
                .sstore(db, address, slot, data, false)
                .expect("could not set arbitrary storage value");
        }
    }
    /// Copies arbitrary storage value from source address to the given target address:
    /// - if a value is present in arbitrary values cache, then update target storage and return
    ///   existing value.
    /// - if no value was yet generated for given slot, then save new value in cache and update both
    ///   source and target storages.
    pub fn copy(&mut self, ecx: Ecx, target: Address, slot: U256, new_value: U256) -> U256 {
        let source = self.copies.get(&target).expect("missing arbitrary copy target entry");
        let storage_cache = self.values.get_mut(source).expect("missing arbitrary source storage");
        let value = match storage_cache.get(&slot) {
            Some(value) => *value,
            None => {
                storage_cache.insert(slot, new_value);
                // Update source storage with new value.
                let (db, journal, _) = ecx.as_db_env_and_journal();
                if journal.load_account(db, *source).is_ok() {
                    journal
                        .sstore(db, *source, slot, new_value, false)
                        .expect("could not copy arbitrary storage value");
                }
                new_value
            }
        };
        // Update target storage with new value.
        let (db, journal, _) = ecx.as_db_env_and_journal();
        if journal.load_account(db, target).is_ok() {
            journal.sstore(db, target, slot, value, false).expect("could not set storage");
        }
        value
    }
}
/// List of transactions that can be broadcasted, in the order they were recorded.
pub type BroadcastableTransactions = VecDeque<BroadcastableTransaction>;
/// An EVM inspector that handles calls to various cheatcodes, each with their own behavior.
///
/// Cheatcodes can be called by contracts during execution to modify the VM environment, such as
/// mocking addresses, signatures and altering call reverts.
///
/// Executing cheatcodes can be very powerful. Most cheatcodes are limited to evm internals, but
/// there are also cheatcodes like `ffi` which can execute arbitrary commands or `writeFile` and
/// `readFile` which can manipulate files of the filesystem. Therefore, several restrictions are
/// implemented for these cheatcodes:
/// - `ffi`, and file cheatcodes are _always_ opt-in (via foundry config) and never enabled by
///   default: all respective cheatcode handlers implement the appropriate checks
/// - File cheatcodes require explicit permissions which paths are allowed for which operation, see
///   `Config.fs_permission`
/// - Only permitted accounts are allowed to execute cheatcodes in forking mode, this ensures no
///   contract deployed on the live network is able to execute cheatcodes by simply calling the
///   cheatcode address: by default, the caller, test contract and newly deployed contracts are
///   allowed to execute cheatcodes
#[derive(Clone, Debug)]
pub struct Cheatcodes {
    /// Solar compiler instance, to grant syntactic and semantic analysis capabilities
    pub analysis: Option<CheatcodeAnalysis>,
    /// The block environment
    ///
    /// Used in the cheatcode handler to overwrite the block environment separately from the
    /// execution block environment.
    pub block: Option<BlockEnv>,
    /// Currently active EIP-7702 delegations that will be consumed when building the next
    /// transaction. Set by `vm.attachDelegation()` and consumed via `.take()` during
    /// transaction construction.
    pub active_delegations: Vec<SignedAuthorization>,
    /// The active EIP-4844 blob that will be attached to the next call.
    pub active_blob_sidecar: Option<BlobTransactionSidecar>,
    /// The gas price.
    ///
    /// Used in the cheatcode handler to overwrite the gas price separately from the gas price
    /// in the execution environment.
    pub gas_price: Option<u128>,
    /// Address labels
    pub labels: AddressHashMap<String>,
    /// Prank information, mapped to the call depth where pranks were added.
    pub pranks: BTreeMap<usize, Prank>,
    /// Expected revert information
    pub expected_revert: Option<ExpectedRevert>,
    /// Assume next call can revert and discard fuzz run if it does.
    pub assume_no_revert: Option<AssumeNoRevert>,
    /// Additional diagnostic for reverts
    pub fork_revert_diagnostic: Option<RevertDiagnostic>,
    /// Recorded storage reads and writes
    pub accesses: RecordAccess,
    /// Whether storage access recording is currently active
    pub recording_accesses: bool,
    /// Recorded account accesses (calls, creates) organized by relative call depth, where the
    /// topmost vector corresponds to accesses at the depth at which account access recording
    /// began. Each vector in the matrix represents a list of accesses at a specific call
    /// depth. Once that call context has ended, the last vector is removed from the matrix and
    /// merged into the previous vector.
    pub recorded_account_diffs_stack: Option<Vec<Vec<AccountAccess>>>,
    /// The information of the debug step recording.
    pub record_debug_steps_info: Option<RecordDebugStepInfo>,
    /// Recorded logs
    pub recorded_logs: Option<Vec<crate::Vm::Log>>,
    /// Mocked calls
    // **Note**: inner must a BTreeMap because of special `Ord` impl for `MockCallDataContext`
    pub mocked_calls: HashMap<Address, BTreeMap<MockCallDataContext, VecDeque<MockCallReturnData>>>,
    /// Mocked functions. Maps target address to be mocked to pair of (calldata, mock address).
    pub mocked_functions: HashMap<Address, HashMap<Bytes, Address>>,
    /// Expected calls
    pub expected_calls: ExpectedCallTracker,
    /// Expected emits
    pub expected_emits: ExpectedEmitTracker,
    /// Expected creates
    pub expected_creates: Vec<ExpectedCreate>,
    /// Map of context depths to memory offset ranges that may be written to within the call depth.
    pub allowed_mem_writes: HashMap<u64, Vec<Range<u64>>>,
    /// Current broadcasting information
    pub broadcast: Option<Broadcast>,
    /// Scripting based transactions
    pub broadcastable_transactions: BroadcastableTransactions,
    /// Current EIP-2930 access lists.
    pub access_list: Option<AccessList>,
    /// Additional, user configurable context this Inspector has access to when inspecting a call.
    pub config: Arc<CheatsConfig>,
    /// Test-scoped context holding data that needs to be reset every test run
    pub test_context: TestContext,
    /// Whether to commit FS changes such as file creations, writes and deletes.
    /// Used to prevent duplicate changes file executing non-committing calls.
    pub fs_commit: bool,
    /// Serialized JSON values.
    // **Note**: both must a BTreeMap to ensure the order of the keys is deterministic.
    pub serialized_jsons: BTreeMap<String, BTreeMap<String, Value>>,
    /// All recorded ETH `deal`s.
    pub eth_deals: Vec<DealRecord>,
    /// Gas metering state.
    pub gas_metering: GasMetering,
    /// Contains gas snapshots made over the course of a test suite.
    // **Note**: both must a BTreeMap to ensure the order of the keys is deterministic.
    pub gas_snapshots: BTreeMap<String, BTreeMap<String, String>>,
    /// Mapping slots.
    pub mapping_slots: Option<AddressHashMap<MappingSlots>>,
    /// The current program counter.
    pub pc: usize,
    /// Breakpoints supplied by the `breakpoint` cheatcode.
    /// `char -> (address, pc)`
    pub breakpoints: Breakpoints,
    /// Whether the next contract creation should be intercepted to return its initcode.
    pub intercept_next_create_call: bool,
    /// Optional cheatcodes `TestRunner`. Used for generating random values from uint and int
    /// strategies.
    test_runner: Option<TestRunner>,
    /// Ignored traces.
    pub ignored_traces: IgnoredTraces,
    /// Addresses with arbitrary storage.
    pub arbitrary_storage: Option<ArbitraryStorage>,
    /// Deprecated cheatcodes mapped to the reason. Used to report warnings on test results.
    pub deprecated: HashMap<&'static str, Option<&'static str>>,
    /// Unlocked wallets used in scripts and testing of scripts.
    pub wallets: Option<Wallets>,
    /// Signatures identifier for decoding events and functions
    signatures_identifier: OnceLock<Option<SignaturesIdentifier>>,
    /// Used to determine whether the broadcasted call has dynamic gas limit.
    pub dynamic_gas_limit: bool,
    /// Custom execution evm version.
    pub execution_evm_version: Option<SpecId>,
}
// This is not derived because calling this in `fn new` with `..Default::default()` creates a second
// `CheatsConfig` which is unused, and inside it `ProjectPathsConfig` is relatively expensive to
// create.
impl Default for Cheatcodes {
    /// Creates a `Cheatcodes` backed by a default [`CheatsConfig`].
    fn default() -> Self {
        Self::new(Arc::default())
    }
}
impl Cheatcodes {
/// Creates a new `Cheatcodes` with the given settings.
///
/// Everything except the analysis, labels, config and `fs_commit` flag starts from
/// its `Default` value.
pub fn new(config: Arc<CheatsConfig>) -> Self {
    Self {
        analysis: None,
        fs_commit: true,
        // Clone the labels out of `config` before `config` itself is moved into the struct.
        labels: config.labels.clone(),
        config,
        block: Default::default(),
        active_delegations: Default::default(),
        active_blob_sidecar: Default::default(),
        gas_price: Default::default(),
        pranks: Default::default(),
        expected_revert: Default::default(),
        assume_no_revert: Default::default(),
        fork_revert_diagnostic: Default::default(),
        accesses: Default::default(),
        recording_accesses: Default::default(),
        recorded_account_diffs_stack: Default::default(),
        recorded_logs: Default::default(),
        record_debug_steps_info: Default::default(),
        mocked_calls: Default::default(),
        mocked_functions: Default::default(),
        expected_calls: Default::default(),
        expected_emits: Default::default(),
        expected_creates: Default::default(),
        allowed_mem_writes: Default::default(),
        broadcast: Default::default(),
        broadcastable_transactions: Default::default(),
        access_list: Default::default(),
        test_context: Default::default(),
        serialized_jsons: Default::default(),
        eth_deals: Default::default(),
        gas_metering: Default::default(),
        gas_snapshots: Default::default(),
        mapping_slots: Default::default(),
        pc: Default::default(),
        breakpoints: Default::default(),
        intercept_next_create_call: Default::default(),
        test_runner: Default::default(),
        ignored_traces: Default::default(),
        arbitrary_storage: Default::default(),
        deprecated: Default::default(),
        wallets: Default::default(),
        signatures_identifier: Default::default(),
        dynamic_gas_limit: Default::default(),
        execution_evm_version: None,
    }
}
/// Enables cheatcode analysis capabilities by providing a solar compiler instance.
///
/// Overwrites any previously configured analysis instance.
pub fn set_analysis(&mut self, analysis: CheatcodeAnalysis) {
    self.analysis = Some(analysis);
}
/// Returns the configured prank at given depth or the first prank configured at a lower depth.
/// For example, if pranks configured for depth 1, 3 and 5, the prank for depth 4 is the one
/// configured at depth 3.
pub fn get_prank(&self, depth: usize) -> Option<&Prank> {
    // `next_back()` fetches the greatest key <= `depth` directly; `last()` on a
    // `BTreeMap` range iterator would walk the whole range forward first.
    self.pranks.range(..=depth).next_back().map(|(_, prank)| prank)
}
/// Returns the configured wallets if available, else creates a new instance.
pub fn wallets(&mut self) -> &Wallets {
self.wallets.get_or_insert_with(|| Wallets::new(MultiWallet::default(), None))
}
/// Sets the unlocked wallets.
pub fn set_wallets(&mut self, wallets: Wallets) {
self.wallets = Some(wallets);
}
/// Adds a delegation to the active delegations list.
pub fn add_delegation(&mut self, authorization: SignedAuthorization) {
self.active_delegations.push(authorization);
}
/// Returns the signatures identifier.
pub fn signatures_identifier(&self) -> Option<&SignaturesIdentifier> {
self.signatures_identifier.get_or_init(|| SignaturesIdentifier::new(true).ok()).as_ref()
}
/// Decodes the input data and applies the cheatcode.
fn apply_cheatcode(
&mut self,
ecx: Ecx,
call: &CallInputs,
executor: &mut dyn CheatcodesExecutor,
) -> Result {
// decode the cheatcode call
let decoded = Vm::VmCalls::abi_decode(&call.input.bytes(ecx)).map_err(|e| {
if let alloy_sol_types::Error::UnknownSelector { name: _, selector } = e {
let msg = format!(
"unknown cheatcode with selector {selector}; \
you may have a mismatch between the `Vm` interface (likely in `forge-std`) \
and the `forge` version"
);
return alloy_sol_types::Error::Other(std::borrow::Cow::Owned(msg));
}
e
})?;
let caller = call.caller;
// ensure the caller is allowed to execute cheatcodes,
// but only if the backend is in forking mode
ecx.journaled_state.database.ensure_cheatcode_access_forking_mode(&caller)?;
apply_dispatch(
&decoded,
&mut CheatsCtxt { state: self, ecx, gas_limit: call.gas_limit, caller },
executor,
)
}
/// Grants cheat code access for new contracts if the caller also has
/// cheatcode access or the new contract is created in top most call.
///
/// There may be cheatcodes in the constructor of the new contract, in order to allow them
/// automatically we need to determine the new address.
fn allow_cheatcodes_on_create(&self, ecx: Ecx, caller: Address, created_address: Address) {
if ecx.journaled_state.depth <= 1
|| ecx.journaled_state.database.has_cheatcode_access(&caller)
{
ecx.journaled_state.database.allow_cheatcode_access(created_address);
}
}
/// Apply EIP-2930 access list.
///
/// If the transaction type is [TransactionType::Legacy] we need to upgrade it to
/// [TransactionType::Eip2930] in order to use access lists. Other transaction types support
/// access lists themselves.
fn apply_accesslist(&mut self, ecx: Ecx) {
if let Some(access_list) = &self.access_list {
ecx.tx.access_list = access_list.clone();
if ecx.tx.tx_type == TransactionType::Legacy as u8 {
ecx.tx.tx_type = TransactionType::Eip2930 as u8;
}
}
}
/// Called when there was a revert.
///
/// Cleanup any previously applied cheatcodes that altered the state in such a way that revm's
/// revert would run into issues.
pub fn on_revert(&mut self, ecx: Ecx) {
trace!(deals=?self.eth_deals.len(), "rolling back deals");
// Delay revert clean up until expected revert is handled, if set.
if self.expected_revert.is_some() {
return;
}
// we only want to apply cleanup top level
if ecx.journaled_state.depth() > 0 {
return;
}
// Roll back all previously applied deals
// This will prevent overflow issues in revm's [`JournaledState::journal_revert`] routine
// which rolls back any transfers.
while let Some(record) = self.eth_deals.pop() {
if let Some(acc) = ecx.journaled_state.state.get_mut(&record.address) {
acc.info.balance = record.old_balance;
}
}
}
pub fn call_with_executor(
&mut self,
ecx: Ecx,
call: &mut CallInputs,
executor: &mut dyn CheatcodesExecutor,
) -> Option<CallOutcome> {
// Apply custom execution evm version.
if let Some(spec_id) = self.execution_evm_version {
ecx.cfg.spec = spec_id;
}
let gas = Gas::new(call.gas_limit);
let curr_depth = ecx.journaled_state.depth();
// At the root call to test function or script `run()`/`setUp()` functions, we are
// decreasing sender nonce to ensure that it matches on-chain nonce once we start
// broadcasting.
if curr_depth == 0 {
let sender = ecx.tx.caller;
let account = match super::evm::journaled_account(ecx, sender) {
Ok(account) => account,
Err(err) => {
return Some(CallOutcome {
result: InterpreterResult {
result: InstructionResult::Revert,
output: err.abi_encode().into(),
gas,
},
memory_offset: call.return_memory_offset.clone(),
was_precompile_called: false,
precompile_call_logs: vec![],
});
}
};
let prev = account.info.nonce;
account.info.nonce = prev.saturating_sub(1);
trace!(target: "cheatcodes", %sender, nonce=account.info.nonce, prev, "corrected nonce");
}
if call.target_address == CHEATCODE_ADDRESS {
return match self.apply_cheatcode(ecx, call, executor) {
Ok(retdata) => Some(CallOutcome {
result: InterpreterResult {
result: InstructionResult::Return,
output: retdata.into(),
gas,
},
memory_offset: call.return_memory_offset.clone(),
was_precompile_called: true,
precompile_call_logs: vec![],
}),
Err(err) => Some(CallOutcome {
result: InterpreterResult {
result: InstructionResult::Revert,
output: err.abi_encode().into(),
gas,
},
memory_offset: call.return_memory_offset.clone(),
was_precompile_called: false,
precompile_call_logs: vec![],
}),
};
}
if call.target_address == HARDHAT_CONSOLE_ADDRESS {
return None;
}
// Handle expected calls
// Grab the different calldatas expected.
if let Some(expected_calls_for_target) = self.expected_calls.get_mut(&call.bytecode_address)
{
// Match every partial/full calldata
for (calldata, (expected, actual_count)) in expected_calls_for_target {
// Increment actual times seen if...
// The calldata is at most, as big as this call's input, and
if calldata.len() <= call.input.len() &&
// Both calldata match, taking the length of the assumed smaller one (which will have at least the selector), and
*calldata == call.input.bytes(ecx)[..calldata.len()] &&
// The value matches, if provided
expected
.value.is_none_or(|value| Some(value) == call.transfer_value()) &&
// The gas matches, if provided
expected.gas.is_none_or(|gas| gas == call.gas_limit) &&
// The minimum gas matches, if provided
expected.min_gas.is_none_or(|min_gas| min_gas <= call.gas_limit)
{
*actual_count += 1;
}
}
}
// Handle mocked calls
if let Some(mocks) = self.mocked_calls.get_mut(&call.bytecode_address) {
let ctx = MockCallDataContext {
calldata: call.input.bytes(ecx),
value: call.transfer_value(),
};
if let Some(return_data_queue) = match mocks.get_mut(&ctx) {
Some(queue) => Some(queue),
None => mocks
.iter_mut()
.find(|(mock, _)| {
call.input.bytes(ecx).get(..mock.calldata.len()) == Some(&mock.calldata[..])
&& mock.value.is_none_or(|value| Some(value) == call.transfer_value())
})
.map(|(_, v)| v),
} && let Some(return_data) = if return_data_queue.len() == 1 {
// If the mocked calls stack has a single element in it, don't empty it
return_data_queue.front().map(|x| x.to_owned())
} else {
// Else, we pop the front element
return_data_queue.pop_front()
} {
return Some(CallOutcome {
result: InterpreterResult {
result: return_data.ret_type,
output: return_data.data,
gas,
},
memory_offset: call.return_memory_offset.clone(),
was_precompile_called: true,
precompile_call_logs: vec![],
});
}
}
// Apply our prank
if let Some(prank) = &self.get_prank(curr_depth) {
// Apply delegate call, `call.caller`` will not equal `prank.prank_caller`
if prank.delegate_call
&& curr_depth == prank.depth
&& let CallScheme::DelegateCall = call.scheme
{
call.target_address = prank.new_caller;
call.caller = prank.new_caller;
if let Some(new_origin) = prank.new_origin {
ecx.tx.caller = new_origin;
}
}
if curr_depth >= prank.depth && call.caller == prank.prank_caller {
let mut prank_applied = false;
// At the target depth we set `msg.sender`
if curr_depth == prank.depth {
// Ensure new caller is loaded and touched
let _ = journaled_account(ecx, prank.new_caller);
call.caller = prank.new_caller;
prank_applied = true;
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/crypto.rs | crates/cheatcodes/src/crypto.rs | //! Implementations of [`Crypto`](spec::Group::Crypto) Cheatcodes.
use crate::{Cheatcode, Cheatcodes, Result, Vm::*};
use alloy_primitives::{Address, B256, U256, keccak256};
use alloy_signer::{Signer, SignerSync};
use alloy_signer_local::{
LocalSigner, MnemonicBuilder, PrivateKeySigner,
coins_bip39::{
ChineseSimplified, ChineseTraditional, Czech, English, French, Italian, Japanese, Korean,
Portuguese, Spanish, Wordlist,
},
};
use alloy_sol_types::SolValue;
use k256::{
FieldBytes, Scalar,
ecdsa::{SigningKey, hazmat},
elliptic_curve::{bigint::ArrayEncoding, sec1::ToEncodedPoint},
};
use p256::ecdsa::{
Signature as P256Signature, SigningKey as P256SigningKey, signature::hazmat::PrehashSigner,
};
/// The BIP32 default derivation path prefix.
const DEFAULT_DERIVATION_PATH_PREFIX: &str = "m/44'/60'/0'/0/";
impl Cheatcode for createWallet_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { walletLabel } = self;
create_wallet(&U256::from_be_bytes(keccak256(walletLabel).0), Some(walletLabel), state)
}
}
impl Cheatcode for createWallet_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { privateKey } = self;
create_wallet(privateKey, None, state)
}
}
impl Cheatcode for createWallet_2Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { privateKey, walletLabel } = self;
create_wallet(privateKey, Some(walletLabel), state)
}
}
impl Cheatcode for sign_0Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { wallet, digest } = self;
let sig = sign(&wallet.privateKey, digest)?;
Ok(encode_full_sig(sig))
}
}
impl Cheatcode for signWithNonceUnsafeCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let pk: U256 = self.privateKey;
let digest: B256 = self.digest;
let nonce: U256 = self.nonce;
let sig: alloy_primitives::Signature = sign_with_nonce(&pk, &digest, &nonce)?;
Ok(encode_full_sig(sig))
}
}
impl Cheatcode for signCompact_0Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { wallet, digest } = self;
let sig = sign(&wallet.privateKey, digest)?;
Ok(encode_compact_sig(sig))
}
}
impl Cheatcode for deriveKey_0Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { mnemonic, index } = self;
derive_key::<English>(mnemonic, DEFAULT_DERIVATION_PATH_PREFIX, *index)
}
}
impl Cheatcode for deriveKey_1Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { mnemonic, derivationPath, index } = self;
derive_key::<English>(mnemonic, derivationPath, *index)
}
}
impl Cheatcode for deriveKey_2Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { mnemonic, index, language } = self;
derive_key_str(mnemonic, DEFAULT_DERIVATION_PATH_PREFIX, *index, language)
}
}
impl Cheatcode for deriveKey_3Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { mnemonic, derivationPath, index, language } = self;
derive_key_str(mnemonic, derivationPath, *index, language)
}
}
impl Cheatcode for rememberKeyCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { privateKey } = self;
let wallet = parse_wallet(privateKey)?;
let address = inject_wallet(state, wallet);
Ok(address.abi_encode())
}
}
impl Cheatcode for rememberKeys_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { mnemonic, derivationPath, count } = self;
let wallets = derive_wallets::<English>(mnemonic, derivationPath, *count)?;
let mut addresses = Vec::<Address>::with_capacity(wallets.len());
for wallet in wallets {
let addr = inject_wallet(state, wallet);
addresses.push(addr);
}
Ok(addresses.abi_encode())
}
}
impl Cheatcode for rememberKeys_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { mnemonic, derivationPath, language, count } = self;
let wallets = derive_wallets_str(mnemonic, derivationPath, language, *count)?;
let mut addresses = Vec::<Address>::with_capacity(wallets.len());
for wallet in wallets {
let addr = inject_wallet(state, wallet);
addresses.push(addr);
}
Ok(addresses.abi_encode())
}
}
fn inject_wallet(state: &mut Cheatcodes, wallet: LocalSigner<SigningKey>) -> Address {
let address = wallet.address();
state.wallets().add_local_signer(wallet);
address
}
impl Cheatcode for sign_1Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { privateKey, digest } = self;
let sig = sign(privateKey, digest)?;
Ok(encode_full_sig(sig))
}
}
impl Cheatcode for signCompact_1Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { privateKey, digest } = self;
let sig = sign(privateKey, digest)?;
Ok(encode_compact_sig(sig))
}
}
impl Cheatcode for sign_2Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { digest } = self;
let sig = sign_with_wallet(state, None, digest)?;
Ok(encode_full_sig(sig))
}
}
impl Cheatcode for signCompact_2Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { digest } = self;
let sig = sign_with_wallet(state, None, digest)?;
Ok(encode_compact_sig(sig))
}
}
impl Cheatcode for sign_3Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { signer, digest } = self;
let sig = sign_with_wallet(state, Some(*signer), digest)?;
Ok(encode_full_sig(sig))
}
}
impl Cheatcode for signCompact_3Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { signer, digest } = self;
let sig = sign_with_wallet(state, Some(*signer), digest)?;
Ok(encode_compact_sig(sig))
}
}
impl Cheatcode for signP256Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { privateKey, digest } = self;
sign_p256(privateKey, digest)
}
}
impl Cheatcode for publicKeyP256Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { privateKey } = self;
let pub_key =
parse_private_key_p256(privateKey)?.verifying_key().as_affine().to_encoded_point(false);
let pub_key_x = U256::from_be_bytes((*pub_key.x().unwrap()).into());
let pub_key_y = U256::from_be_bytes((*pub_key.y().unwrap()).into());
Ok((pub_key_x, pub_key_y).abi_encode())
}
}
/// Using a given private key, return its public ETH address, its public key affine x and y
/// coordinates, and its private key (see the 'Wallet' struct)
///
/// If 'label' is set to 'Some()', assign that label to the associated ETH address in state
fn create_wallet(private_key: &U256, label: Option<&str>, state: &mut Cheatcodes) -> Result {
let key = parse_private_key(private_key)?;
let addr = alloy_signer::utils::secret_key_to_address(&key);
let pub_key = key.verifying_key().as_affine().to_encoded_point(false);
let pub_key_x = U256::from_be_bytes((*pub_key.x().unwrap()).into());
let pub_key_y = U256::from_be_bytes((*pub_key.y().unwrap()).into());
if let Some(label) = label {
state.labels.insert(addr, label.into());
}
Ok(Wallet { addr, publicKeyX: pub_key_x, publicKeyY: pub_key_y, privateKey: *private_key }
.abi_encode())
}
fn encode_full_sig(sig: alloy_primitives::Signature) -> Vec<u8> {
// Retrieve v, r and s from signature.
let v = U256::from(sig.v() as u64 + 27);
let r = B256::from(sig.r());
let s = B256::from(sig.s());
(v, r, s).abi_encode()
}
fn encode_compact_sig(sig: alloy_primitives::Signature) -> Vec<u8> {
// Implement EIP-2098 compact signature.
let r = B256::from(sig.r());
let mut vs = sig.s();
vs.set_bit(255, sig.v());
(r, vs).abi_encode()
}
fn sign(private_key: &U256, digest: &B256) -> Result<alloy_primitives::Signature> {
// The `ecrecover` precompile does not use EIP-155. No chain ID is needed.
let wallet = parse_wallet(private_key)?;
let sig = wallet.sign_hash_sync(digest)?;
debug_assert_eq!(sig.recover_address_from_prehash(digest)?, wallet.address());
Ok(sig)
}
/// Signs `digest` on secp256k1 using a user-supplied ephemeral nonce `k` (no RFC6979).
/// - `private_key` and `nonce` must be in (0, n)
/// - `digest` is a 32-byte prehash.
///
/// # Warning
///
/// Use [`sign_with_nonce`] with extreme caution!
/// Reusing the same nonce (`k`) with the same private key in ECDSA will leak the private key.
/// Always generate `nonce` with a cryptographically secure RNG, and never reuse it across
/// signatures.
fn sign_with_nonce(
private_key: &U256,
digest: &B256,
nonce: &U256,
) -> Result<alloy_primitives::Signature> {
let d_scalar: Scalar =
<Scalar as k256::elliptic_curve::PrimeField>::from_repr(private_key.to_be_bytes().into())
.into_option()
.ok_or_else(|| fmt_err!("invalid private key scalar"))?;
if bool::from(d_scalar.is_zero()) {
return Err(fmt_err!("private key cannot be 0"));
}
let k_scalar: Scalar =
<Scalar as k256::elliptic_curve::PrimeField>::from_repr(nonce.to_be_bytes().into())
.into_option()
.ok_or_else(|| fmt_err!("invalid nonce scalar"))?;
if bool::from(k_scalar.is_zero()) {
return Err(fmt_err!("nonce cannot be 0"));
}
let mut z = [0u8; 32];
z.copy_from_slice(digest.as_slice());
let z_fb: FieldBytes = FieldBytes::from(z);
// Hazmat signing using the scalar `d` (SignPrimitive is implemented for `Scalar`)
// Note: returns (Signature, Option<RecoveryId>)
let (sig_raw, recid_opt) =
<Scalar as hazmat::SignPrimitive<k256::Secp256k1>>::try_sign_prehashed(
&d_scalar, k_scalar, &z_fb,
)
.map_err(|e| fmt_err!("sign_prehashed failed: {e}"))?;
// Enforce low-s; if mirrored, parity flips (we’ll account for it below if we use recid)
let (sig_low, flipped) =
if let Some(norm) = sig_raw.normalize_s() { (norm, true) } else { (sig_raw, false) };
let r_u256 = U256::from_be_bytes(sig_low.r().to_bytes().into());
let s_u256 = U256::from_be_bytes(sig_low.s().to_bytes().into());
// Determine v parity in {0,1}
let v_parity = if let Some(id) = recid_opt {
let mut v = id.to_byte() & 1;
if flipped {
v ^= 1;
}
v
} else {
// Fallback: choose parity by recovery to expected address
let expected_addr = {
let sk: SigningKey = parse_private_key(private_key)?;
alloy_signer::utils::secret_key_to_address(&sk)
};
// Try v = 0
let cand0 = alloy_primitives::Signature::new(r_u256, s_u256, false);
if cand0.recover_address_from_prehash(digest).ok() == Some(expected_addr) {
return Ok(cand0);
}
// Try v = 1
let cand1 = alloy_primitives::Signature::new(r_u256, s_u256, true);
if cand1.recover_address_from_prehash(digest).ok() == Some(expected_addr) {
return Ok(cand1);
}
return Err(fmt_err!("failed to determine recovery id for signature"));
};
let y_parity = v_parity != 0;
Ok(alloy_primitives::Signature::new(r_u256, s_u256, y_parity))
}
fn sign_with_wallet(
state: &mut Cheatcodes,
signer: Option<Address>,
digest: &B256,
) -> Result<alloy_primitives::Signature> {
if state.wallets().is_empty() {
bail!("no wallets available");
}
let mut wallets = state.wallets().inner.lock();
let maybe_provided_sender = wallets.provided_sender;
let signers = wallets.multi_wallet.signers()?;
let signer = if let Some(signer) = signer {
signer
} else if let Some(provided_sender) = maybe_provided_sender {
provided_sender
} else if signers.len() == 1 {
*signers.keys().next().unwrap()
} else {
bail!(
"could not determine signer, there are multiple signers available use vm.sign(signer, digest) to specify one"
);
};
let wallet = signers
.get(&signer)
.ok_or_else(|| fmt_err!("signer with address {signer} is not available"))?;
let sig = foundry_common::block_on(wallet.sign_hash(digest))?;
debug_assert_eq!(sig.recover_address_from_prehash(digest)?, signer);
Ok(sig)
}
fn sign_p256(private_key: &U256, digest: &B256) -> Result {
let signing_key = parse_private_key_p256(private_key)?;
let signature: P256Signature = signing_key.sign_prehash(digest.as_slice())?;
let signature = signature.normalize_s().unwrap_or(signature);
let r_bytes: [u8; 32] = signature.r().to_bytes().into();
let s_bytes: [u8; 32] = signature.s().to_bytes().into();
Ok((r_bytes, s_bytes).abi_encode())
}
fn validate_private_key<C: ecdsa::PrimeCurve>(private_key: &U256) -> Result<()> {
ensure!(*private_key != U256::ZERO, "private key cannot be 0");
let order = U256::from_be_slice(&C::ORDER.to_be_byte_array());
ensure!(
*private_key < order,
"private key must be less than the {curve:?} curve order ({order})",
curve = C::default(),
);
Ok(())
}
fn parse_private_key(private_key: &U256) -> Result<SigningKey> {
validate_private_key::<k256::Secp256k1>(private_key)?;
Ok(SigningKey::from_bytes((&private_key.to_be_bytes()).into())?)
}
fn parse_private_key_p256(private_key: &U256) -> Result<P256SigningKey> {
validate_private_key::<p256::NistP256>(private_key)?;
Ok(P256SigningKey::from_bytes((&private_key.to_be_bytes()).into())?)
}
pub(super) fn parse_wallet(private_key: &U256) -> Result<PrivateKeySigner> {
parse_private_key(private_key).map(PrivateKeySigner::from)
}
fn derive_key_str(mnemonic: &str, path: &str, index: u32, language: &str) -> Result {
match language {
"chinese_simplified" => derive_key::<ChineseSimplified>(mnemonic, path, index),
"chinese_traditional" => derive_key::<ChineseTraditional>(mnemonic, path, index),
"czech" => derive_key::<Czech>(mnemonic, path, index),
"english" => derive_key::<English>(mnemonic, path, index),
"french" => derive_key::<French>(mnemonic, path, index),
"italian" => derive_key::<Italian>(mnemonic, path, index),
"japanese" => derive_key::<Japanese>(mnemonic, path, index),
"korean" => derive_key::<Korean>(mnemonic, path, index),
"portuguese" => derive_key::<Portuguese>(mnemonic, path, index),
"spanish" => derive_key::<Spanish>(mnemonic, path, index),
_ => Err(fmt_err!("unsupported mnemonic language: {language:?}")),
}
}
fn derive_key<W: Wordlist>(mnemonic: &str, path: &str, index: u32) -> Result {
fn derive_key_path(path: &str, index: u32) -> String {
let mut out = path.to_string();
if !out.ends_with('/') {
out.push('/');
}
out.push_str(&index.to_string());
out
}
let wallet = MnemonicBuilder::<W>::default()
.phrase(mnemonic)
.derivation_path(derive_key_path(path, index))?
.build()?;
let private_key = U256::from_be_bytes(wallet.credential().to_bytes().into());
Ok(private_key.abi_encode())
}
fn derive_wallets_str(
mnemonic: &str,
path: &str,
language: &str,
count: u32,
) -> Result<Vec<LocalSigner<SigningKey>>> {
match language {
"chinese_simplified" => derive_wallets::<ChineseSimplified>(mnemonic, path, count),
"chinese_traditional" => derive_wallets::<ChineseTraditional>(mnemonic, path, count),
"czech" => derive_wallets::<Czech>(mnemonic, path, count),
"english" => derive_wallets::<English>(mnemonic, path, count),
"french" => derive_wallets::<French>(mnemonic, path, count),
"italian" => derive_wallets::<Italian>(mnemonic, path, count),
"japanese" => derive_wallets::<Japanese>(mnemonic, path, count),
"korean" => derive_wallets::<Korean>(mnemonic, path, count),
"portuguese" => derive_wallets::<Portuguese>(mnemonic, path, count),
"spanish" => derive_wallets::<Spanish>(mnemonic, path, count),
_ => Err(fmt_err!("unsupported mnemonic language: {language:?}")),
}
}
fn derive_wallets<W: Wordlist>(
mnemonic: &str,
path: &str,
count: u32,
) -> Result<Vec<LocalSigner<SigningKey>>> {
let mut out = path.to_string();
if !out.ends_with('/') {
out.push('/');
}
let mut wallets = Vec::with_capacity(count as usize);
for idx in 0..count {
let wallet = MnemonicBuilder::<W>::default()
.phrase(mnemonic)
.derivation_path(format!("{out}{idx}"))?
.build()?;
wallets.push(wallet);
}
Ok(wallets)
}
#[cfg(test)]
mod tests {
use super::*;
use alloy_primitives::{FixedBytes, hex::FromHex};
use k256::elliptic_curve::Curve;
use p256::ecdsa::signature::hazmat::PrehashVerifier;
#[test]
fn test_sign_p256() {
use p256::ecdsa::VerifyingKey;
let pk_u256: U256 = "1".parse().unwrap();
let signing_key = P256SigningKey::from_bytes(&pk_u256.to_be_bytes().into()).unwrap();
let digest = FixedBytes::from_hex(
"0x44acf6b7e36c1342c2c5897204fe09504e1e2efb1a900377dbc4e7a6a133ec56",
)
.unwrap();
let result = sign_p256(&pk_u256, &digest).unwrap();
let result_bytes: [u8; 64] = result.try_into().unwrap();
let signature = P256Signature::from_bytes(&result_bytes.into()).unwrap();
let verifying_key = VerifyingKey::from(&signing_key);
assert!(verifying_key.verify_prehash(digest.as_slice(), &signature).is_ok());
}
#[test]
fn test_sign_p256_pk_too_large() {
// max n from https://neuromancer.sk/std/secg/secp256r1
let pk =
"0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551".parse().unwrap();
let digest = FixedBytes::from_hex(
"0x54705ba3baafdbdfba8c5f9a70f7a89bee98d906b53e31074da7baecdc0da9ad",
)
.unwrap();
let result = sign_p256(&pk, &digest);
assert_eq!(
result.err().unwrap().to_string(),
"private key must be less than the NistP256 curve order (115792089210356248762697446949407573529996955224135760342422259061068512044369)"
);
}
#[test]
fn test_sign_p256_pk_0() {
let digest = FixedBytes::from_hex(
"0x54705ba3baafdbdfba8c5f9a70f7a89bee98d906b53e31074da7baecdc0da9ad",
)
.unwrap();
let result = sign_p256(&U256::ZERO, &digest);
assert_eq!(result.err().unwrap().to_string(), "private key cannot be 0");
}
#[test]
fn test_sign_with_nonce_varies_and_recovers() {
// Given a fixed private key and digest
let pk_u256: U256 = U256::from(1u64);
let digest = FixedBytes::from_hex(
"0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
)
.unwrap();
// Two distinct nonces
let n1: U256 = U256::from(123u64);
let n2: U256 = U256::from(456u64);
// Sign with both nonces
let sig1 = sign_with_nonce(&pk_u256, &digest, &n1).expect("sig1");
let sig2 = sign_with_nonce(&pk_u256, &digest, &n2).expect("sig2");
// (r,s) must differ when nonce differs
assert!(
sig1.r() != sig2.r() || sig1.s() != sig2.s(),
"signatures should differ with different nonces"
);
// ecrecover must yield the address for both signatures
let sk = parse_private_key(&pk_u256).unwrap();
let expected = alloy_signer::utils::secret_key_to_address(&sk);
assert_eq!(sig1.recover_address_from_prehash(&digest).unwrap(), expected);
assert_eq!(sig2.recover_address_from_prehash(&digest).unwrap(), expected);
}
#[test]
fn test_sign_with_nonce_zero_nonce_errors() {
// nonce = 0 should be rejected
let pk_u256: U256 = U256::from(1u64);
let digest = FixedBytes::from_hex(
"0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
)
.unwrap();
let n0: U256 = U256::ZERO;
let err = sign_with_nonce(&pk_u256, &digest, &n0).unwrap_err();
let msg = err.to_string();
assert!(msg.contains("nonce cannot be 0"), "unexpected error: {msg}");
}
#[test]
fn test_sign_with_nonce_nonce_ge_order_errors() {
// nonce >= n should be rejected
use k256::Secp256k1;
// Curve order n as U256
let n_u256 = U256::from_be_slice(&Secp256k1::ORDER.to_be_byte_array());
let pk_u256: U256 = U256::from(1u64);
let digest = FixedBytes::from_hex(
"0xcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc",
)
.unwrap();
// Try exactly n (>= n invalid)
let err = sign_with_nonce(&pk_u256, &digest, &n_u256).unwrap_err();
let msg = err.to_string();
assert!(msg.contains("invalid nonce scalar"), "unexpected error: {msg}");
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/toml.rs | crates/cheatcodes/src/toml.rs | //! Implementations of [`Toml`](spec::Group::Toml) cheatcodes.
use crate::{
Cheatcode, Cheatcodes, Result,
Vm::*,
json::{
check_json_key_exists, parse_json, parse_json_coerce, parse_json_keys, resolve_type,
upsert_json_value,
},
};
use alloy_dyn_abi::DynSolType;
use alloy_sol_types::SolValue;
use foundry_common::{fmt::StructDefinitions, fs};
use foundry_config::fs_permissions::FsAccessKind;
use serde_json::Value as JsonValue;
use toml::Value as TomlValue;
impl Cheatcode for keyExistsTomlCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
check_json_key_exists(&toml_to_json_string(toml)?, key)
}
}
impl Cheatcode for parseToml_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { toml } = self;
parse_toml(
toml,
"$",
state.analysis.as_ref().and_then(|analysis| analysis.struct_defs().ok()),
)
}
}
impl Cheatcode for parseToml_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml(
toml,
key,
state.analysis.as_ref().and_then(|analysis| analysis.struct_defs().ok()),
)
}
}
impl Cheatcode for parseTomlUintCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Uint(256))
}
}
impl Cheatcode for parseTomlUintArrayCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Uint(256))))
}
}
impl Cheatcode for parseTomlIntCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Int(256))
}
}
impl Cheatcode for parseTomlIntArrayCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Int(256))))
}
}
impl Cheatcode for parseTomlBoolCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Bool)
}
}
impl Cheatcode for parseTomlBoolArrayCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Bool)))
}
}
impl Cheatcode for parseTomlAddressCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Address)
}
}
impl Cheatcode for parseTomlAddressArrayCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Address)))
}
}
impl Cheatcode for parseTomlStringCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::String)
}
}
impl Cheatcode for parseTomlStringArrayCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::String)))
}
}
impl Cheatcode for parseTomlBytesCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Bytes)
}
}
impl Cheatcode for parseTomlBytesArrayCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::Bytes)))
}
}
impl Cheatcode for parseTomlBytes32Call {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::FixedBytes(32))
}
}
impl Cheatcode for parseTomlBytes32ArrayCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(DynSolType::FixedBytes(32))))
}
}
impl Cheatcode for parseTomlType_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { toml, typeDescription } = self;
parse_toml_coerce(
toml,
"$",
&resolve_type(
typeDescription,
state.analysis.as_ref().and_then(|analysis| analysis.struct_defs().ok()),
)?,
)
.map(|v| v.abi_encode())
}
}
impl Cheatcode for parseTomlType_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { toml, key, typeDescription } = self;
parse_toml_coerce(
toml,
key,
&resolve_type(
typeDescription,
state.analysis.as_ref().and_then(|analysis| analysis.struct_defs().ok()),
)?,
)
.map(|v| v.abi_encode())
}
}
impl Cheatcode for parseTomlTypeArrayCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { toml, key, typeDescription } = self;
let ty = resolve_type(
typeDescription,
state.analysis.as_ref().and_then(|analysis| analysis.struct_defs().ok()),
)?;
parse_toml_coerce(toml, key, &DynSolType::Array(Box::new(ty))).map(|v| v.abi_encode())
}
}
impl Cheatcode for parseTomlKeysCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { toml, key } = self;
parse_toml_keys(toml, key)
}
}
impl Cheatcode for writeToml_0Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { json, path } = self;
let value =
serde_json::from_str(json).unwrap_or_else(|_| JsonValue::String(json.to_owned()));
let toml_string = format_json_to_toml(value)?;
super::fs::write_file(state, path.as_ref(), toml_string.as_bytes())
}
}
impl Cheatcode for writeToml_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { json: value, path, valueKey } = self;
// Read and parse the TOML file
let data_path = state.config.ensure_path_allowed(path, FsAccessKind::Read)?;
let toml_data = fs::locked_read_to_string(&data_path)?;
// Convert to JSON and update the object
let mut json_data: JsonValue =
toml::from_str(&toml_data).map_err(|e| fmt_err!("failed parsing TOML: {e}"))?;
upsert_json_value(&mut json_data, value, valueKey)?;
// Serialize back to TOML and write the updated content back to the file
let toml_string = format_json_to_toml(json_data)?;
super::fs::write_file(state, path.as_ref(), toml_string.as_bytes())
}
}
/// Parse
fn parse_toml_str(toml: &str) -> Result<TomlValue> {
toml::from_str(toml).map_err(|e| fmt_err!("failed parsing TOML: {e}"))
}
/// Parse a TOML string and return the value at the given path.
fn parse_toml(toml: &str, key: &str, struct_defs: Option<&StructDefinitions>) -> Result {
parse_json(&toml_to_json_string(toml)?, key, struct_defs)
}
/// Parse a TOML string and return the value at the given path, coercing it to the given type.
fn parse_toml_coerce(toml: &str, key: &str, ty: &DynSolType) -> Result {
parse_json_coerce(&toml_to_json_string(toml)?, key, ty)
}
/// Parse a TOML string and return an array of all keys at the given path.
fn parse_toml_keys(toml: &str, key: &str) -> Result {
parse_json_keys(&toml_to_json_string(toml)?, key)
}
/// Convert a TOML string to a JSON string.
fn toml_to_json_string(toml: &str) -> Result<String> {
let toml = parse_toml_str(toml)?;
let json = toml_to_json_value(toml);
serde_json::to_string(&json).map_err(|e| fmt_err!("failed to serialize JSON: {e}"))
}
/// Format a JSON value to a TOML pretty string.
fn format_json_to_toml(json: JsonValue) -> Result<String> {
let toml = json_to_toml_value(json);
toml::to_string_pretty(&toml).map_err(|e| fmt_err!("failed to serialize TOML: {e}"))
}
/// Convert a TOML value to a JSON value.
pub(super) fn toml_to_json_value(toml: TomlValue) -> JsonValue {
match toml {
TomlValue::String(s) => match s.as_str() {
"null" => JsonValue::Null,
_ => JsonValue::String(s),
},
TomlValue::Integer(i) => JsonValue::Number(i.into()),
TomlValue::Float(f) => match serde_json::Number::from_f64(f) {
Some(n) => JsonValue::Number(n),
None => JsonValue::String(f.to_string()),
},
TomlValue::Boolean(b) => JsonValue::Bool(b),
TomlValue::Array(a) => JsonValue::Array(a.into_iter().map(toml_to_json_value).collect()),
TomlValue::Table(t) => {
JsonValue::Object(t.into_iter().map(|(k, v)| (k, toml_to_json_value(v))).collect())
}
TomlValue::Datetime(d) => JsonValue::String(d.to_string()),
}
}
/// Convert a JSON value to a TOML value.
fn json_to_toml_value(json: JsonValue) -> TomlValue {
match json {
JsonValue::String(s) => TomlValue::String(s),
JsonValue::Number(n) => match n.as_i64() {
Some(i) => TomlValue::Integer(i),
None => match n.as_f64() {
Some(f) => TomlValue::Float(f),
None => TomlValue::String(n.to_string()),
},
},
JsonValue::Bool(b) => TomlValue::Boolean(b),
JsonValue::Array(a) => TomlValue::Array(a.into_iter().map(json_to_toml_value).collect()),
JsonValue::Object(o) => {
TomlValue::Table(o.into_iter().map(|(k, v)| (k, json_to_toml_value(v))).collect())
}
JsonValue::Null => TomlValue::String("null".to_string()),
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/evm/mapping.rs | crates/cheatcodes/src/evm/mapping.rs | use crate::{Cheatcode, Cheatcodes, Result, Vm::*};
use alloy_primitives::{Address, B256};
use alloy_sol_types::SolValue;
use foundry_common::mapping_slots::MappingSlots;
impl Cheatcode for startMappingRecordingCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self {} = self;
state.mapping_slots.get_or_insert_default();
Ok(Default::default())
}
}
impl Cheatcode for stopMappingRecordingCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self {} = self;
state.mapping_slots = None;
Ok(Default::default())
}
}
impl Cheatcode for getMappingLengthCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { target, mappingSlot } = self;
let result = slot_child(state, target, mappingSlot).map(Vec::len).unwrap_or(0);
Ok((result as u64).abi_encode())
}
}
impl Cheatcode for getMappingSlotAtCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { target, mappingSlot, idx } = self;
let result = slot_child(state, target, mappingSlot)
.and_then(|set| set.get(idx.saturating_to::<usize>()))
.copied()
.unwrap_or_default();
Ok(result.abi_encode())
}
}
impl Cheatcode for getMappingKeyAndParentOfCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { target, elementSlot: slot } = self;
let mut found = false;
let mut key = &B256::ZERO;
let mut parent = &B256::ZERO;
if let Some(slots) = mapping_slot(state, target) {
if let Some(key2) = slots.keys.get(slot) {
found = true;
key = key2;
parent = &slots.parent_slots[slot];
} else if let Some((key2, parent2)) = slots.seen_sha3.get(slot) {
found = true;
key = key2;
parent = parent2;
}
}
Ok((found, key, parent).abi_encode_params())
}
}
fn mapping_slot<'a>(state: &'a Cheatcodes, target: &'a Address) -> Option<&'a MappingSlots> {
state.mapping_slots.as_ref()?.get(target)
}
fn slot_child<'a>(
state: &'a Cheatcodes,
target: &'a Address,
slot: &'a B256,
) -> Option<&'a Vec<B256>> {
mapping_slot(state, target)?.children.get(slot)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/evm/record_debug_step.rs | crates/cheatcodes/src/evm/record_debug_step.rs | use alloy_primitives::{Bytes, U256};
use foundry_evm_traces::CallTraceArena;
use revm::{bytecode::opcode::OpCode, interpreter::InstructionResult};
use foundry_evm_core::buffer::{BufferKind, get_buffer_accesses};
use revm_inspectors::tracing::types::{
CallTraceNode, CallTraceStep, RecordedMemory, TraceMemberOrder,
};
use spec::Vm::DebugStep;
// Context for a CallTraceStep, includes depth and contract address.
pub(crate) struct CallTraceCtx<'a> {
pub node: &'a CallTraceNode,
pub step: &'a CallTraceStep,
}
// Do a depth first traverse of the nodes and steps and return steps
// that are after `node_start_idx`
pub(crate) fn flatten_call_trace<'a>(
root: usize,
arena: &'a CallTraceArena,
node_start_idx: usize,
) -> Vec<CallTraceCtx<'a>> {
let mut steps = Vec::new();
let mut record_started = false;
// Start the recursion from the root node
recursive_flatten_call_trace(root, arena, node_start_idx, &mut record_started, &mut steps);
steps
}
// Inner recursive function to process nodes.
// This implementation directly mutates `record_started` and `flatten_steps`.
// So the recursive call can change the `record_started` flag even for the parent
// unfinished processing, and append steps to the `flatten_steps` as the final result.
fn recursive_flatten_call_trace<'a>(
node_idx: usize,
arena: &'a CallTraceArena,
node_start_idx: usize,
record_started: &mut bool,
flatten_steps: &mut Vec<CallTraceCtx<'a>>,
) {
// Once node_idx exceeds node_start_idx, start recording steps
// for all the recursive processing.
if !*record_started && node_idx >= node_start_idx {
*record_started = true;
}
let node = &arena.nodes()[node_idx];
for order in &node.ordering {
match order {
TraceMemberOrder::Step(step_idx) => {
if *record_started {
let step = &node.trace.steps[*step_idx];
flatten_steps.push(CallTraceCtx { node, step });
}
}
TraceMemberOrder::Call(call_idx) => {
let child_node_idx = node.children[*call_idx];
recursive_flatten_call_trace(
child_node_idx,
arena,
node_start_idx,
record_started,
flatten_steps,
);
}
_ => {}
}
}
}
// Function to convert CallTraceStep to DebugStep
pub(crate) fn convert_call_trace_ctx_to_debug_step(ctx: &CallTraceCtx) -> DebugStep {
let opcode = ctx.step.op.get();
let stack = get_stack_inputs_for_opcode(opcode, ctx.step.stack.as_deref());
let memory =
get_memory_input_for_opcode(opcode, ctx.step.stack.as_deref(), ctx.step.memory.as_ref());
let is_out_of_gas = matches!(
ctx.step.status,
Some(
InstructionResult::OutOfGas
| InstructionResult::MemoryOOG
| InstructionResult::MemoryLimitOOG
| InstructionResult::PrecompileOOG
| InstructionResult::InvalidOperandOOG
)
);
let depth = ctx.node.trace.depth as u64 + 1;
let contract_addr = ctx.node.execution_address();
DebugStep {
stack,
memoryInput: memory,
opcode: ctx.step.op.get(),
depth,
isOutOfGas: is_out_of_gas,
contractAddr: contract_addr,
}
}
// The expected `stack` here is from the trace stack, where the top of the stack
// is the last value of the vector
fn get_memory_input_for_opcode(
opcode: u8,
stack: Option<&[U256]>,
memory: Option<&RecordedMemory>,
) -> Bytes {
let mut memory_input = Bytes::new();
let Some(stack_data) = stack else { return memory_input };
let Some(memory_data) = memory else { return memory_input };
if let Some(accesses) = get_buffer_accesses(opcode, stack_data)
&& let Some((BufferKind::Memory, access)) = accesses.read
{
memory_input = get_slice_from_memory(memory_data.as_bytes(), access.offset, access.len);
};
memory_input
}
// The expected `stack` here is from the trace stack, where the top of the stack
// is the last value of the vector
fn get_stack_inputs_for_opcode(opcode: u8, stack: Option<&[U256]>) -> Vec<U256> {
let mut inputs = Vec::new();
let Some(op) = OpCode::new(opcode) else { return inputs };
let Some(stack_data) = stack else { return inputs };
let stack_input_size = op.inputs() as usize;
for i in 0..stack_input_size {
inputs.push(stack_data[stack_data.len() - 1 - i]);
}
inputs
}
fn get_slice_from_memory(memory: &Bytes, start_index: usize, size: usize) -> Bytes {
let memory_len = memory.len();
let end_bound = start_index + size;
// Return the bytes if data is within the range.
if start_index < memory_len && end_bound <= memory_len {
return memory.slice(start_index..end_bound);
}
// Pad zero bytes if attempting to load memory partially out of range.
if start_index < memory_len && end_bound > memory_len {
let mut result = memory.slice(start_index..memory_len).to_vec();
result.resize(size, 0u8);
return Bytes::from(result);
}
// Return empty bytes with the size if not in range at all.
Bytes::from(vec![0u8; size])
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/evm/prank.rs | crates/cheatcodes/src/evm/prank.rs | use crate::{Cheatcode, CheatsCtxt, Result, Vm::*, evm::journaled_account};
use alloy_primitives::Address;
use revm::context::JournalTr;
/// Prank information.
#[derive(Clone, Copy, Debug, Default)]
pub struct Prank {
/// Address of the contract that initiated the prank
pub prank_caller: Address,
/// Address of `tx.origin` when the prank was initiated
pub prank_origin: Address,
/// The address to assign to `msg.sender`
pub new_caller: Address,
/// The address to assign to `tx.origin`
pub new_origin: Option<Address>,
/// The depth at which the prank was called
pub depth: usize,
/// Whether the prank stops by itself after the next call
pub single_call: bool,
/// Whether the prank should be applied to delegate call
pub delegate_call: bool,
/// Whether the prank has been used yet (false if unused)
pub used: bool,
}
impl Prank {
/// Create a new prank.
pub fn new(
prank_caller: Address,
prank_origin: Address,
new_caller: Address,
new_origin: Option<Address>,
depth: usize,
single_call: bool,
delegate_call: bool,
) -> Self {
Self {
prank_caller,
prank_origin,
new_caller,
new_origin,
depth,
single_call,
delegate_call,
used: false,
}
}
/// Apply the prank by setting `used` to true if it is false
/// Only returns self in the case it is updated (first application)
pub fn first_time_applied(&self) -> Option<Self> {
if self.used { None } else { Some(Self { used: true, ..*self }) }
}
}
impl Cheatcode for prank_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { msgSender } = self;
prank(ccx, msgSender, None, true, false)
}
}
impl Cheatcode for startPrank_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { msgSender } = self;
prank(ccx, msgSender, None, false, false)
}
}
impl Cheatcode for prank_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { msgSender, txOrigin } = self;
prank(ccx, msgSender, Some(txOrigin), true, false)
}
}
impl Cheatcode for startPrank_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { msgSender, txOrigin } = self;
prank(ccx, msgSender, Some(txOrigin), false, false)
}
}
impl Cheatcode for prank_2Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { msgSender, delegateCall } = self;
prank(ccx, msgSender, None, true, *delegateCall)
}
}
impl Cheatcode for startPrank_2Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { msgSender, delegateCall } = self;
prank(ccx, msgSender, None, false, *delegateCall)
}
}
impl Cheatcode for prank_3Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { msgSender, txOrigin, delegateCall } = self;
prank(ccx, msgSender, Some(txOrigin), true, *delegateCall)
}
}
impl Cheatcode for startPrank_3Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { msgSender, txOrigin, delegateCall } = self;
prank(ccx, msgSender, Some(txOrigin), false, *delegateCall)
}
}
impl Cheatcode for stopPrankCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self {} = self;
ccx.state.pranks.remove(&ccx.ecx.journaled_state.depth());
Ok(Default::default())
}
}
fn prank(
ccx: &mut CheatsCtxt,
new_caller: &Address,
new_origin: Option<&Address>,
single_call: bool,
delegate_call: bool,
) -> Result {
// Ensure that we load the account of the pranked address and mark it as touched.
// This is necessary to ensure that account state changes (such as the account's `nonce`) are
// properly tracked.
let account = journaled_account(ccx.ecx, *new_caller)?;
// Ensure that code exists at `msg.sender` if delegate calling.
if delegate_call {
ensure!(
account.info.code.as_ref().is_some_and(|code| !code.is_empty()),
"cannot `prank` delegate call from an EOA"
);
}
let depth = ccx.ecx.journaled_state.depth();
if let Some(Prank { used, single_call: current_single_call, .. }) = ccx.state.get_prank(depth) {
ensure!(used, "cannot overwrite a prank until it is applied at least once");
// This case can only fail if the user calls `vm.startPrank` and then `vm.prank` later on.
// This should not be possible without first calling `stopPrank`
ensure!(
single_call == *current_single_call,
"cannot override an ongoing prank with a single vm.prank; \
use vm.startPrank to override the current prank"
);
}
let prank = Prank::new(
ccx.caller,
ccx.ecx.tx.caller,
*new_caller,
new_origin.copied(),
depth,
single_call,
delegate_call,
);
ensure!(
ccx.state.broadcast.is_none(),
"cannot `prank` for a broadcasted transaction; \
pass the desired `tx.origin` into the `broadcast` cheatcode call"
);
ccx.state.pranks.insert(prank.depth, prank);
Ok(Default::default())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/evm/mock.rs | crates/cheatcodes/src/evm/mock.rs | use crate::{Cheatcode, Cheatcodes, CheatsCtxt, Result, Vm::*};
use alloy_primitives::{Address, Bytes, U256};
use revm::{bytecode::Bytecode, context::JournalTr, interpreter::InstructionResult};
use std::{cmp::Ordering, collections::VecDeque};
/// Mocked call data.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub struct MockCallDataContext {
/// The partial calldata to match for mock
pub calldata: Bytes,
/// The value to match for mock
pub value: Option<U256>,
}
/// Mocked return data.
#[derive(Clone, Debug)]
pub struct MockCallReturnData {
/// The return type for the mocked call
pub ret_type: InstructionResult,
/// Return data or error
pub data: Bytes,
}
impl PartialOrd for MockCallDataContext {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for MockCallDataContext {
fn cmp(&self, other: &Self) -> Ordering {
// Calldata matching is reversed to ensure that a tighter match is
// returned if an exact match is not found. In case, there is
// a partial match to calldata that is more specific than
// a match to a msg.value, then the more specific calldata takes
// precedence.
self.calldata.cmp(&other.calldata).reverse().then(self.value.cmp(&other.value).reverse())
}
}
impl Cheatcode for clearMockedCallsCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self {} = self;
state.mocked_calls = Default::default();
Ok(Default::default())
}
}
impl Cheatcode for mockCall_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, data, returnData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_call(ccx.state, callee, data, None, returnData, InstructionResult::Return);
Ok(Default::default())
}
}
impl Cheatcode for mockCall_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, msgValue, data, returnData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_call(ccx.state, callee, data, Some(msgValue), returnData, InstructionResult::Return);
Ok(Default::default())
}
}
impl Cheatcode for mockCall_2Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, data, returnData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_call(
ccx.state,
callee,
&Bytes::from(*data),
None,
returnData,
InstructionResult::Return,
);
Ok(Default::default())
}
}
impl Cheatcode for mockCall_3Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, msgValue, data, returnData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_call(
ccx.state,
callee,
&Bytes::from(*data),
Some(msgValue),
returnData,
InstructionResult::Return,
);
Ok(Default::default())
}
}
impl Cheatcode for mockCalls_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, data, returnData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_calls(ccx.state, callee, data, None, returnData, InstructionResult::Return);
Ok(Default::default())
}
}
impl Cheatcode for mockCalls_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, msgValue, data, returnData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_calls(ccx.state, callee, data, Some(msgValue), returnData, InstructionResult::Return);
Ok(Default::default())
}
}
impl Cheatcode for mockCallRevert_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, data, revertData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_call(ccx.state, callee, data, None, revertData, InstructionResult::Revert);
Ok(Default::default())
}
}
impl Cheatcode for mockCallRevert_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, msgValue, data, revertData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_call(ccx.state, callee, data, Some(msgValue), revertData, InstructionResult::Revert);
Ok(Default::default())
}
}
impl Cheatcode for mockCallRevert_2Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, data, revertData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_call(
ccx.state,
callee,
&Bytes::from(*data),
None,
revertData,
InstructionResult::Revert,
);
Ok(Default::default())
}
}
impl Cheatcode for mockCallRevert_3Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { callee, msgValue, data, revertData } = self;
let _ = make_acc_non_empty(callee, ccx)?;
mock_call(
ccx.state,
callee,
&Bytes::from(*data),
Some(msgValue),
revertData,
InstructionResult::Revert,
);
Ok(Default::default())
}
}
impl Cheatcode for mockFunctionCall {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { callee, target, data } = self;
state.mocked_functions.entry(*callee).or_default().insert(data.clone(), *target);
Ok(Default::default())
}
}
fn mock_call(
state: &mut Cheatcodes,
callee: &Address,
cdata: &Bytes,
value: Option<&U256>,
rdata: &Bytes,
ret_type: InstructionResult,
) {
mock_calls(state, callee, cdata, value, std::slice::from_ref(rdata), ret_type)
}
fn mock_calls(
state: &mut Cheatcodes,
callee: &Address,
cdata: &Bytes,
value: Option<&U256>,
rdata_vec: &[Bytes],
ret_type: InstructionResult,
) {
state.mocked_calls.entry(*callee).or_default().insert(
MockCallDataContext { calldata: cdata.clone(), value: value.copied() },
rdata_vec
.iter()
.map(|rdata| MockCallReturnData { ret_type, data: rdata.clone() })
.collect::<VecDeque<_>>(),
);
}
// Etches a single byte onto the account if it is empty to circumvent the `extcodesize`
// check Solidity might perform.
fn make_acc_non_empty(callee: &Address, ecx: &mut CheatsCtxt) -> Result {
let acc = ecx.journaled_state.load_account(*callee)?;
let empty_bytecode = acc.info.code.as_ref().is_none_or(Bytecode::is_empty);
if empty_bytecode {
let code = Bytecode::new_raw(Bytes::from_static(&[0u8]));
ecx.journaled_state.set_code(*callee, code);
}
Ok(Default::default())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/evm/fork.rs | crates/cheatcodes/src/evm/fork.rs | use crate::{
Cheatcode, Cheatcodes, CheatcodesExecutor, CheatsCtxt, DatabaseExt, Result, Vm::*,
json::json_value_to_token,
};
use alloy_dyn_abi::DynSolValue;
use alloy_primitives::{B256, U256};
use alloy_provider::Provider;
use alloy_rpc_types::Filter;
use alloy_sol_types::SolValue;
use foundry_common::provider::ProviderBuilder;
use foundry_evm_core::{AsEnvMut, ContextExt, fork::CreateFork};
impl Cheatcode for activeForkCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self {} = self;
ccx.ecx
.journaled_state
.database
.active_fork_id()
.map(|id| id.abi_encode())
.ok_or_else(|| fmt_err!("no active fork"))
}
}
impl Cheatcode for createFork_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { urlOrAlias } = self;
create_fork(ccx, urlOrAlias, None)
}
}
impl Cheatcode for createFork_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { urlOrAlias, blockNumber } = self;
create_fork(ccx, urlOrAlias, Some(blockNumber.saturating_to()))
}
}
impl Cheatcode for createFork_2Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { urlOrAlias, txHash } = self;
create_fork_at_transaction(ccx, urlOrAlias, txHash)
}
}
impl Cheatcode for createSelectFork_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { urlOrAlias } = self;
create_select_fork(ccx, urlOrAlias, None)
}
}
impl Cheatcode for createSelectFork_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { urlOrAlias, blockNumber } = self;
create_select_fork(ccx, urlOrAlias, Some(blockNumber.saturating_to()))
}
}
impl Cheatcode for createSelectFork_2Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { urlOrAlias, txHash } = self;
create_select_fork_at_transaction(ccx, urlOrAlias, txHash)
}
}
impl Cheatcode for rollFork_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { blockNumber } = self;
persist_caller(ccx);
let (db, journal, mut env) = ccx.ecx.as_db_env_and_journal();
db.roll_fork(None, (*blockNumber).to(), &mut env, journal)?;
Ok(Default::default())
}
}
impl Cheatcode for rollFork_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { txHash } = self;
persist_caller(ccx);
let (db, journal, mut env) = ccx.ecx.as_db_env_and_journal();
db.roll_fork_to_transaction(None, *txHash, &mut env, journal)?;
Ok(Default::default())
}
}
impl Cheatcode for rollFork_2Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { forkId, blockNumber } = self;
persist_caller(ccx);
let (db, journal, mut env) = ccx.ecx.as_db_env_and_journal();
db.roll_fork(Some(*forkId), (*blockNumber).to(), &mut env, journal)?;
Ok(Default::default())
}
}
impl Cheatcode for rollFork_3Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { forkId, txHash } = self;
persist_caller(ccx);
let (db, journal, mut env) = ccx.ecx.as_db_env_and_journal();
db.roll_fork_to_transaction(Some(*forkId), *txHash, &mut env, journal)?;
Ok(Default::default())
}
}
impl Cheatcode for selectForkCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { forkId } = self;
persist_caller(ccx);
check_broadcast(ccx.state)?;
let (db, journal, mut env) = ccx.ecx.as_db_env_and_journal();
db.select_fork(*forkId, &mut env, journal)?;
Ok(Default::default())
}
}
impl Cheatcode for transact_0Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { txHash } = *self;
transact(ccx, executor, txHash, None)
}
}
impl Cheatcode for transact_1Call {
fn apply_full(&self, ccx: &mut CheatsCtxt, executor: &mut dyn CheatcodesExecutor) -> Result {
let Self { forkId, txHash } = *self;
transact(ccx, executor, txHash, Some(forkId))
}
}
impl Cheatcode for allowCheatcodesCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { account } = self;
ccx.ecx.journaled_state.database.allow_cheatcode_access(*account);
Ok(Default::default())
}
}
impl Cheatcode for makePersistent_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { account } = self;
ccx.ecx.journaled_state.database.add_persistent_account(*account);
Ok(Default::default())
}
}
impl Cheatcode for makePersistent_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { account0, account1 } = self;
ccx.ecx.journaled_state.database.add_persistent_account(*account0);
ccx.ecx.journaled_state.database.add_persistent_account(*account1);
Ok(Default::default())
}
}
impl Cheatcode for makePersistent_2Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { account0, account1, account2 } = self;
ccx.ecx.journaled_state.database.add_persistent_account(*account0);
ccx.ecx.journaled_state.database.add_persistent_account(*account1);
ccx.ecx.journaled_state.database.add_persistent_account(*account2);
Ok(Default::default())
}
}
impl Cheatcode for makePersistent_3Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { accounts } = self;
for account in accounts {
ccx.ecx.journaled_state.database.add_persistent_account(*account);
}
Ok(Default::default())
}
}
impl Cheatcode for revokePersistent_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { account } = self;
ccx.ecx.journaled_state.database.remove_persistent_account(account);
Ok(Default::default())
}
}
impl Cheatcode for revokePersistent_1Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { accounts } = self;
for account in accounts {
ccx.ecx.journaled_state.database.remove_persistent_account(account);
}
Ok(Default::default())
}
}
impl Cheatcode for isPersistentCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { account } = self;
Ok(ccx.ecx.journaled_state.database.is_persistent(account).abi_encode())
}
}
impl Cheatcode for rpc_0Call {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { method, params } = self;
let url = ccx
.ecx
.journaled_state
.database
.active_fork_url()
.ok_or_else(|| fmt_err!("no active fork URL found"))?;
rpc_call(&url, method, params)
}
}
impl Cheatcode for rpc_1Call {
fn apply(&self, state: &mut Cheatcodes) -> Result {
let Self { urlOrAlias, method, params } = self;
let url = state.config.rpc_endpoint(urlOrAlias)?.url()?;
rpc_call(&url, method, params)
}
}
impl Cheatcode for eth_getLogsCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { fromBlock, toBlock, target, topics } = self;
let (Ok(from_block), Ok(to_block)) = (u64::try_from(fromBlock), u64::try_from(toBlock))
else {
bail!("blocks in block range must be less than 2^64")
};
if topics.len() > 4 {
bail!("topics array must contain at most 4 elements")
}
let url = ccx
.ecx
.journaled_state
.database
.active_fork_url()
.ok_or_else(|| fmt_err!("no active fork URL found"))?;
let provider = ProviderBuilder::new(&url).build()?;
let mut filter = Filter::new().address(*target).from_block(from_block).to_block(to_block);
for (i, &topic) in topics.iter().enumerate() {
filter.topics[i] = topic.into();
}
let logs = foundry_common::block_on(provider.get_logs(&filter))
.map_err(|e| fmt_err!("failed to get logs: {e}"))?;
let eth_logs = logs
.into_iter()
.map(|log| EthGetLogs {
emitter: log.address(),
topics: log.topics().to_vec(),
data: log.inner.data.data,
blockHash: log.block_hash.unwrap_or_default(),
blockNumber: log.block_number.unwrap_or_default(),
transactionHash: log.transaction_hash.unwrap_or_default(),
transactionIndex: log.transaction_index.unwrap_or_default(),
logIndex: U256::from(log.log_index.unwrap_or_default()),
removed: log.removed,
})
.collect::<Vec<_>>();
Ok(eth_logs.abi_encode())
}
}
impl Cheatcode for getRawBlockHeaderCall {
fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
let Self { blockNumber } = self;
let url = ccx
.ecx
.journaled_state
.database
.active_fork_url()
.ok_or_else(|| fmt_err!("no active fork"))?;
let provider = ProviderBuilder::new(&url).build()?;
let block_number = u64::try_from(blockNumber)
.map_err(|_| fmt_err!("block number must be less than 2^64"))?;
let block =
foundry_common::block_on(async move { provider.get_block(block_number.into()).await })
.map_err(|e| fmt_err!("failed to get block: {e}"))?
.ok_or_else(|| fmt_err!("block {block_number} not found"))?;
let header: alloy_consensus::Header = block
.into_inner()
.header
.inner
.try_into_header()
.map_err(|e| fmt_err!("failed to convert to header: {e}"))?;
Ok(alloy_rlp::encode(&header).abi_encode())
}
}
/// Creates and then also selects the new fork
fn create_select_fork(ccx: &mut CheatsCtxt, url_or_alias: &str, block: Option<u64>) -> Result {
check_broadcast(ccx.state)?;
let fork = create_fork_request(ccx, url_or_alias, block)?;
let (db, journal, mut env) = ccx.ecx.as_db_env_and_journal();
let id = db.create_select_fork(fork, &mut env, journal)?;
Ok(id.abi_encode())
}
/// Creates a new fork
fn create_fork(ccx: &mut CheatsCtxt, url_or_alias: &str, block: Option<u64>) -> Result {
let fork = create_fork_request(ccx, url_or_alias, block)?;
let id = ccx.ecx.journaled_state.database.create_fork(fork)?;
Ok(id.abi_encode())
}
/// Creates and then also selects the new fork at the given transaction
fn create_select_fork_at_transaction(
ccx: &mut CheatsCtxt,
url_or_alias: &str,
transaction: &B256,
) -> Result {
check_broadcast(ccx.state)?;
let fork = create_fork_request(ccx, url_or_alias, None)?;
let (db, journal, mut env) = ccx.ecx.as_db_env_and_journal();
let id = db.create_select_fork_at_transaction(fork, &mut env, journal, *transaction)?;
Ok(id.abi_encode())
}
/// Creates a new fork pinned at the given transaction without selecting it.
fn create_fork_at_transaction(
    ccx: &mut CheatsCtxt,
    url_or_alias: &str,
    transaction: &B256,
) -> Result {
    let request = create_fork_request(ccx, url_or_alias, None)?;
    let fork_id =
        ccx.ecx.journaled_state.database.create_fork_at_transaction(request, *transaction)?;
    Ok(fork_id.abi_encode())
}
/// Builds the [`CreateFork`] request used by all fork-creation cheatcodes.
fn create_fork_request(
    ccx: &mut CheatsCtxt,
    url_or_alias: &str,
    block: Option<u64>,
) -> Result<CreateFork> {
    // Keep the caller's state alive across the fork swap.
    persist_caller(ccx);
    let endpoint = ccx.state.config.rpc_endpoint(url_or_alias)?;
    let url = endpoint.url()?;
    // Start from the configured EVM options and overlay the per-request settings.
    let mut evm_opts = ccx.state.config.evm_opts.clone();
    evm_opts.fork_block_number = block;
    evm_opts.fork_retries = endpoint.config.retries;
    evm_opts.fork_retry_backoff = endpoint.config.retry_backoff;
    // Forward endpoint authentication as an HTTP header, if configured.
    if let Some(Ok(auth)) = endpoint.auth {
        evm_opts.fork_headers = Some(vec![format!("Authorization: {auth}")]);
    }
    let enable_caching = !ccx.state.config.no_storage_caching
        && ccx.state.config.rpc_storage_caching.enable_for_endpoint(&url);
    Ok(CreateFork { enable_caching, url, env: ccx.ecx.as_env_mut().to_owned(), evm_opts })
}
/// Errors when a broadcast is currently active; fork selection is not allowed
/// while broadcasting.
fn check_broadcast(state: &Cheatcodes) -> Result<()> {
    if state.broadcast.is_some() {
        return Err(fmt_err!("cannot select forks during a broadcast"));
    }
    Ok(())
}
/// Replays `transaction` (optionally on fork `fork_id`, otherwise on the active
/// backend) on top of the current environment, routing execution through the
/// cheatcode inspector so cheatcodes inside the replayed call still apply.
fn transact(
    ccx: &mut CheatsCtxt,
    executor: &mut dyn CheatcodesExecutor,
    transaction: B256,
    fork_id: Option<U256>,
) -> Result {
    let (db, journal, env) = ccx.ecx.as_db_env_and_journal();
    db.transact(
        fork_id,
        transaction,
        env.to_owned(),
        journal,
        // Re-borrow the inspector for the nested execution.
        &mut *executor.get_inspector(ccx.state),
    )?;
    Ok(Default::default())
}
/// Adds the caller of a fork cheatcode as a persistent account, so the caller
/// contract's state is not lost when the active fork changes.
///
/// Applies to create, select and roll fork actions.
/// See <https://github.com/foundry-rs/foundry/issues/8004>.
fn persist_caller(ccx: &mut CheatsCtxt) {
    ccx.ecx.journaled_state.database.add_persistent_account(ccx.caller);
}
/// Performs a raw Ethereum JSON-RPC request against `url` and ABI-encodes the
/// response.
fn rpc_call(url: &str, method: &str, params: &str) -> Result {
    let provider = ProviderBuilder::new(url).build()?;
    // `params` arrives as a JSON string from Solidity.
    let params: serde_json::Value = serde_json::from_str(params)?;
    let response =
        foundry_common::block_on(provider.raw_request(method.to_string().into(), params))
            .map_err(|err| fmt_err!("{method:?}: {err}"))?;
    let token = json_value_to_token(&response, None)
        .map_err(|err| fmt_err!("failed to parse result: {err}"))?;
    // Normalize fixed-size values to `bytes` before encoding.
    Ok(convert_to_bytes(&token).abi_encode())
}
/// Convert fixed bytes and address values to bytes in order to prevent encoding issues.
fn convert_to_bytes(token: &DynSolValue) -> DynSolValue {
    match token {
        // Fixed bytes are truncated to their declared size and re-emitted as dynamic bytes.
        // See: <https://github.com/foundry-rs/foundry/issues/8287>
        DynSolValue::FixedBytes(word, size) => {
            DynSolValue::Bytes(word.as_slice()[..*size].to_vec())
        }
        DynSolValue::Address(addr) => DynSolValue::Bytes(addr.to_vec()),
        // Recurse into tuples so nested values are normalized as well.
        // See: <https://github.com/foundry-rs/foundry/issues/7858>
        DynSolValue::Tuple(values) => {
            DynSolValue::Tuple(values.iter().map(convert_to_bytes).collect())
        }
        other => other.clone(),
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/test/assert.rs | crates/cheatcodes/src/test/assert.rs | use crate::{CheatcodesExecutor, CheatsCtxt, Result, Vm::*};
use alloy_primitives::{I256, U256, U512};
use foundry_evm_core::{
abi::console::{format_units_int, format_units_uint},
backend::GLOBAL_FAIL_SLOT,
constants::CHEATCODE_ADDRESS,
};
use itertools::Itertools;
use revm::context::JournalTr;
use std::{borrow::Cow, fmt};
/// Number of decimal digits used when scaling relative deltas: a ratio of 1.0
/// is represented as `10^18` (see `calc_delta_full`).
const EQ_REL_DELTA_RESOLUTION: U256 = U256::from_limbs([18, 0, 0, 0]);
/// Failure payload for a binary comparison assertion (`assertEq`, `assertLt`, ...).
struct ComparisonAssertionError<'a, T> {
    // The comparison that was *asserted* (not the one that actually held).
    kind: AssertionKind,
    left: &'a T,
    right: &'a T,
}
/// The comparison operator an assertion checked for.
#[derive(Clone, Copy)]
enum AssertionKind {
    Eq,
    Ne,
    Gt,
    Ge,
    Lt,
    Le,
}
impl AssertionKind {
    /// Logical negation of the comparison; used to describe the relation that
    /// *actually* held when the asserted one failed.
    fn inverse(self) -> Self {
        match self {
            Self::Eq => Self::Ne,
            Self::Ne => Self::Eq,
            Self::Gt => Self::Le,
            Self::Ge => Self::Lt,
            Self::Lt => Self::Ge,
            Self::Le => Self::Gt,
        }
    }
    /// Solidity-style operator symbol for this comparison.
    fn to_str(self) -> &'static str {
        match self {
            Self::Eq => "==",
            Self::Ne => "!=",
            Self::Gt => ">",
            Self::Ge => ">=",
            Self::Lt => "<",
            Self::Le => "<=",
        }
    }
}
impl<T> ComparisonAssertionError<'_, T> {
    /// Renders `"<left> <op> <right>"` using `f` to stringify both sides.
    /// The *inverse* operator is printed because the message reports the
    /// relation that held instead of the one that was asserted.
    fn format_values<D: fmt::Display>(&self, f: impl Fn(&T) -> D) -> String {
        format!("{} {} {}", f(self.left), self.kind.inverse().to_str(), f(self.right))
    }
}
impl<T: fmt::Display> ComparisonAssertionError<'_, T> {
    /// Formats both sides with their `Display` impl.
    fn format_for_values(&self) -> String {
        self.format_values(T::to_string)
    }
}
impl<T: fmt::Display> ComparisonAssertionError<'_, Vec<T>> {
    /// Formats both sides as `[a, b, c]` lists.
    fn format_for_arrays(&self) -> String {
        self.format_values(|v| format!("[{}]", v.iter().format(", ")))
    }
}
impl ComparisonAssertionError<'_, U256> {
    /// Formats both sides as unsigned fixed-point numbers with `decimals` decimals.
    fn format_with_decimals(&self, decimals: &U256) -> String {
        self.format_values(|v| format_units_uint(v, decimals))
    }
}
impl ComparisonAssertionError<'_, I256> {
    /// Formats both sides as signed fixed-point numbers with `decimals` decimals.
    fn format_with_decimals(&self, decimals: &U256) -> String {
        self.format_values(|v| format_units_int(v, decimals))
    }
}
/// Failure payload for absolute-difference assertions (`assertApproxEqAbs*`).
#[derive(thiserror::Error, Debug)]
#[error("{left} !~= {right} (max delta: {max_delta}, real delta: {real_delta})")]
struct EqAbsAssertionError<T, D> {
    left: T,
    right: T,
    // Largest difference the assertion tolerates.
    max_delta: D,
    // Difference that was actually observed.
    real_delta: D,
}
impl EqAbsAssertionError<U256, U256> {
    /// Same message as the `Display` impl, but with all values scaled by `decimals`.
    fn format_with_decimals(&self, decimals: &U256) -> String {
        format!(
            "{} !~= {} (max delta: {}, real delta: {})",
            format_units_uint(&self.left, decimals),
            format_units_uint(&self.right, decimals),
            format_units_uint(&self.max_delta, decimals),
            format_units_uint(&self.real_delta, decimals),
        )
    }
}
impl EqAbsAssertionError<I256, U256> {
    /// Same message as the `Display` impl, but with all values scaled by `decimals`.
    fn format_with_decimals(&self, decimals: &U256) -> String {
        format!(
            "{} !~= {} (max delta: {}, real delta: {})",
            format_units_int(&self.left, decimals),
            format_units_int(&self.right, decimals),
            format_units_uint(&self.max_delta, decimals),
            format_units_uint(&self.real_delta, decimals),
        )
    }
}
/// Formats a `10^18`-scaled delta as a percentage; `resolution - 2` decimals
/// converts the ratio into percent (x / 10^18 * 100 == x / 10^16).
fn format_delta_percent(delta: &U256) -> String {
    format!("{}%", format_units_uint(delta, &(EQ_REL_DELTA_RESOLUTION - U256::from(2))))
}
/// Relative delta of a failed `assertApproxEqRel*`; `Undefined` when it cannot
/// be computed because the reference value (`right`) is zero.
#[derive(Debug)]
enum EqRelDelta {
    Defined(U256),
    Undefined,
}
impl fmt::Display for EqRelDelta {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Defined(delta) => write!(f, "{}", format_delta_percent(delta)),
            Self::Undefined => write!(f, "undefined"),
        }
    }
}
/// Failure payload for relative-difference assertions (`assertApproxEqRel*`).
#[derive(thiserror::Error, Debug)]
#[error(
    "{left} !~= {right} (max delta: {}, real delta: {})",
    format_delta_percent(max_delta),
    real_delta
)]
struct EqRelAssertionFailure<T> {
    left: T,
    right: T,
    // Maximum tolerated delta, scaled by 10^18 (10^18 == a ratio of 1.0).
    max_delta: U256,
    // Observed delta; `Undefined` when `right == 0`.
    real_delta: EqRelDelta,
}
/// Outcome of a failed relative-difference assertion.
#[derive(thiserror::Error, Debug)]
enum EqRelAssertionError<T> {
    #[error(transparent)]
    Failure(Box<EqRelAssertionFailure<T>>),
    // Raised by `calc_delta_full` when the scaled delta exceeds `U256::MAX`.
    #[error("overflow in delta calculation")]
    Overflow,
}
impl EqRelAssertionError<U256> {
    /// Formats the failure with both sides scaled by `decimals`.
    fn format_with_decimals(&self, decimals: &U256) -> String {
        match self {
            Self::Failure(f) => format!(
                "{} !~= {} (max delta: {}, real delta: {})",
                format_units_uint(&f.left, decimals),
                format_units_uint(&f.right, decimals),
                format_delta_percent(&f.max_delta),
                &f.real_delta,
            ),
            Self::Overflow => self.to_string(),
        }
    }
}
impl EqRelAssertionError<I256> {
    /// Formats the failure with both sides scaled by `decimals`.
    fn format_with_decimals(&self, decimals: &U256) -> String {
        match self {
            Self::Failure(f) => format!(
                "{} !~= {} (max delta: {}, real delta: {})",
                format_units_int(&f.left, decimals),
                format_units_int(&f.right, decimals),
                format_delta_percent(&f.max_delta),
                &f.real_delta,
            ),
            Self::Overflow => self.to_string(),
        }
    }
}
/// Result of a binary comparison assertion helper.
type ComparisonResult<'a, T> = Result<(), ComparisonAssertionError<'a, T>>;
/// Builds the failure message for an assertion error and dispatches it to
/// [`handle_assertion_result_mono`]. Marked `#[cold]`: only runs on failure.
#[cold]
fn handle_assertion_result<E>(
    ccx: &mut CheatsCtxt,
    executor: &mut dyn CheatcodesExecutor,
    err: E,
    error_formatter: Option<&dyn Fn(&E) -> String>,
    error_msg: Option<&str>,
) -> Result {
    // Fall back to a generic message when the cheatcode carried no custom one.
    let base = error_msg.unwrap_or("assertion failed");
    // Only allocate when a formatter can add detail about the failed check.
    let msg = match error_formatter {
        Some(format) => Cow::Owned(format!("{base}: {}", format(&err))),
        None => Cow::Borrowed(base),
    };
    handle_assertion_result_mono(ccx, executor, msg)
}
/// Monomorphic tail of [`handle_assertion_result`]: either reverts with `msg`
/// (when `assertions_revert` is configured) or logs it and flags the run as failed.
fn handle_assertion_result_mono(
    ccx: &mut CheatsCtxt,
    executor: &mut dyn CheatcodesExecutor,
    msg: Cow<'_, str>,
) -> Result {
    if ccx.state.config.assertions_revert {
        Err(msg.into_owned().into())
    } else {
        // Non-reverting mode: print the message and mark global failure by
        // writing to the cheatcode address' failure slot.
        executor.console_log(ccx, &msg);
        ccx.ecx.journaled_state.sstore(CHEATCODE_ADDRESS, GLOBAL_FAIL_SLOT, U256::from(1))?;
        Ok(Default::default())
    }
}
/// Implements [crate::Cheatcode] for pairs of cheatcodes.
///
/// Accepts a list of pairs of cheatcodes, where the first cheatcode is the one that doesn't contain
/// a custom error message, and the second one contains it at `error` field.
///
/// Passed `args` are the common arguments for both cheatcode structs (excluding `error` field).
///
/// Macro also accepts an optional closure that formats the error returned by the assertion.
macro_rules! impl_assertions {
    // `false` literal: no error formatter at all (plain boolean assertions).
    (|$($arg:ident),*| $body:expr, false, $(($no_error:ident, $with_error:ident)),* $(,)?) => {
        impl_assertions! { @args_tt |($($arg),*)| $body, None, $(($no_error, $with_error)),* }
    };
    // No formatter given: default to `ToString::to_string` on the error.
    (|$($arg:ident),*| $body:expr, $(($no_error:ident, $with_error:ident)),* $(,)?) => {
        impl_assertions! { @args_tt |($($arg),*)| $body, Some(&ToString::to_string), $(($no_error, $with_error)),* }
    };
    // Explicit formatter expression.
    (|$($arg:ident),*| $body:expr, $error_formatter:expr, $(($no_error:ident, $with_error:ident)),* $(,)?) => {
        impl_assertions! { @args_tt |($($arg),*)| $body, Some(&$error_formatter), $(($no_error, $with_error)),* }
    };
    // We convert args to `tt` and later expand them back into tuple to allow usage of expanded args inside of
    // each assertion type context.
    (@args_tt |$args:tt| $body:expr, $error_formatter:expr, $(($no_error:ident, $with_error:ident)),* $(,)?) => {
        $(
            impl_assertions! { @impl $no_error, $with_error, $args, $body, $error_formatter }
        )*
    };
    // Emits the two `Cheatcode` impls for one (plain, with-error) pair.
    (@impl $no_error:ident, $with_error:ident, ($($arg:ident),*), $body:expr, $error_formatter:expr) => {
        impl crate::Cheatcode for $no_error {
            fn apply_full(
                &self,
                ccx: &mut CheatsCtxt,
                executor: &mut dyn CheatcodesExecutor,
            ) -> Result {
                let Self { $($arg),* } = self;
                match $body {
                    Ok(()) => Ok(Default::default()),
                    Err(err) => handle_assertion_result(ccx, executor, err, $error_formatter, None)
                }
            }
        }
        impl crate::Cheatcode for $with_error {
            fn apply_full(
                &self,
                ccx: &mut CheatsCtxt,
                executor: &mut dyn CheatcodesExecutor,
            ) -> Result {
                let Self { $($arg,)* error } = self;
                match $body {
                    Ok(()) => Ok(Default::default()),
                    Err(err) => handle_assertion_result(ccx, executor, err, $error_formatter, Some(error))
                }
            }
        }
    };
}
// `vm.assertTrue` / `vm.assertFalse` — plain boolean checks, no error formatter.
impl_assertions! {
    |condition| assert_true(*condition),
    false,
    (assertTrue_0Call, assertTrue_1Call),
}
impl_assertions! {
    |condition| assert_false(*condition),
    false,
    (assertFalse_0Call, assertFalse_1Call),
}
// `vm.assertEq` over scalar values.
impl_assertions! {
    |left, right| assert_eq(left, right),
    ComparisonAssertionError::format_for_values,
    (assertEq_0Call, assertEq_1Call),
    (assertEq_2Call, assertEq_3Call),
    (assertEq_4Call, assertEq_5Call),
    (assertEq_6Call, assertEq_7Call),
    (assertEq_8Call, assertEq_9Call),
    (assertEq_10Call, assertEq_11Call),
    (assertEq_12Call, assertEq_13Call),
}
// `vm.assertEq` over arrays.
impl_assertions! {
    |left, right| assert_eq(left, right),
    ComparisonAssertionError::format_for_arrays,
    (assertEq_14Call, assertEq_15Call),
    (assertEq_16Call, assertEq_17Call),
    (assertEq_18Call, assertEq_19Call),
    (assertEq_20Call, assertEq_21Call),
    (assertEq_22Call, assertEq_23Call),
    (assertEq_24Call, assertEq_25Call),
    (assertEq_26Call, assertEq_27Call),
}
// `vm.assertEqDecimal` — operands rendered with a fixed number of decimals.
impl_assertions! {
    |left, right, decimals| assert_eq(left, right),
    |e| e.format_with_decimals(decimals),
    (assertEqDecimal_0Call, assertEqDecimal_1Call),
    (assertEqDecimal_2Call, assertEqDecimal_3Call),
}
// `vm.assertNotEq` over scalar values.
impl_assertions! {
    |left, right| assert_not_eq(left, right),
    ComparisonAssertionError::format_for_values,
    (assertNotEq_0Call, assertNotEq_1Call),
    (assertNotEq_2Call, assertNotEq_3Call),
    (assertNotEq_4Call, assertNotEq_5Call),
    (assertNotEq_6Call, assertNotEq_7Call),
    (assertNotEq_8Call, assertNotEq_9Call),
    (assertNotEq_10Call, assertNotEq_11Call),
    (assertNotEq_12Call, assertNotEq_13Call),
}
// `vm.assertNotEq` over arrays.
impl_assertions! {
    |left, right| assert_not_eq(left, right),
    ComparisonAssertionError::format_for_arrays,
    (assertNotEq_14Call, assertNotEq_15Call),
    (assertNotEq_16Call, assertNotEq_17Call),
    (assertNotEq_18Call, assertNotEq_19Call),
    (assertNotEq_20Call, assertNotEq_21Call),
    (assertNotEq_22Call, assertNotEq_23Call),
    (assertNotEq_24Call, assertNotEq_25Call),
    (assertNotEq_26Call, assertNotEq_27Call),
}
impl_assertions! {
    |left, right, decimals| assert_not_eq(left, right),
    |e| e.format_with_decimals(decimals),
    (assertNotEqDecimal_0Call, assertNotEqDecimal_1Call),
    (assertNotEqDecimal_2Call, assertNotEqDecimal_3Call),
}
// Ordering assertions (gt/ge/lt/le), plain and decimal-formatted variants.
impl_assertions! {
    |left, right| assert_gt(left, right),
    ComparisonAssertionError::format_for_values,
    (assertGt_0Call, assertGt_1Call),
    (assertGt_2Call, assertGt_3Call),
}
impl_assertions! {
    |left, right, decimals| assert_gt(left, right),
    |e| e.format_with_decimals(decimals),
    (assertGtDecimal_0Call, assertGtDecimal_1Call),
    (assertGtDecimal_2Call, assertGtDecimal_3Call),
}
impl_assertions! {
    |left, right| assert_ge(left, right),
    ComparisonAssertionError::format_for_values,
    (assertGe_0Call, assertGe_1Call),
    (assertGe_2Call, assertGe_3Call),
}
impl_assertions! {
    |left, right, decimals| assert_ge(left, right),
    |e| e.format_with_decimals(decimals),
    (assertGeDecimal_0Call, assertGeDecimal_1Call),
    (assertGeDecimal_2Call, assertGeDecimal_3Call),
}
impl_assertions! {
    |left, right| assert_lt(left, right),
    ComparisonAssertionError::format_for_values,
    (assertLt_0Call, assertLt_1Call),
    (assertLt_2Call, assertLt_3Call),
}
impl_assertions! {
    |left, right, decimals| assert_lt(left, right),
    |e| e.format_with_decimals(decimals),
    (assertLtDecimal_0Call, assertLtDecimal_1Call),
    (assertLtDecimal_2Call, assertLtDecimal_3Call),
}
impl_assertions! {
    |left, right| assert_le(left, right),
    ComparisonAssertionError::format_for_values,
    (assertLe_0Call, assertLe_1Call),
    (assertLe_2Call, assertLe_3Call),
}
impl_assertions! {
    |left, right, decimals| assert_le(left, right),
    |e| e.format_with_decimals(decimals),
    (assertLeDecimal_0Call, assertLeDecimal_1Call),
    (assertLeDecimal_2Call, assertLeDecimal_3Call),
}
// Approximate equality with an absolute tolerance.
impl_assertions! {
    |left, right, maxDelta| uint_assert_approx_eq_abs(*left, *right, *maxDelta),
    (assertApproxEqAbs_0Call, assertApproxEqAbs_1Call),
}
impl_assertions! {
    |left, right, maxDelta| int_assert_approx_eq_abs(*left, *right, *maxDelta),
    (assertApproxEqAbs_2Call, assertApproxEqAbs_3Call),
}
impl_assertions! {
    |left, right, decimals, maxDelta| uint_assert_approx_eq_abs(*left, *right, *maxDelta),
    |e| e.format_with_decimals(decimals),
    (assertApproxEqAbsDecimal_0Call, assertApproxEqAbsDecimal_1Call),
}
impl_assertions! {
    |left, right, decimals, maxDelta| int_assert_approx_eq_abs(*left, *right, *maxDelta),
    |e| e.format_with_decimals(decimals),
    (assertApproxEqAbsDecimal_2Call, assertApproxEqAbsDecimal_3Call),
}
// Approximate equality with a relative (percentage) tolerance.
impl_assertions! {
    |left, right, maxPercentDelta| uint_assert_approx_eq_rel(*left, *right, *maxPercentDelta),
    (assertApproxEqRel_0Call, assertApproxEqRel_1Call),
}
impl_assertions! {
    |left, right, maxPercentDelta| int_assert_approx_eq_rel(*left, *right, *maxPercentDelta),
    (assertApproxEqRel_2Call, assertApproxEqRel_3Call),
}
impl_assertions! {
    |left, right, decimals, maxPercentDelta| uint_assert_approx_eq_rel(*left, *right, *maxPercentDelta),
    |e| e.format_with_decimals(decimals),
    (assertApproxEqRelDecimal_0Call, assertApproxEqRelDecimal_1Call),
}
impl_assertions! {
    |left, right, decimals, maxPercentDelta| int_assert_approx_eq_rel(*left, *right, *maxPercentDelta),
    |e| e.format_with_decimals(decimals),
    (assertApproxEqRelDecimal_2Call, assertApproxEqRelDecimal_3Call),
}
/// `Ok(())` iff `condition` holds.
fn assert_true(condition: bool) -> Result<(), ()> {
    condition.then_some(()).ok_or(())
}
/// `Ok(())` iff `condition` does not hold.
fn assert_false(condition: bool) -> Result<(), ()> {
    assert_true(!condition)
}
/// `Ok(())` iff `left == right`.
fn assert_eq<'a, T: PartialEq>(left: &'a T, right: &'a T) -> ComparisonResult<'a, T> {
    match left == right {
        true => Ok(()),
        false => Err(ComparisonAssertionError { kind: AssertionKind::Eq, left, right }),
    }
}
/// `Ok(())` iff `left != right`.
fn assert_not_eq<'a, T: PartialEq>(left: &'a T, right: &'a T) -> ComparisonResult<'a, T> {
    match left != right {
        true => Ok(()),
        false => Err(ComparisonAssertionError { kind: AssertionKind::Ne, left, right }),
    }
}
/// `Ok(())` iff `left > right` (incomparable values fail).
fn assert_gt<'a, T: PartialOrd>(left: &'a T, right: &'a T) -> ComparisonResult<'a, T> {
    match left > right {
        true => Ok(()),
        false => Err(ComparisonAssertionError { kind: AssertionKind::Gt, left, right }),
    }
}
/// `Ok(())` iff `left >= right` (incomparable values fail).
fn assert_ge<'a, T: PartialOrd>(left: &'a T, right: &'a T) -> ComparisonResult<'a, T> {
    match left >= right {
        true => Ok(()),
        false => Err(ComparisonAssertionError { kind: AssertionKind::Ge, left, right }),
    }
}
/// `Ok(())` iff `left < right` (incomparable values fail).
fn assert_lt<'a, T: PartialOrd>(left: &'a T, right: &'a T) -> ComparisonResult<'a, T> {
    match left < right {
        true => Ok(()),
        false => Err(ComparisonAssertionError { kind: AssertionKind::Lt, left, right }),
    }
}
/// `Ok(())` iff `left <= right` (incomparable values fail).
fn assert_le<'a, T: PartialOrd>(left: &'a T, right: &'a T) -> ComparisonResult<'a, T> {
    match left <= right {
        true => Ok(()),
        false => Err(ComparisonAssertionError { kind: AssertionKind::Le, left, right }),
    }
}
/// Absolute difference between two signed integers, returned as an unsigned value.
fn get_delta_int(left: I256, right: I256) -> U256 {
    let (left_sign, left_abs) = left.into_sign_and_abs();
    let (right_sign, right_abs) = right.into_sign_and_abs();
    if left_sign == right_sign {
        // Same sign: the distance is the difference of magnitudes.
        if left_abs > right_abs { left_abs - right_abs } else { right_abs - left_abs }
    } else {
        // Opposite signs: the distance is the sum of magnitudes. `wrapping_add`
        // skips the overflow check; a positive magnitude (<= 2^255 - 1) plus a
        // negative magnitude (<= 2^255) cannot exceed U256::MAX, so no wrap
        // actually occurs here.
        left_abs.wrapping_add(right_abs)
    }
}
/// Calculates the relative delta for an absolute difference.
///
/// Avoids overflow in the multiplication by using [`U512`] to hold the intermediary result.
///
/// Returns [`EqRelAssertionError::Overflow`] when the scaled result does not fit
/// back into a [`U256`].
fn calc_delta_full<T>(abs_diff: U256, right: U256) -> Result<U256, EqRelAssertionError<T>> {
    // delta = abs_diff * 10^EQ_REL_DELTA_RESOLUTION / right, computed in 512 bits.
    let delta = U512::from(abs_diff) * U512::from(10).pow(U512::from(EQ_REL_DELTA_RESOLUTION))
        / U512::from(right);
    U256::checked_from_limbs_slice(delta.as_limbs()).ok_or(EqRelAssertionError::Overflow)
}
/// Asserts that `left` and `right` differ by at most `max_delta` (absolute).
fn uint_assert_approx_eq_abs(
    left: U256,
    right: U256,
    max_delta: U256,
) -> Result<(), Box<EqAbsAssertionError<U256, U256>>> {
    let real_delta = left.abs_diff(right);
    if real_delta > max_delta {
        return Err(Box::new(EqAbsAssertionError { left, right, max_delta, real_delta }));
    }
    Ok(())
}
/// Signed counterpart of [`uint_assert_approx_eq_abs`]; the distance is computed
/// via [`get_delta_int`].
fn int_assert_approx_eq_abs(
    left: I256,
    right: I256,
    max_delta: U256,
) -> Result<(), Box<EqAbsAssertionError<I256, U256>>> {
    let real_delta = get_delta_int(left, right);
    if real_delta > max_delta {
        return Err(Box::new(EqAbsAssertionError { left, right, max_delta, real_delta }));
    }
    Ok(())
}
/// Asserts that `left` and `right` differ by at most `max_delta` *relative* to
/// `right`, where deltas are scaled by `10^18` (10^18 == a ratio of 1.0).
fn uint_assert_approx_eq_rel(
    left: U256,
    right: U256,
    max_delta: U256,
) -> Result<(), EqRelAssertionError<U256>> {
    // A zero reference value makes the relative delta undefined (division by
    // zero), so only `left == 0` can succeed.
    if right.is_zero() {
        if left.is_zero() {
            return Ok(());
        } else {
            return Err(EqRelAssertionError::Failure(Box::new(EqRelAssertionFailure {
                left,
                right,
                max_delta,
                real_delta: EqRelDelta::Undefined,
            })));
        };
    }
    let delta = calc_delta_full::<U256>(left.abs_diff(right), right)?;
    if delta <= max_delta {
        Ok(())
    } else {
        Err(EqRelAssertionError::Failure(Box::new(EqRelAssertionFailure {
            left,
            right,
            max_delta,
            real_delta: EqRelDelta::Defined(delta),
        })))
    }
}
/// Signed counterpart of [`uint_assert_approx_eq_rel`]: the distance comes from
/// [`get_delta_int`] and is taken relative to `right.unsigned_abs()`.
fn int_assert_approx_eq_rel(
    left: I256,
    right: I256,
    max_delta: U256,
) -> Result<(), EqRelAssertionError<I256>> {
    // A zero reference value makes the relative delta undefined (division by
    // zero), so only `left == 0` can succeed.
    if right.is_zero() {
        if left.is_zero() {
            return Ok(());
        } else {
            return Err(EqRelAssertionError::Failure(Box::new(EqRelAssertionFailure {
                left,
                right,
                max_delta,
                real_delta: EqRelDelta::Undefined,
            })));
        }
    }
    let delta = calc_delta_full::<I256>(get_delta_int(left, right), right.unsigned_abs())?;
    if delta <= max_delta {
        Ok(())
    } else {
        Err(EqRelAssertionError::Failure(Box::new(EqRelAssertionFailure {
            left,
            right,
            max_delta,
            real_delta: EqRelDelta::Defined(delta),
        })))
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/test/revert_handlers.rs | crates/cheatcodes/src/test/revert_handlers.rs | use crate::{Error, Result};
use alloy_dyn_abi::{DynSolValue, ErrorExt};
use alloy_primitives::{Address, Bytes, address, hex};
use alloy_sol_types::{SolError, SolValue};
use foundry_common::{ContractsByArtifact, abi::get_error};
use foundry_evm_core::decode::RevertDecoder;
use revm::interpreter::{InstructionResult, return_ok};
use spec::Vm;
use super::{
assume::{AcceptableRevertParameters, AssumeNoRevert},
expect::ExpectedRevert,
};
/// For some cheatcodes we may internally change the status of the call, i.e. in `expectRevert`.
/// Solidity will see a successful call and attempt to decode the return data. Therefore, we need
/// to populate the return with dummy bytes so the decode doesn't fail.
///
/// 8192 bytes was arbitrarily chosen because it is long enough for return values up to 256 words in
/// size.
static DUMMY_CALL_OUTPUT: Bytes = Bytes::from_static(&[0u8; 8192]);
/// Same reasoning as [DUMMY_CALL_OUTPUT], but for creates: a non-zero placeholder
/// deployment address is returned instead of return data.
const DUMMY_CREATE_ADDRESS: Address = address!("0x0000000000000000000000000000000000000001");
/// Best-effort human-readable rendering of revert data: ABI-decoded string,
/// raw ASCII, or 0x-prefixed hex as a last resort.
fn stringify(data: &[u8]) -> String {
    match String::abi_decode(data) {
        Ok(decoded) => decoded,
        // ASCII bytes are always valid UTF-8, so the lossy conversion is exact here.
        Err(_) if data.is_ascii() => String::from_utf8_lossy(data).into_owned(),
        Err(_) => hex::encode_prefixed(data),
    }
}
/// Common parameters for expected or assumed reverts. Allows for code reuse.
pub(crate) trait RevertParameters {
    /// Address expected to have triggered the revert, if any.
    fn reverter(&self) -> Option<Address>;
    /// Expected revert data; `None` accepts any revert.
    fn reason(&self) -> Option<&[u8]>;
    /// Whether only the first 4 bytes (selector) of the reason are compared.
    fn partial_match(&self) -> bool;
}
impl RevertParameters for AcceptableRevertParameters {
    fn reverter(&self) -> Option<Address> {
        self.reverter
    }
    fn reason(&self) -> Option<&[u8]> {
        Some(&self.reason)
    }
    fn partial_match(&self) -> bool {
        self.partial_match
    }
}
/// Core logic for handling reverts that may or may not be expected (or assumed).
///
/// Checks, in order: the reverting address (when one is expected), then the
/// revert data against `revert_params.reason()`, trying progressively looser
/// decodings before reporting a mismatch.
fn handle_revert(
    is_cheatcode: bool,
    revert_params: &impl RevertParameters,
    status: InstructionResult,
    retdata: &Bytes,
    known_contracts: &Option<ContractsByArtifact>,
    reverter: Option<&Address>,
) -> Result<(), Error> {
    // If expected reverter address is set then check it matches the actual reverter.
    if let (Some(expected_reverter), Some(&actual_reverter)) = (revert_params.reverter(), reverter)
        && expected_reverter != actual_reverter
    {
        return Err(fmt_err!(
            "Reverter != expected reverter: {} != {}",
            actual_reverter,
            expected_reverter
        ));
    }
    let expected_reason = revert_params.reason();
    // If None, accept any revert.
    let Some(expected_reason) = expected_reason else {
        return Ok(());
    };
    if !expected_reason.is_empty() && retdata.is_empty() {
        bail!("call reverted as expected, but without data");
    }
    let mut actual_revert: Vec<u8> = retdata.to_vec();
    // Compare only the first 4 bytes if partial match.
    if revert_params.partial_match() && actual_revert.get(..4) == expected_reason.get(..4) {
        return Ok(());
    }
    // Try decoding as known errors.
    actual_revert = decode_revert(actual_revert);
    // Cheatcode reverts additionally accept a substring match on the reason.
    if actual_revert == expected_reason
        || (is_cheatcode && memchr::memmem::find(&actual_revert, expected_reason).is_some())
    {
        return Ok(());
    }
    // If expected reason is `Error(string)` then decode and compare with actual revert.
    // See <https://github.com/foundry-rs/foundry/issues/12511>
    if let Ok(e) = get_error("Error(string)")
        && let Ok(dec) = e.decode_error(expected_reason)
        && let Some(DynSolValue::String(revert_str)) = dec.body.first()
        && revert_str.as_str() == String::from_utf8_lossy(&actual_revert)
    {
        return Ok(());
    }
    // Last resort: decode both sides with all known contract ABIs (or stringify
    // them) and compare the human-readable forms.
    let (actual, expected) = if let Some(contracts) = known_contracts {
        let decoder = RevertDecoder::new().with_abis(contracts.values().map(|c| &c.abi));
        (
            &decoder.decode(actual_revert.as_slice(), Some(status)),
            &decoder.decode(expected_reason, Some(status)),
        )
    } else {
        (&stringify(&actual_revert), &stringify(expected_reason))
    };
    if expected == actual {
        return Ok(());
    }
    Err(fmt_err!("Error != expected error: {} != {}", actual, expected))
}
/// Decides whether a revert observed under `assumeNoRevert` is acceptable.
///
/// A bare `assumeNoRevert` (no recorded parameters) accepts anything; otherwise
/// the revert must match at least one recorded parameter set, and the raw
/// return data is surfaced as the error when none matches.
pub(crate) fn handle_assume_no_revert(
    assume_no_revert: &AssumeNoRevert,
    status: InstructionResult,
    retdata: &Bytes,
    known_contracts: &Option<ContractsByArtifact>,
) -> Result<()> {
    if assume_no_revert.reasons.is_empty() {
        return Ok(());
    }
    // Accept the revert as soon as any recorded parameter set matches it.
    for params in &assume_no_revert.reasons {
        if handle_revert(
            false,
            params,
            status,
            retdata,
            known_contracts,
            assume_no_revert.reverted_by.as_ref(),
        )
        .is_ok()
        {
            return Ok(());
        }
    }
    Err(retdata.clone().into())
}
/// Validates a call's (non-)revert against an `expectRevert` expectation.
///
/// On success returns the `(address, retdata)` pair to surface to the caller:
/// dummy values (see [`DUMMY_CALL_OUTPUT`] / [`DUMMY_CREATE_ADDRESS`]) so Solidity
/// can still decode a "successful" call. `count == 0` means the revert is
/// expected *not* to happen.
pub(crate) fn handle_expect_revert(
    is_cheatcode: bool,
    is_create: bool,
    internal_expect_revert: bool,
    expected_revert: &ExpectedRevert,
    status: InstructionResult,
    retdata: Bytes,
    known_contracts: &Option<ContractsByArtifact>,
) -> Result<(Option<Address>, Bytes)> {
    let success_return = || {
        if is_create {
            (Some(DUMMY_CREATE_ADDRESS), Bytes::new())
        } else {
            (None, DUMMY_CALL_OUTPUT.clone())
        }
    };
    // Check depths if it's not an expect cheatcode call and if internal expect reverts not enabled.
    if !is_cheatcode && !internal_expect_revert {
        ensure!(
            expected_revert.max_depth > expected_revert.depth,
            "call didn't revert at a lower depth than cheatcode call depth"
        );
    }
    if expected_revert.count == 0 {
        // If no specific reason or reverter is expected, we just check if it reverted
        if expected_revert.reverter.is_none() && expected_revert.reason.is_none() {
            ensure!(
                matches!(status, return_ok!()),
                "call reverted when it was expected not to revert"
            );
            return Ok(success_return());
        }
        // Flags to track if the reason and reverter match.
        let mut reason_match = expected_revert.reason.as_ref().map(|_| false);
        let mut reverter_match = expected_revert.reverter.as_ref().map(|_| false);
        // If we expect no reverts with a specific reason/reverter, but got a revert,
        // we need to check if it matches our criteria
        if !matches!(status, return_ok!()) {
            // We got a revert, but we expected 0 reverts
            // We need to check if this revert matches our expected criteria
            // Reverter check
            if let (Some(expected_reverter), Some(actual_reverter)) =
                (expected_revert.reverter, expected_revert.reverted_by)
                && expected_reverter == actual_reverter
            {
                reverter_match = Some(true);
            }
            // Reason check
            let expected_reason = expected_revert.reason();
            if let Some(expected_reason) = expected_reason {
                let mut actual_revert: Vec<u8> = retdata.to_vec();
                actual_revert = decode_revert(actual_revert);
                if actual_revert == expected_reason {
                    reason_match = Some(true);
                }
            }
            // Report the most specific mismatch: both, only reason, only reverter,
            // or a completely different revert.
            match (reason_match, reverter_match) {
                (Some(true), Some(true)) => Err(fmt_err!(
                    "expected 0 reverts with reason: {}, from address: {}, but got one",
                    stringify(expected_reason.unwrap_or_default()),
                    expected_revert.reverter.unwrap()
                )),
                (Some(true), None) => Err(fmt_err!(
                    "expected 0 reverts with reason: {}, but got one",
                    stringify(expected_reason.unwrap_or_default())
                )),
                (None, Some(true)) => Err(fmt_err!(
                    "expected 0 reverts from address: {}, but got one",
                    expected_revert.reverter.unwrap()
                )),
                _ => {
                    // The revert doesn't match our criteria, which means it's a different revert
                    // For expectRevert with count=0, any revert should fail the test
                    let decoded_revert = decode_revert(retdata.to_vec());
                    // Provide more specific error messages based on what was expected
                    if let Some(reverter) = expected_revert.reverter {
                        if expected_revert.reason.is_some() {
                            Err(fmt_err!(
                                "call reverted with '{}' from {}, but expected 0 reverts with reason '{}' from {}",
                                stringify(&decoded_revert),
                                expected_revert.reverted_by.unwrap_or_default(),
                                stringify(expected_reason.unwrap_or_default()),
                                reverter
                            ))
                        } else {
                            Err(fmt_err!(
                                "call reverted with '{}' from {}, but expected 0 reverts from {}",
                                stringify(&decoded_revert),
                                expected_revert.reverted_by.unwrap_or_default(),
                                reverter
                            ))
                        }
                    } else {
                        Err(fmt_err!(
                            "call reverted with '{}' when it was expected not to revert",
                            stringify(&decoded_revert)
                        ))
                    }
                }
            }
        } else {
            // No revert occurred, which is what we expected
            Ok(success_return())
        }
    } else {
        // count > 0: a revert must have occurred and must match the expectation.
        ensure!(!matches!(status, return_ok!()), "next call did not revert as expected");
        handle_revert(
            is_cheatcode,
            expected_revert,
            status,
            &retdata,
            known_contracts,
            expected_revert.reverted_by.as_ref(),
        )?;
        Ok(success_return())
    }
}
/// Peels one layer of `CheatcodeError(bytes)` / `Error(...)` wrapping from
/// revert data, returning the input unchanged when it is not wrapped.
fn decode_revert(revert: Vec<u8>) -> Vec<u8> {
    if let Some(selector) = revert.get(..4) {
        let selector: [u8; 4] = selector.try_into().unwrap();
        if (selector == Vm::CheatcodeError::SELECTOR
            || selector == alloy_sol_types::Revert::SELECTOR)
            && let Ok(inner) = Vec::<u8>::abi_decode(&revert[4..])
        {
            return inner;
        }
    }
    revert
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/test/expect.rs | crates/cheatcodes/src/test/expect.rs | use std::{
collections::VecDeque,
fmt::{self, Display},
};
use crate::{Cheatcode, Cheatcodes, CheatsCtxt, Error, Result, Vm::*};
use alloy_dyn_abi::{DynSolValue, EventExt};
use alloy_json_abi::Event;
use alloy_primitives::{
Address, Bytes, LogData as RawLog, U256, hex,
map::{AddressHashMap, HashMap, hash_map::Entry},
};
use foundry_common::{abi::get_indexed_event, fmt::format_token};
use foundry_evm_traces::DecodedCallLog;
use revm::{
context::JournalTr,
interpreter::{
InstructionResult, Interpreter, InterpreterAction, interpreter_types::LoopControl,
},
};
use super::revert_handlers::RevertParameters;
/// Tracks the expected calls per address.
///
/// For each address, we track the expected calls per call data. We track it in such manner
/// so that we don't mix together calldatas that only contain selectors and calldatas that contain
/// selector and arguments (partial and full matches).
///
/// This then allows us to customize the matching behavior for each call data on the
/// `ExpectedCallData` struct and track how many times we've actually seen the call on the second
/// element of the tuple.
pub type ExpectedCallTracker = HashMap<Address, HashMap<Bytes, (ExpectedCallData, u64)>>;
/// Expectation recorded by the `expectCall` cheatcodes.
#[derive(Clone, Debug)]
pub struct ExpectedCallData {
    /// The expected value sent in the call
    pub value: Option<U256>,
    /// The expected gas supplied to the call
    pub gas: Option<u64>,
    /// The expected *minimum* gas supplied to the call
    pub min_gas: Option<u64>,
    /// The number of times the call is expected to be made.
    /// If the type of call is `NonCount`, this is the lower bound for the number of calls
    /// that must be seen.
    /// If the type of call is `Count`, this is the exact number of calls that must be seen.
    pub count: u64,
    /// The type of expected call.
    pub call_type: ExpectedCallType,
}
/// The type of expected call.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ExpectedCallType {
    /// The call is expected to be made at least once.
    NonCount,
    /// The exact number of calls expected.
    Count,
}
/// The type of expected revert.
#[derive(Clone, Debug)]
pub enum ExpectedRevertKind {
    /// Expects revert from the next non-cheatcode call.
    Default,
    /// Expects revert from the next cheatcode call.
    ///
    /// The `pending_processing` flag is used to track whether we have exited
    /// `expectCheatcodeRevert` context or not.
    /// We have to track it to avoid expecting `expectCheatcodeRevert` call to revert itself.
    Cheatcode { pending_processing: bool },
}
/// Expectation recorded by the `expectRevert` cheatcodes.
#[derive(Clone, Debug)]
pub struct ExpectedRevert {
    /// The expected data returned by the revert, None being any.
    pub reason: Option<Bytes>,
    /// The depth at which the revert is expected.
    pub depth: usize,
    /// The type of expected revert.
    pub kind: ExpectedRevertKind,
    /// If true then only the first 4 bytes of expected data returned by the revert are checked.
    pub partial_match: bool,
    /// Contract expected to revert next call.
    pub reverter: Option<Address>,
    /// Address that reverted the call.
    pub reverted_by: Option<Address>,
    /// Max call depth reached during next call execution.
    pub max_depth: usize,
    /// Number of times this revert is expected.
    pub count: u64,
    /// Actual number of times this revert has been seen.
    pub actual_count: u64,
}
/// An expected emit registered by the `expectEmit` cheatcodes, together with its matching state.
#[derive(Clone, Debug)]
pub struct ExpectedEmit {
    /// The depth at which we expect this emit to have occurred
    pub depth: usize,
    /// The log we expect; `None` until filled by the first emitted event.
    pub log: Option<RawLog>,
    /// The checks to perform:
    /// ```text
    /// ┌───────┬───────┬───────┬───────┬────┐
    /// │topic 0│topic 1│topic 2│topic 3│data│
    /// └───────┴───────┴───────┴───────┴────┘
    /// ```
    pub checks: [bool; 5],
    /// If present, check originating address against this
    pub address: Option<Address>,
    /// If present, relax the requirement that topic 0 must be present. This allows anonymous
    /// events with no indexed topics to be matched.
    pub anonymous: bool,
    /// Whether the log was actually found in the subcalls
    pub found: bool,
    /// Number of times the log is expected to be emitted
    pub count: u64,
    /// Stores mismatch details if a log didn't match
    pub mismatch_error: Option<String>,
}
/// An expected contract deployment registered by `expectCreate` / `expectCreate2`.
#[derive(Clone, Debug)]
pub struct ExpectedCreate {
    /// The address that deployed the contract
    pub deployer: Address,
    /// Runtime bytecode of the contract
    pub bytecode: Bytes,
    /// Whether deployed with CREATE or CREATE2
    pub create_scheme: CreateScheme,
}
/// The scheme used for an expected contract creation.
#[derive(Clone, Debug)]
pub enum CreateScheme {
    /// Deployment via `CREATE`.
    Create,
    /// Deployment via `CREATE2`.
    Create2,
}
impl Display for CreateScheme {
    /// Formats the scheme as the corresponding EVM opcode name.
    fn fmt(&self, f: &mut fmt::Formatter) -> std::fmt::Result {
        let name = match self {
            Self::Create => "CREATE",
            Self::Create2 => "CREATE2",
        };
        f.write_str(name)
    }
}
impl From<revm::context_interface::CreateScheme> for CreateScheme {
    fn from(scheme: revm::context_interface::CreateScheme) -> Self {
        match scheme {
            revm::context_interface::CreateScheme::Create => Self::Create,
            revm::context_interface::CreateScheme::Create2 { .. } => Self::Create2,
            // Any other scheme variant is not representable here and aborts the conversion.
            _ => unimplemented!("Unsupported create scheme"),
        }
    }
}
impl CreateScheme {
    /// Returns `true` if `create_scheme` is the same kind of scheme as `self`.
    ///
    /// An inherent `eq` taking the right-hand side by value, rather than a
    /// [`PartialEq`] impl, to keep the existing call sites unchanged.
    pub fn eq(&self, create_scheme: Self) -> bool {
        // `Self::Create2 { .. }` (a rest-pattern on a unit variant) was leftover
        // noise; the plain variant pattern expresses the same match.
        matches!(
            (self, create_scheme),
            (Self::Create, Self::Create) | (Self::Create2, Self::Create2)
        )
    }
}
// `expectCall` overloads: each registers an expected call on `callee`, optionally
// constrained by calldata, `msg.value`, gas, and an exact (`Count`) or minimum
// (`NonCount`) number of occurrences.

// expectCall(callee, data): at least one matching call.
impl Cheatcode for expectCall_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { callee, data } = self;
        expect_call(state, callee, data, None, None, None, 1, ExpectedCallType::NonCount)
    }
}
// expectCall(callee, data, count): exactly `count` matching calls.
impl Cheatcode for expectCall_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { callee, data, count } = self;
        expect_call(state, callee, data, None, None, None, *count, ExpectedCallType::Count)
    }
}
// expectCall(callee, msgValue, data): additionally constrains `msg.value`.
impl Cheatcode for expectCall_2Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { callee, msgValue, data } = self;
        expect_call(state, callee, data, Some(msgValue), None, None, 1, ExpectedCallType::NonCount)
    }
}
// expectCall(callee, msgValue, data, count): value-constrained, exact count.
impl Cheatcode for expectCall_3Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { callee, msgValue, data, count } = self;
        expect_call(
            state,
            callee,
            data,
            Some(msgValue),
            None,
            None,
            *count,
            ExpectedCallType::Count,
        )
    }
}
// expectCall(callee, msgValue, gas, data): additionally constrains the exact gas.
impl Cheatcode for expectCall_4Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { callee, msgValue, gas, data } = self;
        expect_call(
            state,
            callee,
            data,
            Some(msgValue),
            Some(*gas),
            None,
            1,
            ExpectedCallType::NonCount,
        )
    }
}
// expectCall(callee, msgValue, gas, data, count): value- and gas-constrained, exact count.
impl Cheatcode for expectCall_5Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { callee, msgValue, gas, data, count } = self;
        expect_call(
            state,
            callee,
            data,
            Some(msgValue),
            Some(*gas),
            None,
            *count,
            ExpectedCallType::Count,
        )
    }
}
// `expectCallMinGas` overloads: like `expectCall`, but constrain a *minimum* gas
// instead of an exact gas amount.

// expectCallMinGas(callee, msgValue, minGas, data): at least one matching call.
impl Cheatcode for expectCallMinGas_0Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { callee, msgValue, minGas, data } = self;
        expect_call(
            state,
            callee,
            data,
            Some(msgValue),
            None,
            Some(*minGas),
            1,
            ExpectedCallType::NonCount,
        )
    }
}
// expectCallMinGas(callee, msgValue, minGas, data, count): exactly `count` matching calls.
impl Cheatcode for expectCallMinGas_1Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { callee, msgValue, minGas, data, count } = self;
        expect_call(
            state,
            callee,
            data,
            Some(msgValue),
            None,
            Some(*minGas),
            *count,
            ExpectedCallType::Count,
        )
    }
}
// `expectEmit` overloads: each registers an expected (non-anonymous) event at the
// current call depth. Topic 0 is always checked; the other checks, the emitter
// address and the expected count vary per overload.

// expectEmit(checkTopic1, checkTopic2, checkTopic3, checkData)
impl Cheatcode for expectEmit_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { checkTopic1, checkTopic2, checkTopic3, checkData } = *self;
        expect_emit(
            ccx.state,
            ccx.ecx.journaled_state.depth(),
            [true, checkTopic1, checkTopic2, checkTopic3, checkData],
            None,
            false,
            1,
        )
    }
}
// expectEmit(..., emitter): additionally pins the emitting address.
impl Cheatcode for expectEmit_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { checkTopic1, checkTopic2, checkTopic3, checkData, emitter } = *self;
        expect_emit(
            ccx.state,
            ccx.ecx.journaled_state.depth(),
            [true, checkTopic1, checkTopic2, checkTopic3, checkData],
            Some(emitter),
            false,
            1,
        )
    }
}
// expectEmit(): all topics and data are checked.
impl Cheatcode for expectEmit_2Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        expect_emit(ccx.state, ccx.ecx.journaled_state.depth(), [true; 5], None, false, 1)
    }
}
// expectEmit(emitter): all checks, pinned emitter.
impl Cheatcode for expectEmit_3Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { emitter } = *self;
        expect_emit(ccx.state, ccx.ecx.journaled_state.depth(), [true; 5], Some(emitter), false, 1)
    }
}
// expectEmit(checks..., count): expected `count` times.
impl Cheatcode for expectEmit_4Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { checkTopic1, checkTopic2, checkTopic3, checkData, count } = *self;
        expect_emit(
            ccx.state,
            ccx.ecx.journaled_state.depth(),
            [true, checkTopic1, checkTopic2, checkTopic3, checkData],
            None,
            false,
            count,
        )
    }
}
// expectEmit(checks..., emitter, count)
impl Cheatcode for expectEmit_5Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { checkTopic1, checkTopic2, checkTopic3, checkData, emitter, count } = *self;
        expect_emit(
            ccx.state,
            ccx.ecx.journaled_state.depth(),
            [true, checkTopic1, checkTopic2, checkTopic3, checkData],
            Some(emitter),
            false,
            count,
        )
    }
}
// expectEmit(count): all checks, expected `count` times.
impl Cheatcode for expectEmit_6Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { count } = *self;
        expect_emit(ccx.state, ccx.ecx.journaled_state.depth(), [true; 5], None, false, count)
    }
}
// expectEmit(emitter, count): all checks, pinned emitter, expected `count` times.
impl Cheatcode for expectEmit_7Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { emitter, count } = *self;
        expect_emit(
            ccx.state,
            ccx.ecx.journaled_state.depth(),
            [true; 5],
            Some(emitter),
            false,
            count,
        )
    }
}
// `expectEmitAnonymous` overloads: like `expectEmit`, but with `anonymous = true`
// so events without topic 0 can be matched; topic-0 checking becomes optional.

// expectEmitAnonymous(checkTopic0, ..., checkData)
impl Cheatcode for expectEmitAnonymous_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { checkTopic0, checkTopic1, checkTopic2, checkTopic3, checkData } = *self;
        expect_emit(
            ccx.state,
            ccx.ecx.journaled_state.depth(),
            [checkTopic0, checkTopic1, checkTopic2, checkTopic3, checkData],
            None,
            true,
            1,
        )
    }
}
// expectEmitAnonymous(checks..., emitter): additionally pins the emitting address.
impl Cheatcode for expectEmitAnonymous_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { checkTopic0, checkTopic1, checkTopic2, checkTopic3, checkData, emitter } = *self;
        expect_emit(
            ccx.state,
            ccx.ecx.journaled_state.depth(),
            [checkTopic0, checkTopic1, checkTopic2, checkTopic3, checkData],
            Some(emitter),
            true,
            1,
        )
    }
}
// expectEmitAnonymous(): all checks enabled.
impl Cheatcode for expectEmitAnonymous_2Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        expect_emit(ccx.state, ccx.ecx.journaled_state.depth(), [true; 5], None, true, 1)
    }
}
// expectEmitAnonymous(emitter): all checks, pinned emitter.
impl Cheatcode for expectEmitAnonymous_3Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { emitter } = *self;
        expect_emit(ccx.state, ccx.ecx.journaled_state.depth(), [true; 5], Some(emitter), true, 1)
    }
}
// Registers an expected `CREATE` deployment of `bytecode` by `deployer`.
impl Cheatcode for expectCreateCall {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { bytecode, deployer } = self;
        expect_create(state, bytecode.clone(), *deployer, CreateScheme::Create)
    }
}
// Registers an expected `CREATE2` deployment of `bytecode` by `deployer`.
impl Cheatcode for expectCreate2Call {
    fn apply(&self, state: &mut Cheatcodes) -> Result {
        let Self { bytecode, deployer } = self;
        expect_create(state, bytecode.clone(), *deployer, CreateScheme::Create2)
    }
}
// `expectRevert` overloads: each registers an expected revert for the next call at
// the current depth. Variants differ in whether they pin revert data, the reverter
// address, and/or an expected count; all use full (non-partial) data matching.

// expectRevert(): any revert.
impl Cheatcode for expectRevert_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        expect_revert(ccx.state, None, ccx.ecx.journaled_state.depth(), false, false, None, 1)
    }
}
// expectRevert(revertData): pinned revert data (fixed-size variant).
impl Cheatcode for expectRevert_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData } = self;
        expect_revert(
            ccx.state,
            Some(revertData.as_ref()),
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            None,
            1,
        )
    }
}
// expectRevert(revertData): pinned revert data (dynamic bytes variant).
impl Cheatcode for expectRevert_2Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData } = self;
        expect_revert(
            ccx.state,
            Some(revertData),
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            None,
            1,
        )
    }
}
// expectRevert(reverter): any data, pinned reverter address.
impl Cheatcode for expectRevert_3Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { reverter } = self;
        expect_revert(
            ccx.state,
            None,
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            Some(*reverter),
            1,
        )
    }
}
// expectRevert(revertData, reverter): pinned data (fixed-size) and reverter.
impl Cheatcode for expectRevert_4Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData, reverter } = self;
        expect_revert(
            ccx.state,
            Some(revertData.as_ref()),
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            Some(*reverter),
            1,
        )
    }
}
// expectRevert(revertData, reverter): pinned data (dynamic bytes) and reverter.
impl Cheatcode for expectRevert_5Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData, reverter } = self;
        expect_revert(
            ccx.state,
            Some(revertData),
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            Some(*reverter),
            1,
        )
    }
}
// expectRevert(count): any revert, expected `count` times.
impl Cheatcode for expectRevert_6Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { count } = self;
        expect_revert(ccx.state, None, ccx.ecx.journaled_state.depth(), false, false, None, *count)
    }
}
// expectRevert(revertData, count): pinned data (fixed-size), expected `count` times.
impl Cheatcode for expectRevert_7Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData, count } = self;
        expect_revert(
            ccx.state,
            Some(revertData.as_ref()),
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            None,
            *count,
        )
    }
}
// expectRevert(revertData, count): pinned data (dynamic bytes), expected `count` times.
impl Cheatcode for expectRevert_8Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData, count } = self;
        expect_revert(
            ccx.state,
            Some(revertData),
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            None,
            *count,
        )
    }
}
// expectRevert(reverter, count): pinned reverter, expected `count` times.
impl Cheatcode for expectRevert_9Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { reverter, count } = self;
        expect_revert(
            ccx.state,
            None,
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            Some(*reverter),
            *count,
        )
    }
}
// expectRevert(revertData, reverter, count): pinned data (fixed-size) and reverter, counted.
impl Cheatcode for expectRevert_10Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData, reverter, count } = self;
        expect_revert(
            ccx.state,
            Some(revertData.as_ref()),
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            Some(*reverter),
            *count,
        )
    }
}
// expectRevert(revertData, reverter, count): pinned data (dynamic bytes) and reverter, counted.
impl Cheatcode for expectRevert_11Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData, reverter, count } = self;
        expect_revert(
            ccx.state,
            Some(revertData),
            ccx.ecx.journaled_state.depth(),
            false,
            false,
            Some(*reverter),
            *count,
        )
    }
}
// `expectPartialRevert` overloads: only the first 4 bytes (selector) of the revert
// data are matched (`partial_match = true`).

// expectPartialRevert(revertData)
impl Cheatcode for expectPartialRevert_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData } = self;
        expect_revert(
            ccx.state,
            Some(revertData.as_ref()),
            ccx.ecx.journaled_state.depth(),
            false,
            true,
            None,
            1,
        )
    }
}
// expectPartialRevert(revertData, reverter): additionally pins the reverter address.
impl Cheatcode for expectPartialRevert_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData, reverter } = self;
        expect_revert(
            ccx.state,
            Some(revertData.as_ref()),
            ccx.ecx.journaled_state.depth(),
            false,
            true,
            Some(*reverter),
            1,
        )
    }
}
// `_expectCheatcodeRevert` overloads: expect the *next cheatcode call* itself to
// revert (`cheatcode = true`), rather than a regular external call.

// _expectCheatcodeRevert(): any cheatcode revert.
impl Cheatcode for _expectCheatcodeRevert_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        expect_revert(ccx.state, None, ccx.ecx.journaled_state.depth(), true, false, None, 1)
    }
}
// _expectCheatcodeRevert(revertData): pinned revert data (fixed-size variant).
impl Cheatcode for _expectCheatcodeRevert_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData } = self;
        expect_revert(
            ccx.state,
            Some(revertData.as_ref()),
            ccx.ecx.journaled_state.depth(),
            true,
            false,
            None,
            1,
        )
    }
}
// _expectCheatcodeRevert(revertData): pinned revert data (dynamic bytes variant).
impl Cheatcode for _expectCheatcodeRevert_2Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { revertData } = self;
        expect_revert(
            ccx.state,
            Some(revertData),
            ccx.ecx.journaled_state.depth(),
            true,
            false,
            None,
            1,
        )
    }
}
// Restricts memory writes in the *current* call context to the range `[min, max)`.
impl Cheatcode for expectSafeMemoryCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { min, max } = *self;
        expect_safe_memory(ccx.state, min, max, ccx.ecx.journaled_state.depth().try_into()?)
    }
}
// Clears any memory-write restriction registered for the current depth.
impl Cheatcode for stopExpectSafeMemoryCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self {} = self;
        ccx.state.allowed_mem_writes.remove(&ccx.ecx.journaled_state.depth().try_into()?);
        Ok(Default::default())
    }
}
// Restricts memory writes in the *next* call context (depth + 1).
impl Cheatcode for expectSafeMemoryCallCall {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { min, max } = *self;
        expect_safe_memory(ccx.state, min, max, (ccx.ecx.journaled_state.depth() + 1).try_into()?)
    }
}
impl RevertParameters for ExpectedRevert {
    fn reverter(&self) -> Option<Address> {
        self.reverter
    }
    fn reason(&self) -> Option<&[u8]> {
        // Deref the owned `Bytes` all the way down to the underlying `&[u8]` slice.
        self.reason.as_ref().map(|b| &***b)
    }
    fn partial_match(&self) -> bool {
        self.partial_match
    }
}
/// Handles expected calls specified by the `expectCall` cheatcodes.
///
/// It can handle calls in two ways:
/// - If the cheatcode was used with a `count` argument, it will expect the call to be made exactly
///   `count` times. e.g. `vm.expectCall(address(0xc4f3), abi.encodeWithSelector(0xd34db33f), 4)`
///   will expect the call to address(0xc4f3) with selector `0xd34db33f` to be made exactly 4 times.
///   If the amount of calls is less or more than 4, the test will fail. Note that the `count`
///   argument cannot be overwritten with another `vm.expectCall`. If this is attempted,
///   `expectCall` will revert.
/// - If the cheatcode was used without a `count` argument, it will expect the call to be made at
///   least the amount of times the cheatcode was called. This means that `vm.expectCall` without a
///   count argument can be called many times, but cannot be called with a `count` argument after it
///   was called without one. If the latter happens, `expectCall` will revert. e.g
///   `vm.expectCall(address(0xc4f3), abi.encodeWithSelector(0xd34db33f))` will expect the call to
///   address(0xc4f3) and selector `0xd34db33f` to be made at least once. If the amount of calls is
///   0, the test will fail. If the call is made more than once, the test will pass.
#[expect(clippy::too_many_arguments)] // It is what it is
fn expect_call(
    state: &mut Cheatcodes,
    target: &Address,
    calldata: &Bytes,
    value: Option<&U256>,
    mut gas: Option<u64>,
    mut min_gas: Option<u64>,
    count: u64,
    call_type: ExpectedCallType,
) -> Result {
    let expecteds = state.expected_calls.entry(*target).or_default();
    if let Some(val) = value
        && *val > U256::ZERO
    {
        // If the value of the transaction is non-zero, the EVM adds a call stipend of 2300 gas
        // to ensure that the basic fallback function can be called.
        let positive_value_cost_stipend = 2300;
        if let Some(gas) = &mut gas {
            *gas += positive_value_cost_stipend;
        }
        if let Some(min_gas) = &mut min_gas {
            *min_gas += positive_value_cost_stipend;
        }
    }
    match call_type {
        ExpectedCallType::Count => {
            // Get the expected calls for this target.
            // In this case, as we're using counted expectCalls, we should not be able to set them
            // more than once.
            ensure!(
                !expecteds.contains_key(calldata),
                // Fixed typo in the user-facing error message: "bet" -> "be".
                "counted expected calls can only be set once"
            );
            expecteds.insert(
                calldata.clone(),
                (ExpectedCallData { value: value.copied(), gas, min_gas, count, call_type }, 0),
            );
        }
        ExpectedCallType::NonCount => {
            // Check if the expected calldata exists.
            // If it does, increment the count by one as we expect to see it one more time.
            match expecteds.entry(calldata.clone()) {
                Entry::Occupied(mut entry) => {
                    let (expected, _) = entry.get_mut();
                    // Ensure we're not overwriting a counted expectCall.
                    ensure!(
                        expected.call_type == ExpectedCallType::NonCount,
                        "cannot overwrite a counted expectCall with a non-counted expectCall"
                    );
                    expected.count += 1;
                }
                // If it does not exist, then create it.
                Entry::Vacant(entry) => {
                    entry.insert((
                        ExpectedCallData { value: value.copied(), gas, min_gas, count, call_type },
                        0,
                    ));
                }
            }
        }
    }
    Ok(Default::default())
}
/// Registers a new expected emit at `depth`.
///
/// New expectations are inserted *before* any already-matched emits so that the
/// relative order of matched emits (kept at the back of the queue) is preserved.
fn expect_emit(
    state: &mut Cheatcodes,
    depth: usize,
    checks: [bool; 5],
    address: Option<Address>,
    anonymous: bool,
    count: u64,
) -> Result {
    let expected_emit = ExpectedEmit {
        depth,
        checks,
        address,
        found: false,
        log: None,
        anonymous,
        count,
        mismatch_error: None,
    };
    if let Some(found_emit_pos) = state.expected_emits.iter().position(|(emit, _)| emit.found) {
        // The order of emits already found (back of queue) should not be modified, hence push any
        // new emit before first found emit.
        state.expected_emits.insert(found_emit_pos, (expected_emit, Default::default()));
    } else {
        // If no expected emits then push new one at the back of queue.
        state.expected_emits.push_back((expected_emit, Default::default()));
    }
    Ok(Default::default())
}
/// Processes an emitted `log` against the queue of expected emits, either filling an
/// unfilled expectation with the log's data or checking the log against the next
/// unmatched expectation.
pub(crate) fn handle_expect_emit(
    state: &mut Cheatcodes,
    log: &alloy_primitives::Log,
    mut interpreter: Option<&mut Interpreter>,
) -> Option<&'static str> {
    // This function returns an optional string indicating a failure reason.
    // If the string is `Some`, it indicates that the expectation failed with the provided reason.
    let mut should_fail = None;
    // Fill or check the expected emits.
    // We expect for emit checks to be filled as they're declared (from oldest to newest),
    // so we fill them and push them to the back of the queue.
    // If the user has properly filled all the emits, they'll end up in their original order.
    // If not, the queue will not be in the order the events will be intended to be filled,
    // and we'll be able to later detect this and bail.
    // First, we can return early if all events have been matched.
    // This allows a contract to arbitrarily emit more events than expected (additive behavior),
    // as long as all the previous events were matched in the order they were expected to be.
    if state.expected_emits.iter().all(|(expected, _)| expected.found) {
        return should_fail;
    }
    // Check count=0 expectations against this log - fail immediately if violated
    for (expected_emit, _) in &state.expected_emits {
        if expected_emit.count == 0
            && !expected_emit.found
            && let Some(expected_log) = &expected_emit.log
            && checks_topics_and_data(expected_emit.checks, expected_log, log)
            // Check the emitter address, if one was pinned
            && (expected_emit.address.is_none() || expected_emit.address == Some(log.address))
        {
            if let Some(interpreter) = &mut interpreter {
                // This event was emitted but we expected it NOT to be (count=0)
                // Fail immediately
                interpreter.bytecode.set_action(InterpreterAction::new_return(
                    InstructionResult::Revert,
                    Error::encode("log emitted but expected 0 times"),
                    interpreter.gas,
                ));
            } else {
                should_fail = Some("log emitted but expected 0 times");
            }
            return should_fail;
        }
    }
    let should_fill_logs = state.expected_emits.iter().any(|(expected, _)| expected.log.is_none());
    let index_to_fill_or_check = if should_fill_logs {
        // If there's anything to fill, we start with the last event to match in the queue
        // (without taking into account events already matched).
        state
            .expected_emits
            .iter()
            .position(|(emit, _)| emit.found)
            .unwrap_or(state.expected_emits.len())
            .saturating_sub(1)
    } else {
        // if all expected logs are filled, check any unmatched event
        // in the declared order, so we start from the front (like a queue).
        // Skip count=0 expectations as they are handled separately above
        state.expected_emits.iter().position(|(emit, _)| !emit.found && emit.count > 0).unwrap_or(0)
    };
    // If there are only count=0 expectations left, we can return early
    if !should_fill_logs
        && state.expected_emits.iter().all(|(emit, _)| emit.found || emit.count == 0)
    {
        return should_fail;
    }
    let (mut event_to_fill_or_check, mut count_map) = state
        .expected_emits
        .remove(index_to_fill_or_check)
        .expect("we should have an emit to fill or check");
    let Some(expected) = &event_to_fill_or_check.log else {
        // Unless the caller is trying to match an anonymous event, the first topic must be
        // filled.
        if event_to_fill_or_check.anonymous || !log.topics().is_empty() {
            event_to_fill_or_check.log = Some(log.data.clone());
            // If we only filled the expected log then we put it back at the same position.
            state
                .expected_emits
                .insert(index_to_fill_or_check, (event_to_fill_or_check, count_map));
        } else if let Some(interpreter) = &mut interpreter {
            interpreter.bytecode.set_action(InterpreterAction::new_return(
                InstructionResult::Revert,
                Error::encode("use vm.expectEmitAnonymous to match anonymous events"),
                interpreter.gas,
            ));
        } else {
            should_fail = Some("use vm.expectEmitAnonymous to match anonymous events");
        }
        return should_fail;
    };
    // Increment/set `count` for `log.address` and `log.data`
    match count_map.entry(log.address) {
        Entry::Occupied(mut entry) => {
            let log_count_map = entry.get_mut();
            log_count_map.insert(&log.data);
        }
        Entry::Vacant(entry) => {
            let mut log_count_map = LogCountMap::new(&event_to_fill_or_check);
            if log_count_map.satisfies_checks(&log.data) {
                log_count_map.insert(&log.data);
                entry.insert(log_count_map);
            }
        }
    }
    event_to_fill_or_check.found = || -> bool {
        if !checks_topics_and_data(event_to_fill_or_check.checks, expected, log) {
            // Store detailed mismatch information
            // Try to decode the events if we have a signature identifier
            let (expected_decoded, actual_decoded) = if let Some(signatures_identifier) =
                state.signatures_identifier()
                && !event_to_fill_or_check.anonymous
            {
                (
                    decode_event(signatures_identifier, expected),
                    decode_event(signatures_identifier, log),
                )
            } else {
                (None, None)
            };
            event_to_fill_or_check.mismatch_error = Some(get_emit_mismatch_message(
                event_to_fill_or_check.checks,
                expected,
                log,
                event_to_fill_or_check.anonymous,
                expected_decoded.as_ref(),
                actual_decoded.as_ref(),
            ));
            return false;
        }
        // Maybe match source address.
        // NOTE(review): this compares checksummed strings; a plain `addr != log.address`
        // comparison would be equivalent and cheaper — confirm and simplify.
        if event_to_fill_or_check
            .address
            .is_some_and(|addr| addr.to_checksum(None) != log.address.to_checksum(None))
        {
            event_to_fill_or_check.mismatch_error = Some(format!(
                "log emitter mismatch: expected={:#x}, got={:#x}",
                event_to_fill_or_check.address.unwrap(),
                log.address
            ));
            return false;
        }
        // The emit is only considered found once it has been seen `count` times.
        let expected_count = event_to_fill_or_check.count;
        match event_to_fill_or_check.address {
            Some(emitter) => count_map
                .get(&emitter)
                .is_some_and(|log_map| log_map.count(&log.data) >= expected_count),
            None => count_map
                .values()
                .find(|log_map| log_map.satisfies_checks(&log.data))
                .is_some_and(|map| map.count(&log.data) >= expected_count),
        }
    }();
    // If we found the event, we can push it to the back of the queue
    // and begin expecting the next event.
    if event_to_fill_or_check.found {
        state.expected_emits.push_back((event_to_fill_or_check, count_map));
    } else {
        // We did not match this event, so we need to keep waiting for the right one to
        // appear.
        state.expected_emits.push_front((event_to_fill_or_check, count_map));
    }
    should_fail
}
/// Tracker for expected emits specified by the `expectEmit` cheatcodes.
///
/// The second element of the tuple counts the number of times the log has been emitted by a
/// particular address.
pub type ExpectedEmitTracker = VecDeque<(ExpectedEmit, AddressHashMap<LogCountMap>)>;
/// Counts how many times each concrete log payload matching an expectation has been emitted.
#[derive(Clone, Debug, Default)]
pub struct LogCountMap {
    // The topic/data checks copied from the originating `ExpectedEmit`.
    checks: [bool; 5],
    // The expected log the checks are evaluated against.
    expected_log: RawLog,
    // Emission count per concrete emitted log.
    map: HashMap<RawLog, u64>,
}
impl LogCountMap {
/// Instantiates `LogCountMap`.
fn new(expected_emit: &ExpectedEmit) -> Self {
Self {
checks: expected_emit.checks,
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/test/assume.rs | crates/cheatcodes/src/test/assume.rs | use crate::{Cheatcode, Cheatcodes, CheatsCtxt, Error, Result};
use alloy_primitives::Address;
use foundry_evm_core::constants::MAGIC_ASSUME;
use spec::Vm::{
PotentialRevert, assumeCall, assumeNoRevert_0Call, assumeNoRevert_1Call, assumeNoRevert_2Call,
};
use std::fmt::Debug;
/// State for a pending `assumeNoRevert` registered for the next call.
#[derive(Clone, Debug)]
pub struct AssumeNoRevert {
    /// The call depth at which the cheatcode was added.
    pub depth: usize,
    /// Acceptable revert parameters for the next call, to be thrown out if they are encountered;
    /// reverts with parameters not specified here will count as normal reverts and not rejects
    /// towards the counter.
    pub reasons: Vec<AcceptableRevertParameters>,
    /// Address that reverted the call.
    pub reverted_by: Option<Address>,
}
/// Parameters for a single anticipated revert, to be thrown out if encountered.
#[derive(Clone, Debug)]
pub struct AcceptableRevertParameters {
    /// The expected revert data returned by the revert
    pub reason: Vec<u8>,
    /// If true then only the first 4 bytes of expected data returned by the revert are checked.
    pub partial_match: bool,
    /// Contract expected to revert next call. `None` accepts any reverter.
    pub reverter: Option<Address>,
}
impl AcceptableRevertParameters {
    /// Builds parameters from a `PotentialRevert`, treating the zero address as
    /// "any reverter".
    //
    // NOTE: an inherent `from`, not a `From` impl, since it converts from a reference.
    fn from(potential_revert: &PotentialRevert) -> Self {
        Self {
            reason: potential_revert.revertData.to_vec(),
            partial_match: potential_revert.partialMatch,
            reverter: if potential_revert.reverter == Address::ZERO {
                None
            } else {
                Some(potential_revert.reverter)
            },
        }
    }
}
impl Cheatcode for assumeCall {
fn apply(&self, _state: &mut Cheatcodes) -> Result {
let Self { condition } = self;
if *condition { Ok(Default::default()) } else { Err(Error::from(MAGIC_ASSUME)) }
}
}
// assumeNoRevert(): any revert in the next call rejects the fuzz run.
impl Cheatcode for assumeNoRevert_0Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        assume_no_revert(ccx.state, ccx.ecx.journaled_state.depth, vec![])
    }
}
// assumeNoRevert(potentialRevert): only the given revert shape is tolerated.
impl Cheatcode for assumeNoRevert_1Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { potentialRevert } = self;
        assume_no_revert(
            ccx.state,
            ccx.ecx.journaled_state.depth,
            vec![AcceptableRevertParameters::from(potentialRevert)],
        )
    }
}
// assumeNoRevert(potentialReverts[]): any of the given revert shapes is tolerated.
impl Cheatcode for assumeNoRevert_2Call {
    fn apply_stateful(&self, ccx: &mut CheatsCtxt) -> Result {
        let Self { potentialReverts } = self;
        assume_no_revert(
            ccx.state,
            ccx.ecx.journaled_state.depth,
            potentialReverts.iter().map(AcceptableRevertParameters::from).collect(),
        )
    }
}
/// Registers an `AssumeNoRevert` for the next call, erroring if one is already pending.
fn assume_no_revert(
    state: &mut Cheatcodes,
    depth: usize,
    parameters: Vec<AcceptableRevertParameters>,
) -> Result {
    ensure!(
        state.assume_no_revert.is_none(),
        "you must make another external call prior to calling assumeNoRevert again"
    );
    state.assume_no_revert = Some(AssumeNoRevert { depth, reasons: parameters, reverted_by: None });
    Ok(Default::default())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/inspector/analysis.rs | crates/cheatcodes/src/inspector/analysis.rs | //! Cheatcode information, extracted from the syntactic and semantic analysis of the sources.
use foundry_common::fmt::{StructDefinitions, TypeDefMap};
use solar::sema::{self, Compiler, Gcx, hir};
use std::sync::{Arc, OnceLock};
use thiserror::Error;
/// Represents a failure in one of the lazy analysis steps.
#[derive(Debug, Clone, PartialEq, Eq, Error)]
pub enum AnalysisError {
/// Indicates that the resolution of struct definitions failed.
#[error("unable to resolve struct definitions")]
StructDefinitionsResolutionFailed,
}
/// Provides cached, on-demand syntactic and semantic analysis of a completed `Compiler` instance.
///
/// This struct acts as a facade over the `Compiler`, offering lazy-loaded analysis for tools like
/// cheatcode inspectors. It assumes the compiler has already completed parsing and lowering.
///
/// # Adding new analysis types
///
/// To add support for a new type of cached analysis, follow this pattern:
///
/// 1. Add a new `OnceLock<Result<T, AnalysisError>>` field to `CheatcodeAnalysis`, where `T` is
///    the type of the data that you are adding support for.
///
/// 2. Implement a getter method for the new field. Inside the getter, use
///    `self.field.get_or_init()` to compute and cache the value on the first call.
///
/// 3. Inside the closure passed to `get_or_init()`, create a dedicated visitor to traverse the HIR
///    using `self.compiler.enter()` and collect the required data.
///
/// This ensures all analyses remain lazy, efficient, and consistent with the existing design.
#[derive(Clone)]
pub struct CheatcodeAnalysis {
    /// A shared, thread-safe reference to solar's `Compiler` instance.
    pub compiler: Arc<Compiler>,
    /// Cached struct definitions in the sources.
    /// Used to keep field order when parsing JSON values.
    struct_defs: OnceLock<Result<StructDefinitions, AnalysisError>>,
}
// Manual `Debug`: the compiler handle is not `Debug`, so it is rendered as a placeholder.
impl std::fmt::Debug for CheatcodeAnalysis {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("CheatcodeAnalysis")
            .field("compiler", &"<compiler>")
            .field("struct_defs", &self.struct_defs)
            .finish()
    }
}
impl CheatcodeAnalysis {
pub fn new(compiler: Arc<solar::sema::Compiler>) -> Self {
Self { compiler, struct_defs: OnceLock::new() }
}
/// Lazily initializes and returns the struct definitions.
pub fn struct_defs(&self) -> Result<&StructDefinitions, &AnalysisError> {
self.struct_defs
.get_or_init(|| {
self.compiler.enter(|compiler| {
let gcx = compiler.gcx();
StructDefinitionResolver::new(gcx).process()
})
})
.as_ref()
}
}
// -- STRUCT DEFINITIONS -------------------------------------------------------
/// Generates a map of all struct definitions from the HIR using the resolved `Ty` system.
struct StructDefinitionResolver<'gcx> {
    // Global compiler context used to resolve types.
    gcx: Gcx<'gcx>,
    // Accumulator for the resolved definitions, keyed by fully qualified name.
    struct_defs: TypeDefMap,
}
impl<'gcx> StructDefinitionResolver<'gcx> {
    /// Constructs a new generator.
    pub fn new(gcx: Gcx<'gcx>) -> Self {
        Self { gcx, struct_defs: TypeDefMap::new() }
    }
    /// Processes the HIR to generate all the struct definitions.
    pub fn process(mut self) -> Result<StructDefinitions, AnalysisError> {
        for id in self.hir().strukt_ids() {
            self.resolve_struct_definition(id)?;
        }
        Ok(self.struct_defs.into())
    }
    /// Convenience accessor for the HIR.
    #[inline]
    fn hir(&self) -> &'gcx hir::Hir<'gcx> {
        &self.gcx.hir
    }
    /// The recursive core of the generator. Resolves a single struct and adds it to the cache.
    fn resolve_struct_definition(&mut self, id: hir::StructId) -> Result<(), AnalysisError> {
        let qualified_name = self.get_fully_qualified_name(id);
        // Already resolved (or currently being resolved via recursion) — nothing to do.
        if self.struct_defs.contains_key(&qualified_name) {
            return Ok(());
        }
        let hir = self.hir();
        let strukt = hir.strukt(id);
        let mut fields = Vec::with_capacity(strukt.fields.len());
        for &field_id in strukt.fields {
            let var = hir.variable(field_id);
            let name =
                var.name.ok_or(AnalysisError::StructDefinitionsResolutionFailed)?.to_string();
            // Fields whose types cannot be stringified (unsupported kinds) are silently skipped.
            if let Some(ty_str) = self.ty_to_string(self.gcx.type_of_hir_ty(&var.ty)) {
                fields.push((name, ty_str));
            }
        }
        // Only insert if there are fields, to avoid adding empty entries
        if !fields.is_empty() {
            self.struct_defs.insert(qualified_name, fields);
        }
        Ok(())
    }
    /// Converts a resolved `Ty` into its canonical string representation.
    ///
    /// Returns `None` for unsupported type kinds (mappings, functions, errors, ...).
    fn ty_to_string(&mut self, ty: sema::Ty<'gcx>) -> Option<String> {
        let ty = ty.peel_refs();
        let res = match ty.kind {
            sema::ty::TyKind::Elementary(e) => e.to_string(),
            sema::ty::TyKind::Array(ty, size) => {
                let inner_type = self.ty_to_string(ty)?;
                format!("{inner_type}[{size}]")
            }
            sema::ty::TyKind::DynArray(ty) => {
                let inner_type = self.ty_to_string(ty)?;
                format!("{inner_type}[]")
            }
            sema::ty::TyKind::Struct(id) => {
                // Ensure the nested struct is resolved before proceeding.
                self.resolve_struct_definition(id).ok()?;
                self.get_fully_qualified_name(id)
            }
            sema::ty::TyKind::Udvt(ty, _) => self.ty_to_string(ty)?,
            // For now, map enums to `uint8`
            sema::ty::TyKind::Enum(_) => "uint8".to_string(),
            // For now, map contracts to `address`
            sema::ty::TyKind::Contract(_) => "address".to_string(),
            // Explicitly disallow unsupported types
            _ => return None,
        };
        Some(res)
    }
    /// Helper to get the fully qualified name `Contract.Struct`.
    fn get_fully_qualified_name(&self, id: hir::StructId) -> String {
        let hir = self.hir();
        let strukt = hir.strukt(id);
        if let Some(contract_id) = strukt.contract {
            format!("{}.{}", hir.contract(contract_id).name.as_str(), strukt.name.as_str())
        } else {
            strukt.name.as_str().into()
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/src/inspector/utils.rs | crates/cheatcodes/src/inspector/utils.rs | use super::Ecx;
use crate::inspector::Cheatcodes;
use alloy_primitives::{Address, Bytes, U256};
use revm::interpreter::{CreateInputs, CreateScheme};
/// Common behaviour of legacy and EOF create inputs.
pub(crate) trait CommonCreateInput {
    /// The account initiating the creation.
    fn caller(&self) -> Address;
    /// Gas limit for the creation.
    fn gas_limit(&self) -> u64;
    /// Value sent along with the creation.
    fn value(&self) -> U256;
    /// The init code to be executed.
    fn init_code(&self) -> Bytes;
    /// The creation scheme, if one is known.
    fn scheme(&self) -> Option<CreateScheme>;
    /// Overrides the caller address.
    fn set_caller(&mut self, caller: Address);
    /// Emits a debug log entry for the last recorded broadcastable transaction.
    fn log_debug(&self, cheatcode: &mut Cheatcodes, scheme: &CreateScheme);
    /// Computes the to-be-created address and allows cheatcodes on it.
    fn allow_cheatcodes(&self, cheatcodes: &mut Cheatcodes, ecx: Ecx) -> Address;
}
impl CommonCreateInput for &mut CreateInputs {
    /// The account initiating the creation.
    fn caller(&self) -> Address {
        self.caller
    }

    /// Gas limit available for the creation.
    fn gas_limit(&self) -> u64 {
        self.gas_limit
    }

    /// Value transferred along with the creation.
    fn value(&self) -> U256 {
        self.value
    }

    /// The init code executed to deploy the contract.
    fn init_code(&self) -> Bytes {
        self.init_code.clone()
    }

    /// Legacy create inputs always carry a scheme.
    fn scheme(&self) -> Option<CreateScheme> {
        Some(self.scheme)
    }

    /// Overrides the caller of the creation.
    fn set_caller(&mut self, caller: Address) {
        self.caller = caller;
    }

    /// Emits a debug log describing the latest broadcastable creation transaction.
    fn log_debug(&self, cheatcode: &mut Cheatcodes, scheme: &CreateScheme) {
        let kind = match scheme {
            CreateScheme::Create => "create",
            CreateScheme::Create2 { .. } => "create2",
            CreateScheme::Custom { .. } => "custom",
        };
        // Avoid panicking if no broadcastable transaction has been recorded yet:
        // the previous `.back().unwrap()` would abort the entire run in that case.
        if let Some(tx) = cheatcode.broadcastable_transactions.back() {
            debug!(target: "cheatcodes", tx=?tx, "broadcastable {kind}");
        } else {
            debug!(target: "cheatcodes", "broadcastable {kind} (no recorded transaction)");
        }
    }

    /// Enables cheatcodes on the to-be-created contract and returns its predicted address.
    fn allow_cheatcodes(&self, cheatcodes: &mut Cheatcodes, ecx: Ecx) -> Address {
        // The nonce *before* the CREATE executes determines the created address.
        let old_nonce = ecx
            .journaled_state
            .state
            .get(&self.caller)
            .map(|acc| acc.info.nonce)
            .unwrap_or_default();
        let created_address = self.created_address(old_nonce);
        cheatcodes.allow_cheatcodes_on_create(ecx, self.caller, created_address);
        created_address
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/spec/src/vm.rs | crates/cheatcodes/spec/src/vm.rs | // We don't document function parameters individually so we can't enable `missing_docs` for this
// module. Instead, we emit custom diagnostics in `#[derive(Cheatcode)]`.
#![allow(missing_docs)]
use super::*;
use crate::Vm::ForgeContext;
use alloy_sol_types::sol;
use foundry_macros::Cheatcode;
sol! {
// Cheatcodes are marked as view/pure/none using the following rules:
// 0. A call's observable behaviour includes its return value, logs, reverts and state writes,
// 1. If you can influence a later call's observable behaviour, you're neither `view` nor `pure`
// (you are modifying some state be it the EVM, interpreter, filesystem, etc),
// 2. Otherwise if you can be influenced by an earlier call, or if reading some state, you're `view`,
// 3. Otherwise you're `pure`.
/// Foundry cheatcodes interface.
#[derive(Debug, Cheatcode)] // Keep this list small to avoid unnecessary bloat.
#[sol(abi)]
interface Vm {
// ======== Types ========
/// Error thrown by cheatcodes.
error CheatcodeError(string message);
/// A modification applied to either `msg.sender` or `tx.origin`. Returned by `readCallers`.
enum CallerMode {
/// No caller modification is currently active.
None,
/// A one time broadcast triggered by a `vm.broadcast()` call is currently active.
Broadcast,
/// A recurrent broadcast triggered by a `vm.startBroadcast()` call is currently active.
RecurrentBroadcast,
/// A one time prank triggered by a `vm.prank()` call is currently active.
Prank,
/// A recurrent prank triggered by a `vm.startPrank()` call is currently active.
RecurrentPrank,
}
/// The kind of account access that occurred.
enum AccountAccessKind {
/// The account was called.
Call,
/// The account was called via delegatecall.
DelegateCall,
/// The account was called via callcode.
CallCode,
/// The account was called via staticcall.
StaticCall,
/// The account was created.
Create,
/// The account was selfdestructed.
SelfDestruct,
/// Synthetic access indicating the current context has resumed after a previous sub-context (AccountAccess).
Resume,
/// The account's balance was read.
Balance,
/// The account's codesize was read.
Extcodesize,
/// The account's codehash was read.
Extcodehash,
/// The account's code was copied.
Extcodecopy,
}
/// Forge execution contexts.
enum ForgeContext {
/// Test group execution context (test, coverage or snapshot).
TestGroup,
/// `forge test` execution context.
Test,
/// `forge coverage` execution context.
Coverage,
/// `forge snapshot` execution context.
Snapshot,
/// Script group execution context (dry run, broadcast or resume).
ScriptGroup,
/// `forge script` execution context.
ScriptDryRun,
/// `forge script --broadcast` execution context.
ScriptBroadcast,
/// `forge script --resume` execution context.
ScriptResume,
/// Unknown `forge` execution context.
Unknown,
}
/// An Ethereum log. Returned by `getRecordedLogs`.
struct Log {
/// The topics of the log, including the signature, if any.
bytes32[] topics;
/// The raw data of the log.
bytes data;
/// The address of the log's emitter.
address emitter;
}
/// Gas used. Returned by `lastCallGas`.
struct Gas {
/// The gas limit of the call.
uint64 gasLimit;
/// The total gas used.
uint64 gasTotalUsed;
/// DEPRECATED: The amount of gas used for memory expansion. Ref: <https://github.com/foundry-rs/foundry/pull/7934#pullrequestreview-2069236939>
uint64 gasMemoryUsed;
/// The amount of gas refunded.
int64 gasRefunded;
/// The amount of gas remaining.
uint64 gasRemaining;
}
/// An RPC URL and its alias. Returned by `rpcUrlStructs`.
struct Rpc {
/// The alias of the RPC URL.
string key;
/// The RPC URL.
string url;
}
/// An RPC log object. Returned by `eth_getLogs`.
struct EthGetLogs {
/// The address of the log's emitter.
address emitter;
/// The topics of the log, including the signature, if any.
bytes32[] topics;
/// The raw data of the log.
bytes data;
/// The block hash.
bytes32 blockHash;
/// The block number.
uint64 blockNumber;
/// The transaction hash.
bytes32 transactionHash;
/// The transaction index in the block.
uint64 transactionIndex;
/// The log index.
uint256 logIndex;
/// Whether the log was removed.
bool removed;
}
/// A single entry in a directory listing. Returned by `readDir`.
struct DirEntry {
/// The error message, if any.
string errorMessage;
/// The path of the entry.
string path;
/// The depth of the entry.
uint64 depth;
/// Whether the entry is a directory.
bool isDir;
/// Whether the entry is a symlink.
bool isSymlink;
}
/// Metadata information about a file.
///
/// This structure is returned from the `fsMetadata` function and represents known
/// metadata about a file such as its permissions, size, modification
/// times, etc.
struct FsMetadata {
/// True if this metadata is for a directory.
bool isDir;
/// True if this metadata is for a symlink.
bool isSymlink;
/// The size of the file, in bytes, this metadata is for.
uint256 length;
/// True if this metadata is for a readonly (unwritable) file.
bool readOnly;
/// The last modification time listed in this metadata.
uint256 modified;
/// The last access time of this metadata.
uint256 accessed;
/// The creation time listed in this metadata.
uint256 created;
}
/// A wallet with a public and private key.
struct Wallet {
/// The wallet's address.
address addr;
/// The wallet's public key `X`.
uint256 publicKeyX;
/// The wallet's public key `Y`.
uint256 publicKeyY;
/// The wallet's private key.
uint256 privateKey;
}
/// The result of a `tryFfi` call.
struct FfiResult {
/// The exit code of the call.
int32 exitCode;
/// The optionally hex-decoded `stdout` data.
bytes stdout;
/// The `stderr` data.
bytes stderr;
}
/// Information on the chain and fork.
struct ChainInfo {
/// The fork identifier. Set to zero if no fork is active.
uint256 forkId;
/// The chain ID of the current fork.
uint256 chainId;
}
/// Information about a blockchain.
struct Chain {
/// The chain name.
string name;
/// The chain's Chain ID.
uint256 chainId;
/// The chain's alias. (i.e. what gets specified in `foundry.toml`).
string chainAlias;
/// A default RPC endpoint for this chain.
string rpcUrl;
}
/// The storage accessed during an `AccountAccess`.
struct StorageAccess {
/// The account whose storage was accessed.
address account;
/// The slot that was accessed.
bytes32 slot;
/// If the access was a write.
bool isWrite;
/// The previous value of the slot.
bytes32 previousValue;
/// The new value of the slot.
bytes32 newValue;
/// If the access was reverted.
bool reverted;
}
/// An EIP-2930 access list item.
struct AccessListItem {
/// The address to be added in access list.
address target;
/// The storage keys to be added in access list.
bytes32[] storageKeys;
}
/// The result of a `stopAndReturnStateDiff` call.
struct AccountAccess {
/// The chain and fork the access occurred.
ChainInfo chainInfo;
/// The kind of account access that determines what the account is.
/// If kind is Call, DelegateCall, StaticCall or CallCode, then the account is the callee.
/// If kind is Create, then the account is the newly created account.
/// If kind is SelfDestruct, then the account is the selfdestruct recipient.
/// If kind is a Resume, then account represents a account context that has resumed.
AccountAccessKind kind;
/// The account that was accessed.
/// It's either the account created, callee or a selfdestruct recipient for CREATE, CALL or SELFDESTRUCT.
address account;
/// What accessed the account.
address accessor;
/// If the account was initialized or empty prior to the access.
/// An account is considered initialized if it has code, a
/// non-zero nonce, or a non-zero balance.
bool initialized;
/// The previous balance of the accessed account.
uint256 oldBalance;
/// The potential new balance of the accessed account.
/// That is, all balance changes are recorded here, even if reverts occurred.
uint256 newBalance;
/// Code of the account deployed by CREATE.
bytes deployedCode;
/// Value passed along with the account access
uint256 value;
/// Input data provided to the CREATE or CALL
bytes data;
/// If this access reverted in either the current or parent context.
bool reverted;
/// An ordered list of storage accesses made during an account access operation.
StorageAccess[] storageAccesses;
/// Call depth traversed during the recording of state differences
uint64 depth;
/// The previous nonce of the accessed account.
uint64 oldNonce;
/// The new nonce of the accessed account.
uint64 newNonce;
}
/// The result of the `stopDebugTraceRecording` call
struct DebugStep {
/// The stack before executing the step of the run.
/// stack\[0\] represents the top of the stack.
/// and only stack data relevant to the opcode execution is contained.
uint256[] stack;
/// The memory input data before executing the step of the run.
/// only input data relevant to the opcode execution is contained.
///
/// e.g. for MLOAD, it will have memory\[offset:offset+32\] copied here.
/// the offset value can be get by the stack data.
bytes memoryInput;
/// The opcode that was accessed.
uint8 opcode;
/// The call depth of the step.
uint64 depth;
/// Whether the call end up with out of gas error.
bool isOutOfGas;
/// The contract address where the opcode is running
address contractAddr;
}
/// The transaction type (`txType`) of the broadcast.
enum BroadcastTxType {
/// Represents a CALL broadcast tx.
Call,
/// Represents a CREATE broadcast tx.
Create,
/// Represents a CREATE2 broadcast tx.
Create2
}
/// Represents a transaction's broadcast details.
struct BroadcastTxSummary {
/// The hash of the transaction that was broadcasted
bytes32 txHash;
/// Represent the type of transaction among CALL, CREATE, CREATE2
BroadcastTxType txType;
/// The address of the contract that was called or created.
/// This is address of the contract that is created if the txType is CREATE or CREATE2.
address contractAddress;
/// The block number the transaction landed in.
uint64 blockNumber;
/// Status of the transaction, retrieved from the transaction receipt.
bool success;
}
/// Holds a signed EIP-7702 authorization for an authority account to delegate to an implementation.
struct SignedDelegation {
/// The y-parity of the recovered secp256k1 signature (0 or 1).
uint8 v;
/// First 32 bytes of the signature.
bytes32 r;
/// Second 32 bytes of the signature.
bytes32 s;
/// The current nonce of the authority account at signing time.
/// Used to ensure signature can't be replayed after account nonce changes.
uint64 nonce;
/// Address of the contract implementation that will be delegated to.
/// Gets encoded into delegation code: 0xef0100 || implementation.
address implementation;
}
/// Represents a "potential" revert reason from a single subsequent call when using `vm.assumeNoReverts`.
/// Reverts that match will result in a FOUNDRY::ASSUME rejection, whereas unmatched reverts will be surfaced
/// as normal.
struct PotentialRevert {
/// The allowed origin of the revert opcode; address(0) allows reverts from any address
address reverter;
/// When true, only matches on the beginning of the revert data, otherwise, matches on entire revert data
bool partialMatch;
/// The data to use to match encountered reverts
bytes revertData;
}
// ======== EVM ========
/// Gets the address for a given private key.
#[cheatcode(group = Evm, safety = Safe)]
function addr(uint256 privateKey) external pure returns (address keyAddr);
/// Dump a genesis JSON file's `allocs` to disk.
#[cheatcode(group = Evm, safety = Unsafe)]
function dumpState(string calldata pathToStateJson) external;
/// Gets the nonce of an account.
#[cheatcode(group = Evm, safety = Safe)]
function getNonce(address account) external view returns (uint64 nonce);
/// Get the nonce of a `Wallet`.
#[cheatcode(group = Evm, safety = Safe)]
function getNonce(Wallet calldata wallet) external view returns (uint64 nonce);
/// Loads a storage slot from an address.
#[cheatcode(group = Evm, safety = Safe)]
function load(address target, bytes32 slot) external view returns (bytes32 data);
/// Load a genesis JSON file's `allocs` into the in-memory EVM state.
#[cheatcode(group = Evm, safety = Unsafe)]
function loadAllocs(string calldata pathToAllocsJson) external;
// -------- Record Debug Traces --------
/// Records the debug trace during the run.
#[cheatcode(group = Evm, safety = Safe)]
function startDebugTraceRecording() external;
/// Stop debug trace recording and returns the recorded debug trace.
#[cheatcode(group = Evm, safety = Safe)]
function stopAndReturnDebugTraceRecording() external returns (DebugStep[] memory step);
/// Clones a source account code, state, balance and nonce to a target account and updates in-memory EVM state.
#[cheatcode(group = Evm, safety = Unsafe)]
function cloneAccount(address source, address target) external;
// -------- Record Storage --------
/// Records all storage reads and writes. Use `accesses` to get the recorded data.
/// Subsequent calls to `record` will clear the previous data.
#[cheatcode(group = Evm, safety = Safe)]
function record() external;
/// Stops recording storage reads and writes.
#[cheatcode(group = Evm, safety = Safe)]
function stopRecord() external;
/// Gets all accessed reads and write slot from a `vm.record` session, for a given address.
#[cheatcode(group = Evm, safety = Safe)]
function accesses(address target) external view returns (bytes32[] memory readSlots, bytes32[] memory writeSlots);
/// Record all account accesses as part of CREATE, CALL or SELFDESTRUCT opcodes in order,
/// along with the context of the calls
#[cheatcode(group = Evm, safety = Safe)]
function startStateDiffRecording() external;
/// Returns an ordered array of all account accesses from a `vm.startStateDiffRecording` session.
#[cheatcode(group = Evm, safety = Safe)]
function stopAndReturnStateDiff() external returns (AccountAccess[] memory accountAccesses);
/// Returns state diffs from current `vm.startStateDiffRecording` session.
#[cheatcode(group = Evm, safety = Safe)]
function getStateDiff() external view returns (string memory diff);
/// Returns state diffs from current `vm.startStateDiffRecording` session, in json format.
#[cheatcode(group = Evm, safety = Safe)]
function getStateDiffJson() external view returns (string memory diff);
/// Returns an array of storage slots occupied by the specified variable.
#[cheatcode(group = Evm, safety = Safe)]
function getStorageSlots(address target, string calldata variableName) external view returns (uint256[] memory slots);
/// Returns an array of `StorageAccess` from current `vm.stateStateDiffRecording` session
#[cheatcode(group = Evm, safety = Safe)]
function getStorageAccesses() external view returns (StorageAccess[] memory storageAccesses);
// -------- Recording Map Writes --------
/// Starts recording all map SSTOREs for later retrieval.
#[cheatcode(group = Evm, safety = Safe)]
function startMappingRecording() external;
/// Stops recording all map SSTOREs for later retrieval and clears the recorded data.
#[cheatcode(group = Evm, safety = Safe)]
function stopMappingRecording() external;
/// Gets the number of elements in the mapping at the given slot, for a given address.
#[cheatcode(group = Evm, safety = Safe)]
function getMappingLength(address target, bytes32 mappingSlot) external view returns (uint256 length);
/// Gets the elements at index idx of the mapping at the given slot, for a given address. The
/// index must be less than the length of the mapping (i.e. the number of keys in the mapping).
#[cheatcode(group = Evm, safety = Safe)]
function getMappingSlotAt(address target, bytes32 mappingSlot, uint256 idx) external view returns (bytes32 value);
/// Gets the map key and parent of a mapping at a given slot, for a given address.
#[cheatcode(group = Evm, safety = Safe)]
function getMappingKeyAndParentOf(address target, bytes32 elementSlot)
external
view
returns (bool found, bytes32 key, bytes32 parent);
// -------- Block and Transaction Properties --------
/// Gets the current `block.chainid` of the currently selected environment.
/// You should use this instead of `block.chainid` if you use `vm.selectFork` or `vm.createSelectFork`, as `block.chainid` could be assumed
/// to be constant across a transaction, and as a result will get optimized out by the compiler.
/// See https://github.com/foundry-rs/foundry/issues/6180
#[cheatcode(group = Evm, safety = Safe)]
function getChainId() external view returns (uint256 blockChainId);
/// Sets `block.chainid`.
#[cheatcode(group = Evm, safety = Unsafe)]
function chainId(uint256 newChainId) external;
/// Sets `block.coinbase`.
#[cheatcode(group = Evm, safety = Unsafe)]
function coinbase(address newCoinbase) external;
/// Sets `block.difficulty`.
/// Not available on EVM versions from Paris onwards. Use `prevrandao` instead.
/// Reverts if used on unsupported EVM versions.
#[cheatcode(group = Evm, safety = Unsafe)]
function difficulty(uint256 newDifficulty) external;
/// Sets `block.basefee`.
#[cheatcode(group = Evm, safety = Unsafe)]
function fee(uint256 newBasefee) external;
/// Sets `block.prevrandao`.
/// Not available on EVM versions before Paris. Use `difficulty` instead.
/// If used on unsupported EVM versions it will revert.
#[cheatcode(group = Evm, safety = Unsafe)]
function prevrandao(bytes32 newPrevrandao) external;
/// Sets `block.prevrandao`.
/// Not available on EVM versions before Paris. Use `difficulty` instead.
/// If used on unsupported EVM versions it will revert.
#[cheatcode(group = Evm, safety = Unsafe)]
function prevrandao(uint256 newPrevrandao) external;
/// Sets the blobhashes in the transaction.
/// Not available on EVM versions before Cancun.
/// If used on unsupported EVM versions it will revert.
#[cheatcode(group = Evm, safety = Unsafe)]
function blobhashes(bytes32[] calldata hashes) external;
/// Gets the blockhashes from the current transaction.
/// Not available on EVM versions before Cancun.
/// If used on unsupported EVM versions it will revert.
#[cheatcode(group = Evm, safety = Unsafe)]
function getBlobhashes() external view returns (bytes32[] memory hashes);
/// Sets `block.height`.
#[cheatcode(group = Evm, safety = Unsafe)]
function roll(uint256 newHeight) external;
/// Gets the current `block.number`.
/// You should use this instead of `block.number` if you use `vm.roll`, as `block.number` is assumed to be constant across a transaction,
/// and as a result will get optimized out by the compiler.
/// See https://github.com/foundry-rs/foundry/issues/6180
#[cheatcode(group = Evm, safety = Safe)]
function getBlockNumber() external view returns (uint256 height);
/// Sets `tx.gasprice`.
#[cheatcode(group = Evm, safety = Unsafe)]
function txGasPrice(uint256 newGasPrice) external;
/// Sets `block.timestamp`.
#[cheatcode(group = Evm, safety = Unsafe)]
function warp(uint256 newTimestamp) external;
/// Gets the current `block.timestamp`.
/// You should use this instead of `block.timestamp` if you use `vm.warp`, as `block.timestamp` is assumed to be constant across a transaction,
/// and as a result will get optimized out by the compiler.
/// See https://github.com/foundry-rs/foundry/issues/6180
#[cheatcode(group = Evm, safety = Safe)]
function getBlockTimestamp() external view returns (uint256 timestamp);
/// Gets the RLP encoded block header for a given block number.
/// Returns the block header in the same format as `cast block <block_number> --raw`.
#[cheatcode(group = Evm, safety = Safe)]
function getRawBlockHeader(uint256 blockNumber) external view returns (bytes memory rlpHeader);
/// Sets `block.blobbasefee`
#[cheatcode(group = Evm, safety = Unsafe)]
function blobBaseFee(uint256 newBlobBaseFee) external;
/// Gets the current `block.blobbasefee`.
/// You should use this instead of `block.blobbasefee` if you use `vm.blobBaseFee`, as `block.blobbasefee` is assumed to be constant across a transaction,
/// and as a result will get optimized out by the compiler.
/// See https://github.com/foundry-rs/foundry/issues/6180
#[cheatcode(group = Evm, safety = Safe)]
function getBlobBaseFee() external view returns (uint256 blobBaseFee);
/// Set blockhash for the current block.
/// It only sets the blockhash for blocks where `block.number - 256 <= number < block.number`.
#[cheatcode(group = Evm, safety = Unsafe)]
function setBlockhash(uint256 blockNumber, bytes32 blockHash) external;
// -------- Account State --------
/// Sets an address' balance.
#[cheatcode(group = Evm, safety = Unsafe)]
function deal(address account, uint256 newBalance) external;
/// Sets an address' code.
#[cheatcode(group = Evm, safety = Unsafe)]
function etch(address target, bytes calldata newRuntimeBytecode) external;
/// Resets the nonce of an account to 0 for EOAs and 1 for contract accounts.
#[cheatcode(group = Evm, safety = Unsafe)]
function resetNonce(address account) external;
/// Sets the nonce of an account. Must be higher than the current nonce of the account.
#[cheatcode(group = Evm, safety = Unsafe)]
function setNonce(address account, uint64 newNonce) external;
/// Sets the nonce of an account to an arbitrary value.
#[cheatcode(group = Evm, safety = Unsafe)]
function setNonceUnsafe(address account, uint64 newNonce) external;
/// Stores a value to an address' storage slot.
#[cheatcode(group = Evm, safety = Unsafe)]
function store(address target, bytes32 slot, bytes32 value) external;
/// Marks the slots of an account and the account address as cold.
#[cheatcode(group = Evm, safety = Unsafe)]
function cool(address target) external;
/// Utility cheatcode to set an EIP-2930 access list for all subsequent transactions.
#[cheatcode(group = Evm, safety = Unsafe)]
function accessList(AccessListItem[] calldata access) external;
/// Utility cheatcode to remove any EIP-2930 access list set by `accessList` cheatcode.
#[cheatcode(group = Evm, safety = Unsafe)]
function noAccessList() external;
/// Utility cheatcode to mark specific storage slot as warm, simulating a prior read.
#[cheatcode(group = Evm, safety = Unsafe)]
function warmSlot(address target, bytes32 slot) external;
/// Utility cheatcode to mark specific storage slot as cold, simulating no prior read.
#[cheatcode(group = Evm, safety = Unsafe)]
function coolSlot(address target, bytes32 slot) external;
/// Returns the test or script execution evm version.
///
/// **Note:** The execution evm version is not the same as the compilation one.
#[cheatcode(group = Evm, safety = Safe)]
function getEvmVersion() external pure returns (string memory evm);
/// Set the exact test or script execution evm version, e.g. `berlin`, `cancun`.
///
/// **Note:** The execution evm version is not the same as the compilation one.
#[cheatcode(group = Evm, safety = Safe)]
function setEvmVersion(string calldata evm) external;
// -------- Call Manipulation --------
// --- Mocks ---
/// Clears all mocked calls.
#[cheatcode(group = Evm, safety = Unsafe)]
function clearMockedCalls() external;
/// Mocks a call to an address, returning specified data.
/// Calldata can either be strict or a partial match, e.g. if you only
/// pass a Solidity selector to the expected calldata, then the entire Solidity
/// function will be mocked.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCall(address callee, bytes calldata data, bytes calldata returnData) external;
/// Mocks a call to an address with a specific `msg.value`, returning specified data.
/// Calldata match takes precedence over `msg.value` in case of ambiguity.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCall(address callee, uint256 msgValue, bytes calldata data, bytes calldata returnData) external;
/// Mocks a call to an address, returning specified data.
/// Calldata can either be strict or a partial match, e.g. if you only
/// pass a Solidity selector to the expected calldata, then the entire Solidity
/// function will be mocked.
///
/// Overload to pass the function selector directly `token.approve.selector` instead of `abi.encodeWithSelector(token.approve.selector)`.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCall(address callee, bytes4 data, bytes calldata returnData) external;
/// Mocks a call to an address with a specific `msg.value`, returning specified data.
/// Calldata match takes precedence over `msg.value` in case of ambiguity.
///
/// Overload to pass the function selector directly `token.approve.selector` instead of `abi.encodeWithSelector(token.approve.selector)`.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCall(address callee, uint256 msgValue, bytes4 data, bytes calldata returnData) external;
/// Mocks multiple calls to an address, returning specified data for each call.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCalls(address callee, bytes calldata data, bytes[] calldata returnData) external;
/// Mocks multiple calls to an address with a specific `msg.value`, returning specified data for each call.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCalls(address callee, uint256 msgValue, bytes calldata data, bytes[] calldata returnData) external;
/// Reverts a call to an address with specified revert data.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCallRevert(address callee, bytes calldata data, bytes calldata revertData) external;
/// Reverts a call to an address with a specific `msg.value`, with specified revert data.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCallRevert(address callee, uint256 msgValue, bytes calldata data, bytes calldata revertData)
external;
/// Reverts a call to an address with specified revert data.
///
/// Overload to pass the function selector directly `token.approve.selector` instead of `abi.encodeWithSelector(token.approve.selector)`.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCallRevert(address callee, bytes4 data, bytes calldata revertData) external;
/// Reverts a call to an address with a specific `msg.value`, with specified revert data.
///
/// Overload to pass the function selector directly `token.approve.selector` instead of `abi.encodeWithSelector(token.approve.selector)`.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockCallRevert(address callee, uint256 msgValue, bytes4 data, bytes calldata revertData)
external;
/// Whenever a call is made to `callee` with calldata `data`, this cheatcode instead calls
/// `target` with the same calldata. This functionality is similar to a delegate call made to
/// `target` contract from `callee`.
/// Can be used to substitute a call to a function with another implementation that captures
/// the primary logic of the original function but is easier to reason about.
/// If calldata is not a strict match then partial match by selector is attempted.
#[cheatcode(group = Evm, safety = Unsafe)]
function mockFunction(address callee, address target, bytes calldata data) external;
// --- Impersonation (pranks) ---
/// Sets the *next* call's `msg.sender` to be the input address.
#[cheatcode(group = Evm, safety = Unsafe)]
function prank(address msgSender) external;
/// Sets all subsequent calls' `msg.sender` to be the input address until `stopPrank` is called.
#[cheatcode(group = Evm, safety = Unsafe)]
function startPrank(address msgSender) external;
/// Sets the *next* call's `msg.sender` to be the input address, and the `tx.origin` to be the second input.
#[cheatcode(group = Evm, safety = Unsafe)]
function prank(address msgSender, address txOrigin) external;
/// Sets all subsequent calls' `msg.sender` to be the input address until `stopPrank` is called, and the `tx.origin` to be the second input.
#[cheatcode(group = Evm, safety = Unsafe)]
function startPrank(address msgSender, address txOrigin) external;
/// Sets the *next* delegate call's `msg.sender` to be the input address.
#[cheatcode(group = Evm, safety = Unsafe)]
function prank(address msgSender, bool delegateCall) external;
/// Sets all subsequent delegate calls' `msg.sender` to be the input address until `stopPrank` is called.
#[cheatcode(group = Evm, safety = Unsafe)]
function startPrank(address msgSender, bool delegateCall) external;
/// Sets the *next* delegate call's `msg.sender` to be the input address, and the `tx.origin` to be the second input.
#[cheatcode(group = Evm, safety = Unsafe)]
function prank(address msgSender, address txOrigin, bool delegateCall) external;
/// Sets all subsequent delegate calls' `msg.sender` to be the input address until `stopPrank` is called, and the `tx.origin` to be the second input.
#[cheatcode(group = Evm, safety = Unsafe)]
function startPrank(address msgSender, address txOrigin, bool delegateCall) external;
/// Resets subsequent calls' `msg.sender` to be `address(this)`.
#[cheatcode(group = Evm, safety = Unsafe)]
function stopPrank() external;
/// Reads the current `msg.sender` and `tx.origin` from state and reports if there is any active caller modification.
#[cheatcode(group = Evm, safety = Unsafe)]
function readCallers() external view returns (CallerMode callerMode, address msgSender, address txOrigin);
// ----- Arbitrary Snapshots -----
/// Snapshot capture an arbitrary numerical value by name.
/// The group name is derived from the contract name.
#[cheatcode(group = Evm, safety = Unsafe)]
function snapshotValue(string calldata name, uint256 value) external;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/spec/src/lib.rs | crates/cheatcodes/spec/src/lib.rs | //! Cheatcode specification for Foundry.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, fmt};
mod cheatcode;
pub use cheatcode::{Cheatcode, CheatcodeDef, Group, Safety, Status};
mod function;
pub use function::{Function, Mutability, Visibility};
mod items;
pub use items::{Enum, EnumVariant, Error, Event, Struct, StructField};
mod vm;
pub use vm::Vm;
// The `cheatcodes.json` schema.
/// Foundry cheatcodes. Learn more: <https://book.getfoundry.sh/cheatcodes/>
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub struct Cheatcodes<'a> {
    /// Cheatcode errors.
    // `#[serde(borrow)]` lets deserialization borrow from the input buffer
    // instead of allocating owned copies; same for the fields below.
    #[serde(borrow)]
    pub errors: Cow<'a, [Error<'a>]>,
    /// Cheatcode events.
    #[serde(borrow)]
    pub events: Cow<'a, [Event<'a>]>,
    /// Cheatcode enums.
    #[serde(borrow)]
    pub enums: Cow<'a, [Enum<'a>]>,
    /// Cheatcode structs.
    #[serde(borrow)]
    pub structs: Cow<'a, [Struct<'a>]>,
    /// All the cheatcodes.
    #[serde(borrow)]
    pub cheatcodes: Cow<'a, [Cheatcode<'a>]>,
}
impl fmt::Display for Cheatcodes<'_> {
    /// Writes every error, event, enum, struct, and cheatcode function
    /// signature, one per line, in that order.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.errors.iter().try_for_each(|item| writeln!(f, "{item}"))?;
        self.events.iter().try_for_each(|item| writeln!(f, "{item}"))?;
        self.enums.iter().try_for_each(|item| writeln!(f, "{item}"))?;
        self.structs.iter().try_for_each(|item| writeln!(f, "{item}"))?;
        self.cheatcodes.iter().try_for_each(|item| writeln!(f, "{}", item.func))
    }
}
impl Default for Cheatcodes<'static> {
fn default() -> Self {
Self::new()
}
}
impl Cheatcodes<'static> {
    /// Returns the default cheatcodes.
    ///
    /// NOTE: the order of items below is load-bearing — serde serializes these
    /// slices in order, so it determines the layout of the generated
    /// `cheatcodes.json` and `Vm.sol` artifacts (see this crate's tests).
    pub fn new() -> Self {
        Self {
            // unfortunately technology has not yet advanced to the point where we can get all
            // items of a certain type in a module, so we have to hardcode them here
            structs: Cow::Owned(vec![
                Vm::Log::STRUCT.clone(),
                Vm::Rpc::STRUCT.clone(),
                Vm::EthGetLogs::STRUCT.clone(),
                Vm::DirEntry::STRUCT.clone(),
                Vm::FsMetadata::STRUCT.clone(),
                Vm::Wallet::STRUCT.clone(),
                Vm::FfiResult::STRUCT.clone(),
                Vm::ChainInfo::STRUCT.clone(),
                Vm::Chain::STRUCT.clone(),
                Vm::AccountAccess::STRUCT.clone(),
                Vm::StorageAccess::STRUCT.clone(),
                Vm::Gas::STRUCT.clone(),
                Vm::DebugStep::STRUCT.clone(),
                Vm::BroadcastTxSummary::STRUCT.clone(),
                Vm::SignedDelegation::STRUCT.clone(),
                Vm::PotentialRevert::STRUCT.clone(),
                Vm::AccessListItem::STRUCT.clone(),
            ]),
            enums: Cow::Owned(vec![
                Vm::CallerMode::ENUM.clone(),
                Vm::AccountAccessKind::ENUM.clone(),
                Vm::ForgeContext::ENUM.clone(),
                Vm::BroadcastTxType::ENUM.clone(),
            ]),
            errors: Vm::VM_ERRORS.iter().copied().cloned().collect(),
            // No events are defined at the moment; the commented line below shows
            // how they would be collected if/when that changes.
            events: Cow::Borrowed(&[]),
            // events: Vm::VM_EVENTS.iter().copied().cloned().collect(),
            cheatcodes: Vm::CHEATCODES.iter().copied().cloned().collect(),
        }
    }
}
#[cfg(test)]
#[expect(clippy::disallowed_macros)]
mod tests {
    use super::*;
    use std::{fs, path::Path};

    // Generated artifact paths, relative to this crate's manifest directory.
    const JSON_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../assets/cheatcodes.json");
    #[cfg(feature = "schema")]
    const SCHEMA_PATH: &str =
        concat!(env!("CARGO_MANIFEST_DIR"), "/../assets/cheatcodes.schema.json");
    const IFACE_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../../../testdata/utils/Vm.sol");

    /// Generates the `cheatcodes.json` file contents.
    fn json_cheatcodes() -> String {
        serde_json::to_string_pretty(&Cheatcodes::new()).unwrap()
    }

    /// Generates the [cheatcodes](json_cheatcodes) JSON schema.
    #[cfg(feature = "schema")]
    fn json_schema() -> String {
        serde_json::to_string_pretty(&schemars::schema_for!(Cheatcodes<'_>)).unwrap()
    }

    /// Generates the Solidity `Vm` interface used by the internal test suite.
    fn sol_iface() -> String {
        let mut cheats = Cheatcodes::new();
        cheats.errors = Default::default(); // Skip errors to allow <0.8.4.
        // Indent every line by four spaces so the items sit inside `interface Vm {}`.
        let cheats = cheats.to_string().trim().replace('\n', "\n    ");
        format!(
            "\
// Automatically generated from `foundry-cheatcodes` Vm definitions. Do not modify manually.
// This interface is just for internal testing purposes. Use `forge-std` instead.
// SPDX-License-Identifier: MIT OR Apache-2.0
pragma solidity >=0.6.2 <0.9.0;
pragma experimental ABIEncoderV2;
interface Vm {{
    {cheats}
}}
"
        )
    }

    /// Asserts `cheatcodes.json` is in sync with the current definitions.
    #[test]
    fn spec_up_to_date() {
        ensure_file_contents(Path::new(JSON_PATH), &json_cheatcodes());
    }

    /// Asserts the JSON schema artifact is in sync (only with the `schema` feature).
    #[test]
    #[cfg(feature = "schema")]
    fn schema_up_to_date() {
        ensure_file_contents(Path::new(SCHEMA_PATH), &json_schema());
    }

    /// Asserts the testdata `Vm.sol` interface is in sync.
    #[test]
    fn iface_up_to_date() {
        ensure_file_contents(Path::new(IFACE_PATH), &sol_iface());
    }

    /// Checks that the `file` has the specified `contents`. If that is not the
    /// case, updates the file and then fails the test.
    fn ensure_file_contents(file: &Path, contents: &str) {
        if let Ok(old_contents) = fs::read_to_string(file)
            && normalize_newlines(&old_contents) == normalize_newlines(contents)
        {
            // File is already up to date.
            return;
        }
        eprintln!("\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n", file.display());
        if std::env::var("CI").is_ok() {
            eprintln!("    NOTE: run `cargo cheats` locally and commit the updated files\n");
        }
        if let Some(parent) = file.parent() {
            // Best-effort: the directory may already exist.
            let _ = fs::create_dir_all(parent);
        }
        fs::write(file, contents).unwrap();
        panic!("some file was not up to date and has been updated, simply re-run the tests");
    }

    /// Normalizes CRLF to LF so comparisons are platform-independent.
    fn normalize_newlines(s: &str) -> String {
        s.replace("\r\n", "\n")
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/spec/src/items.rs | crates/cheatcodes/spec/src/items.rs | use serde::{Deserialize, Serialize};
use std::{borrow::Cow, fmt};
/// A Solidity custom error.
///
/// Serialized with camelCase field names for `cheatcodes.json`.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub struct Error<'a> {
    /// The name of the error.
    pub name: &'a str,
    /// The description of the error.
    /// This is a markdown string derived from the NatSpec documentation.
    pub description: &'a str,
    /// The Solidity error declaration, including full type, parameter names, etc.
    pub declaration: &'a str,
}

impl fmt::Display for Error<'_> {
    /// Displays the error as its full Solidity declaration.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.declaration)
    }
}
/// A Solidity event.
///
/// Serialized with camelCase field names for `cheatcodes.json`.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub struct Event<'a> {
    /// The name of the event.
    pub name: &'a str,
    /// The description of the event.
    /// This is a markdown string derived from the NatSpec documentation.
    pub description: &'a str,
    /// The Solidity event declaration, including full type, parameter names, etc.
    pub declaration: &'a str,
}

impl fmt::Display for Event<'_> {
    /// Displays the event as its full Solidity declaration.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.declaration)
    }
}
/// A Solidity enumeration.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub struct Enum<'a> {
    /// The name of the enum.
    pub name: &'a str,
    /// The description of the enum.
    /// This is a markdown string derived from the NatSpec documentation.
    pub description: &'a str,
    /// The variants of the enum, in declaration order.
    #[serde(borrow)]
    pub variants: Cow<'a, [EnumVariant<'a>]>,
}
impl fmt::Display for Enum<'_> {
    /// Formats the enum as a one-line Solidity declaration, e.g.
    /// `enum Foo { A, B }`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "enum {} {{ ", self.name)?;
        let mut first = true;
        for variant in self.variants.iter() {
            if !first {
                f.write_str(", ")?;
            }
            first = false;
            f.write_str(variant.name)?;
        }
        f.write_str(" }")
    }
}
/// A variant of an [`Enum`].
///
/// Only the name participates in the rendered Solidity declaration; the
/// description is used for documentation output.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub struct EnumVariant<'a> {
    /// The name of the variant.
    pub name: &'a str,
    /// The description of the variant.
    /// This is a markdown string derived from the NatSpec documentation.
    pub description: &'a str,
}
/// A Solidity struct.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub struct Struct<'a> {
    /// The name of the struct.
    pub name: &'a str,
    /// The description of the struct.
    /// This is a markdown string derived from the NatSpec documentation.
    pub description: &'a str,
    /// The fields of the struct, in declaration order.
    #[serde(borrow)]
    pub fields: Cow<'a, [StructField<'a>]>,
}
impl fmt::Display for Struct<'_> {
    /// Formats the struct as a one-line Solidity declaration, e.g.
    /// `struct Foo { uint256 a; address b; }`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "struct {} {{ ", self.name)?;
        self.fields.iter().try_for_each(|fld| write!(f, "{} {}; ", fld.ty, fld.name))?;
        f.write_str("}")
    }
}
/// A [`Struct`] field.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub struct StructField<'a> {
    /// The name of the field.
    pub name: &'a str,
    /// The Solidity type of the field, e.g. `uint256`.
    pub ty: &'a str,
    /// The description of the field.
    /// This is a markdown string derived from the NatSpec documentation.
    pub description: &'a str,
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/spec/src/function.rs | crates/cheatcodes/spec/src/function.rs | use serde::{Deserialize, Serialize};
use std::fmt;
/// Solidity function.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
#[non_exhaustive]
pub struct Function<'a> {
    /// The function's unique identifier. This is the function name, optionally appended with an
    /// index if it is overloaded.
    pub id: &'a str,
    /// The description of the function.
    /// This is a markdown string derived from the NatSpec documentation.
    pub description: &'a str,
    /// The Solidity function declaration, including full type and parameter names, visibility,
    /// etc.
    pub declaration: &'a str,
    /// The Solidity function visibility attribute. This is currently always `external`, but this
    /// may change in the future.
    pub visibility: Visibility,
    /// The Solidity function state mutability attribute.
    pub mutability: Mutability,
    /// The standard function signature used to calculate `selector`.
    /// See the [Solidity docs] for more information.
    ///
    /// [Solidity docs]: https://docs.soliditylang.org/en/latest/abi-spec.html#function-selector
    pub signature: &'a str,
    /// The hex-encoded, "0x"-prefixed 4-byte function selector,
    /// which is the Keccak-256 hash of `signature`.
    pub selector: &'a str,
    /// The 4-byte function selector as a byte array.
    /// This is the binary form of `selector` (minus the "0x" prefix).
    pub selector_bytes: [u8; 4],
}

impl fmt::Display for Function<'_> {
    /// Displays the function as its full Solidity declaration.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.declaration)
    }
}
/// Solidity function visibility attribute. See the [Solidity docs] for more information.
///
/// [Solidity docs]: https://docs.soliditylang.org/en/latest/contracts.html#function-visibility
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub enum Visibility {
    /// The function is only visible externally.
    External,
    /// Visible externally and internally.
    Public,
    /// Only visible internally.
    Internal,
    /// Only visible in the current contract.
    Private,
}
impl fmt::Display for Visibility {
    /// Writes the Solidity visibility keyword.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

impl Visibility {
    /// Returns the Solidity keyword for this visibility.
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::Private => "private",
            Self::Internal => "internal",
            Self::Public => "public",
            Self::External => "external",
        }
    }
}
/// Solidity function state mutability attribute. See the [Solidity docs] for more information.
///
/// [Solidity docs]: https://docs.soliditylang.org/en/latest/contracts.html#state-mutability
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub enum Mutability {
    /// Disallows modification or access of state.
    Pure,
    /// Disallows modification of state.
    View,
    /// Allows modification of state.
    /// Serialized as the empty string, matching Solidity's implicit default.
    #[serde(rename = "")]
    None,
}
impl fmt::Display for Mutability {
    /// Writes the Solidity mutability keyword (nothing for the default).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

impl Mutability {
    /// Returns the Solidity keyword for this mutability; empty for [`Mutability::None`].
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::None => "",
            Self::View => "view",
            Self::Pure => "pure",
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cheatcodes/spec/src/cheatcode.rs | crates/cheatcodes/spec/src/cheatcode.rs | use super::Function;
use alloy_sol_types::SolCall;
use serde::{Deserialize, Serialize};
/// Cheatcode definition trait. Implemented by all [`Vm`](crate::Vm) functions.
pub trait CheatcodeDef: std::fmt::Debug + Clone + SolCall {
    /// The static cheatcode definition.
    /// One definition per implementing call type.
    const CHEATCODE: &'static Cheatcode<'static>;
}
/// Specification of a single cheatcode. Extends [`Function`] with additional metadata.
///
/// `deny_unknown_fields` ensures the serialized spec cannot silently carry
/// fields this version of the crate does not know about.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[non_exhaustive]
pub struct Cheatcode<'a> {
    // Automatically-generated fields.
    /// The Solidity function declaration.
    #[serde(borrow)]
    pub func: Function<'a>,
    // Manually-specified fields.
    /// The group that the cheatcode belongs to.
    pub group: Group,
    /// The current status of the cheatcode. E.g. whether it is stable or experimental, etc.
    pub status: Status<'a>,
    /// Whether the cheatcode is safe to use inside of scripts. E.g. it does not change state in an
    /// unexpected way.
    pub safety: Safety,
}
/// The status of a cheatcode.
///
/// Marked `#[non_exhaustive]` so new statuses can be added without a breaking change.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
#[non_exhaustive]
pub enum Status<'a> {
    /// The cheatcode and its API is currently stable.
    Stable,
    /// The cheatcode is unstable, meaning it may contain bugs and may break its API on any
    /// release.
    ///
    /// Use of experimental cheatcodes will result in a warning.
    Experimental,
    /// The cheatcode has been deprecated, meaning it will be removed in a future release.
    ///
    /// Contains the optional reason for deprecation.
    ///
    /// Use of deprecated cheatcodes is discouraged and will result in a warning.
    Deprecated(Option<&'a str>),
    /// The cheatcode has been removed and is no longer available for use.
    ///
    /// Use of removed cheatcodes will result in a hard error.
    Removed,
    /// The cheatcode is only used internally for foundry testing and may be changed or removed at
    /// any time.
    ///
    /// Use of internal cheatcodes is discouraged and will result in a warning.
    Internal,
}
/// Cheatcode groups.
/// Initially derived and modified from inline comments in [`forge-std`'s `Vm.sol`][vmsol].
///
/// Each group either has a fixed safety or leaves it per-cheatcode; see [`Group::safety`].
///
/// [vmsol]: https://github.com/foundry-rs/forge-std/blob/dcb0d52bc4399d37a6545848e3b8f9d03c77b98d/src/Vm.sol
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
#[non_exhaustive]
pub enum Group {
    /// Cheatcodes that read from, or write to the current EVM execution state.
    ///
    /// Examples: any of the `record` cheatcodes, `chainId`, `coinbase`.
    ///
    /// Safety: ambiguous, depends on whether the cheatcode is read-only or not.
    Evm,
    /// Cheatcodes that interact with how a test is run.
    ///
    /// Examples: `assume`, `skip`, `expectRevert`.
    ///
    /// Safety: ambiguous, depends on whether the cheatcode is read-only or not.
    Testing,
    /// Cheatcodes that interact with how a script is run.
    ///
    /// Examples: `broadcast`, `startBroadcast`, `stopBroadcast`.
    ///
    /// Safety: safe.
    Scripting,
    /// Cheatcodes that interact with the OS or filesystem.
    ///
    /// Examples: `ffi`, `projectRoot`, `writeFile`.
    ///
    /// Safety: safe.
    Filesystem,
    /// Cheatcodes that interact with the program's environment variables.
    ///
    /// Examples: `setEnv`, `envBool`, `envOr`.
    ///
    /// Safety: safe.
    Environment,
    /// Utility cheatcodes that deal with string parsing and manipulation.
    ///
    /// Examples: `toString`. `parseBytes`.
    ///
    /// Safety: safe.
    String,
    /// Utility cheatcodes that deal with parsing values from and converting values to JSON.
    ///
    /// Examples: `serializeJson`, `parseJsonUint`, `writeJson`.
    ///
    /// Safety: safe.
    Json,
    /// Utility cheatcodes that deal with parsing values from and converting values to TOML.
    ///
    /// Examples: `parseToml`, `writeToml`.
    ///
    /// Safety: safe.
    Toml,
    /// Cryptography-related cheatcodes.
    ///
    /// Examples: `sign*`.
    ///
    /// Safety: safe.
    Crypto,
    /// Generic, uncategorized utilities.
    ///
    /// Examples: `toString`, `parse*`, `serialize*`.
    ///
    /// Safety: safe.
    Utilities,
}
impl Group {
    /// Returns the safety of this cheatcode group.
    ///
    /// `Evm` and `Testing` mix read-only and state-mutating cheatcodes, so
    /// their safety is ambiguous (`None`); every other group is inherently safe.
    pub const fn safety(self) -> Option<Safety> {
        if matches!(self, Self::Evm | Self::Testing) { None } else { Some(Safety::Safe) }
    }

    /// Returns this value as a string.
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::Utilities => "utilities",
            Self::Crypto => "crypto",
            Self::Toml => "toml",
            Self::Json => "json",
            Self::String => "string",
            Self::Environment => "environment",
            Self::Filesystem => "filesystem",
            Self::Scripting => "scripting",
            Self::Testing => "testing",
            Self::Evm => "evm",
        }
    }
}
// TODO: Find a better name for this
/// Cheatcode safety.
///
/// Defaults to [`Safety::Safe`] (see the `Default` derive).
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
#[non_exhaustive]
pub enum Safety {
    /// The cheatcode is not safe to use in scripts.
    Unsafe,
    /// The cheatcode is safe to use in scripts.
    #[default]
    Safe,
}
impl Safety {
    /// Returns this value as a string.
    pub const fn as_str(self) -> &'static str {
        match self {
            Self::Unsafe => "unsafe",
            Self::Safe => "safe",
        }
    }

    /// Returns whether this value is safe.
    pub const fn is_safe(self) -> bool {
        match self {
            Self::Safe => true,
            Self::Unsafe => false,
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/primitives/src/lib.rs | crates/primitives/src/lib.rs | //! Foundry primitives
mod network;
mod transaction;
pub use network::*;
pub use transaction::*;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/primitives/src/transaction/envelope.rs | crates/primitives/src/transaction/envelope.rs | use alloy_consensus::{
Sealed, Signed, TransactionEnvelope, TxEip1559, TxEip2930, TxEnvelope, TxLegacy, TxType,
Typed2718,
crypto::RecoveryError,
transaction::{
TxEip7702,
eip4844::{TxEip4844Variant, TxEip4844WithSidecar},
},
};
use alloy_evm::FromRecoveredTx;
use alloy_network::{AnyRpcTransaction, AnyTxEnvelope};
use alloy_primitives::{Address, B256};
use alloy_rlp::Encodable;
use alloy_rpc_types::ConversionError;
use alloy_serde::WithOtherFields;
use op_alloy_consensus::{DEPOSIT_TX_TYPE_ID, OpTransaction as OpTransactionTrait, TxDeposit};
use op_revm::OpTransaction;
use revm::context::TxEnv;
use tempo_primitives::{AASigned, TempoTransaction};
//
/// Container type for signed, typed transactions.
///
/// Covers the standard Ethereum types plus the OP-stack deposit and Tempo
/// transaction types; the derive generates [`FoundryTxType`] and
/// [`FoundryTypedTx`] alongside this envelope.
// NOTE(onbjerg): Boxing `Tempo(AASigned)` breaks `TransactionEnvelope` derive macro trait bounds.
#[allow(clippy::large_enum_variant)]
#[derive(Clone, Debug, TransactionEnvelope)]
#[envelope(
    tx_type_name = FoundryTxType,
    typed = FoundryTypedTx,
)]
pub enum FoundryTxEnvelope {
    /// Legacy transaction type
    #[envelope(ty = 0)]
    Legacy(Signed<TxLegacy>),
    /// [EIP-2930] transaction.
    ///
    /// [EIP-2930]: https://eips.ethereum.org/EIPS/eip-2930
    #[envelope(ty = 1)]
    Eip2930(Signed<TxEip2930>),
    /// [EIP-1559] transaction.
    ///
    /// [EIP-1559]: https://eips.ethereum.org/EIPS/eip-1559
    #[envelope(ty = 2)]
    Eip1559(Signed<TxEip1559>),
    /// [EIP-4844] transaction.
    ///
    /// [EIP-4844]: https://eips.ethereum.org/EIPS/eip-4844
    #[envelope(ty = 3)]
    Eip4844(Signed<TxEip4844Variant>),
    /// [EIP-7702] transaction.
    ///
    /// [EIP-7702]: https://eips.ethereum.org/EIPS/eip-7702
    #[envelope(ty = 4)]
    Eip7702(Signed<TxEip7702>),
    /// OP stack deposit transaction.
    ///
    /// See <https://docs.optimism.io/op-stack/bridging/deposit-flow>.
    #[envelope(ty = 126)]
    Deposit(Sealed<TxDeposit>),
    /// Tempo transaction type.
    ///
    /// See <https://docs.tempo.xyz/protocol/transactions>.
    #[envelope(ty = 0x76, typed = TempoTransaction)]
    Tempo(AASigned),
}
impl FoundryTxEnvelope {
    /// Converts the transaction into an Ethereum [`TxEnvelope`].
    ///
    /// Returns an error if the transaction is not part of the standard Ethereum transaction types.
    pub fn try_into_eth(self) -> Result<TxEnvelope, Self> {
        match self {
            Self::Legacy(tx) => Ok(TxEnvelope::Legacy(tx)),
            Self::Eip2930(tx) => Ok(TxEnvelope::Eip2930(tx)),
            Self::Eip1559(tx) => Ok(TxEnvelope::Eip1559(tx)),
            Self::Eip4844(tx) => Ok(TxEnvelope::Eip4844(tx)),
            Self::Eip7702(tx) => Ok(TxEnvelope::Eip7702(tx)),
            // Deposit and Tempo transactions have no standard Ethereum representation.
            Self::Deposit(_) => Err(self),
            Self::Tempo(_) => Err(self),
        }
    }

    /// Returns the EIP-4844 sidecar if this is a blob transaction that carries one.
    pub fn sidecar(&self) -> Option<&TxEip4844WithSidecar> {
        match self {
            Self::Eip4844(signed_variant) => match signed_variant.tx() {
                TxEip4844Variant::TxEip4844WithSidecar(with_sidecar) => Some(with_sidecar),
                _ => None,
            },
            _ => None,
        }
    }

    /// Returns the hash of the transaction.
    ///
    /// # Note
    ///
    /// If this transaction has the Impersonated signature then this returns a modified unique
    /// hash. This allows us to treat impersonated transactions as unique.
    pub fn hash(&self) -> B256 {
        match self {
            Self::Legacy(t) => *t.hash(),
            Self::Eip2930(t) => *t.hash(),
            Self::Eip1559(t) => *t.hash(),
            Self::Eip4844(t) => *t.hash(),
            Self::Eip7702(t) => *t.hash(),
            Self::Deposit(t) => t.tx_hash(),
            Self::Tempo(t) => *t.hash(),
        }
    }

    /// Returns the hash if the transaction is impersonated (using a fake signature)
    ///
    /// This appends the `address` before hashing it
    pub fn impersonated_hash(&self, sender: Address) -> B256 {
        let mut buffer = Vec::new();
        Encodable::encode(self, &mut buffer);
        buffer.extend_from_slice(sender.as_ref());
        // `keccak256` already returns a `B256`; the previous
        // `B256::from_slice(..as_slice())` round-trip was a redundant copy.
        alloy_primitives::utils::keccak256(&buffer)
    }

    /// Recovers the Ethereum address which was used to sign the transaction.
    pub fn recover(&self) -> Result<Address, RecoveryError> {
        Ok(match self {
            Self::Legacy(tx) => tx.recover_signer()?,
            Self::Eip2930(tx) => tx.recover_signer()?,
            Self::Eip1559(tx) => tx.recover_signer()?,
            Self::Eip4844(tx) => tx.recover_signer()?,
            Self::Eip7702(tx) => tx.recover_signer()?,
            // Deposit transactions are unsigned; the sender is part of the payload.
            Self::Deposit(tx) => tx.from,
            Self::Tempo(tx) => tx.signature().recover_signer(&tx.signature_hash())?,
        })
    }
}
impl OpTransactionTrait for FoundryTxEnvelope {
    /// True only for OP-stack deposit transactions.
    fn is_deposit(&self) -> bool {
        self.as_deposit().is_some()
    }

    /// Returns the sealed deposit payload when this is a deposit transaction.
    fn as_deposit(&self) -> Option<&Sealed<TxDeposit>> {
        if let Self::Deposit(tx) = self { Some(tx) } else { None }
    }
}
impl TryFrom<FoundryTxEnvelope> for TxEnvelope {
type Error = FoundryTxEnvelope;
fn try_from(envelope: FoundryTxEnvelope) -> Result<Self, Self::Error> {
envelope.try_into_eth()
}
}
impl TryFrom<AnyRpcTransaction> for FoundryTxEnvelope {
    type Error = ConversionError;

    /// Converts an RPC transaction into a consensus [`FoundryTxEnvelope`].
    ///
    /// Standard Ethereum envelopes map 1:1. For unknown envelopes, only the
    /// OP-stack deposit type is handled; anything else is a conversion error.
    fn try_from(value: AnyRpcTransaction) -> Result<Self, Self::Error> {
        let WithOtherFields { inner, .. } = value.0;
        // The recovered sender; deposit transactions carry `from` in the payload.
        let from = inner.inner.signer();
        match inner.inner.into_inner() {
            AnyTxEnvelope::Ethereum(tx) => match tx {
                TxEnvelope::Legacy(tx) => Ok(Self::Legacy(tx)),
                TxEnvelope::Eip2930(tx) => Ok(Self::Eip2930(tx)),
                TxEnvelope::Eip1559(tx) => Ok(Self::Eip1559(tx)),
                TxEnvelope::Eip4844(tx) => Ok(Self::Eip4844(tx)),
                TxEnvelope::Eip7702(tx) => Ok(Self::Eip7702(tx)),
            },
            AnyTxEnvelope::Unknown(mut tx) => {
                // Try to convert to deposit transaction
                if tx.ty() == DEPOSIT_TX_TYPE_ID {
                    // Inject the recovered sender so `TxDeposit` deserialization picks it up.
                    tx.inner.fields.insert("from".to_string(), serde_json::to_value(from).unwrap());
                    let deposit_tx =
                        tx.inner.fields.deserialize_into::<TxDeposit>().map_err(|e| {
                            ConversionError::Custom(format!(
                                "Failed to deserialize deposit tx: {e}"
                            ))
                        })?;
                    return Ok(Self::Deposit(Sealed::new(deposit_tx)));
                };
                let tx_type = tx.ty();
                Err(ConversionError::Custom(format!("Unknown transaction type: 0x{tx_type:02X}")))
            }
        }
    }
}
impl FromRecoveredTx<FoundryTxEnvelope> for TxEnv {
    /// Builds an EVM [`TxEnv`] from a recovered transaction and its `caller`.
    ///
    /// Panics for Tempo transactions, which are not executable on plain Ethereum.
    fn from_recovered_tx(tx: &FoundryTxEnvelope, caller: Address) -> Self {
        match tx {
            FoundryTxEnvelope::Legacy(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Eip2930(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Eip1559(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Eip4844(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Eip7702(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Deposit(sealed_tx) => {
                Self::from_recovered_tx(sealed_tx.inner(), caller)
            }
            FoundryTxEnvelope::Tempo(_) => panic!("unsupported tx type on ethereum"),
        }
    }
}
impl FromRecoveredTx<FoundryTxEnvelope> for OpTransaction<TxEnv> {
    /// Builds an OP-stack transaction environment from a recovered transaction
    /// and its `caller`.
    ///
    /// Panics for Tempo transactions, which are not executable on Optimism.
    fn from_recovered_tx(tx: &FoundryTxEnvelope, caller: Address) -> Self {
        match tx {
            FoundryTxEnvelope::Legacy(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Eip2930(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Eip1559(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Eip4844(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Eip7702(signed_tx) => Self::from_recovered_tx(signed_tx, caller),
            FoundryTxEnvelope::Deposit(sealed_tx) => {
                Self::from_recovered_tx(sealed_tx.inner(), caller)
            }
            FoundryTxEnvelope::Tempo(_) => panic!("unsupported tx type on optimism"),
        }
    }
}
impl std::fmt::Display for FoundryTxType {
    /// Writes the lowercase name of the transaction type.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let label = match self {
            Self::Legacy => "legacy",
            Self::Eip2930 => "eip2930",
            Self::Eip1559 => "eip1559",
            Self::Eip4844 => "eip4844",
            Self::Eip7702 => "eip7702",
            Self::Deposit => "deposit",
            Self::Tempo => "tempo",
        };
        f.write_str(label)
    }
}
impl From<TxType> for FoundryTxType {
    /// Maps a standard Ethereum [`TxType`] to the Foundry-specific type.
    ///
    /// The `Deposit` and `Tempo` variants have no standard counterpart and
    /// therefore cannot be produced by this conversion.
    fn from(tx: TxType) -> Self {
        match tx {
            TxType::Legacy => Self::Legacy,
            TxType::Eip2930 => Self::Eip2930,
            TxType::Eip1559 => Self::Eip1559,
            TxType::Eip4844 => Self::Eip4844,
            TxType::Eip7702 => Self::Eip7702,
        }
    }
}
impl From<FoundryTxEnvelope> for FoundryTypedTx {
    /// Strips the signature (or seal, for deposits) from the envelope, leaving
    /// the raw typed transaction payload.
    fn from(envelope: FoundryTxEnvelope) -> Self {
        match envelope {
            FoundryTxEnvelope::Legacy(signed_tx) => Self::Legacy(signed_tx.strip_signature()),
            FoundryTxEnvelope::Eip2930(signed_tx) => Self::Eip2930(signed_tx.strip_signature()),
            FoundryTxEnvelope::Eip1559(signed_tx) => Self::Eip1559(signed_tx.strip_signature()),
            FoundryTxEnvelope::Eip4844(signed_tx) => Self::Eip4844(signed_tx.strip_signature()),
            FoundryTxEnvelope::Eip7702(signed_tx) => Self::Eip7702(signed_tx.strip_signature()),
            FoundryTxEnvelope::Deposit(sealed_tx) => Self::Deposit(sealed_tx.into_inner()),
            FoundryTxEnvelope::Tempo(signed_tx) => Self::Tempo(signed_tx.strip_signature()),
        }
    }
}
#[cfg(test)]
mod tests {
use std::str::FromStr;
use alloy_primitives::{Bytes, Signature, TxHash, TxKind, U256, b256, hex};
use alloy_rlp::Decodable;
use super::*;
#[test]
fn test_decode_call() {
let bytes_first = &mut &hex::decode("f86b02843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba00eb96ca19e8a77102767a41fc85a36afd5c61ccb09911cec5d3e86e193d9c5aea03a456401896b1b6055311536bf00a718568c744d8c1f9df59879e8350220ca18").unwrap()[..];
let decoded = FoundryTxEnvelope::decode(&mut &bytes_first[..]).unwrap();
let tx = TxLegacy {
nonce: 2u64,
gas_price: 1000000000u128,
gas_limit: 100000,
to: TxKind::Call(Address::from_slice(
&hex::decode("d3e8763675e4c425df46cc3b5c0f6cbdac396046").unwrap()[..],
)),
value: U256::from(1000000000000000u64),
input: Bytes::default(),
chain_id: Some(4),
};
let signature = Signature::from_str("0eb96ca19e8a77102767a41fc85a36afd5c61ccb09911cec5d3e86e193d9c5ae3a456401896b1b6055311536bf00a718568c744d8c1f9df59879e8350220ca182b").unwrap();
let tx = FoundryTxEnvelope::Legacy(Signed::new_unchecked(
tx,
signature,
b256!("0xa517b206d2223278f860ea017d3626cacad4f52ff51030dc9a96b432f17f8d34"),
));
assert_eq!(tx, decoded);
}
#[test]
fn test_decode_create_goerli() {
// test that an example create tx from goerli decodes properly
let tx_bytes =
hex::decode("02f901ee05228459682f008459682f11830209bf8080b90195608060405234801561001057600080fd5b50610175806100206000396000f3fe608060405234801561001057600080fd5b506004361061002b5760003560e01c80630c49c36c14610030575b600080fd5b61003861004e565b604051610045919061011d565b60405180910390f35b60606020600052600f6020527f68656c6c6f2073746174656d696e64000000000000000000000000000000000060405260406000f35b600081519050919050565b600082825260208201905092915050565b60005b838110156100be5780820151818401526020810190506100a3565b838111156100cd576000848401525b50505050565b6000601f19601f8301169050919050565b60006100ef82610084565b6100f9818561008f565b93506101098185602086016100a0565b610112816100d3565b840191505092915050565b6000602082019050818103600083015261013781846100e4565b90509291505056fea264697066735822122051449585839a4ea5ac23cae4552ef8a96b64ff59d0668f76bfac3796b2bdbb3664736f6c63430008090033c080a0136ebffaa8fc8b9fda9124de9ccb0b1f64e90fbd44251b4c4ac2501e60b104f9a07eb2999eec6d185ef57e91ed099afb0a926c5b536f0155dd67e537c7476e1471")
.unwrap();
let _decoded = FoundryTxEnvelope::decode(&mut &tx_bytes[..]).unwrap();
}
#[test]
fn can_recover_sender() {
// random mainnet tx: https://etherscan.io/tx/0x86718885c4b4218c6af87d3d0b0d83e3cc465df2a05c048aa4db9f1a6f9de91f
let bytes = hex::decode("02f872018307910d808507204d2cb1827d0094388c818ca8b9251b393131c08a736a67ccb19297880320d04823e2701c80c001a0cf024f4815304df2867a1a74e9d2707b6abda0337d2d54a4438d453f4160f190a07ac0e6b3bc9395b5b9c8b9e6d77204a236577a5b18467b9175c01de4faa208d9").unwrap();
let Ok(FoundryTxEnvelope::Eip1559(tx)) = FoundryTxEnvelope::decode(&mut &bytes[..]) else {
panic!("decoding FoundryTxEnvelope failed");
};
assert_eq!(
tx.hash(),
&"0x86718885c4b4218c6af87d3d0b0d83e3cc465df2a05c048aa4db9f1a6f9de91f"
.parse::<B256>()
.unwrap()
);
assert_eq!(
tx.recover_signer().unwrap(),
"0x95222290DD7278Aa3Ddd389Cc1E1d165CC4BAfe5".parse::<Address>().unwrap()
);
}
// Test vector from https://sepolia.etherscan.io/tx/0x9a22ccb0029bc8b0ddd073be1a1d923b7ae2b2ea52100bae0db4424f9107e9c0
// Blobscan: https://sepolia.blobscan.com/tx/0x9a22ccb0029bc8b0ddd073be1a1d923b7ae2b2ea52100bae0db4424f9107e9c0
#[test]
fn test_decode_live_4844_tx() {
use alloy_primitives::{address, b256};
// https://sepolia.etherscan.io/getRawTx?tx=0x9a22ccb0029bc8b0ddd073be1a1d923b7ae2b2ea52100bae0db4424f9107e9c0
let raw_tx = alloy_primitives::hex::decode("0x03f9011d83aa36a7820fa28477359400852e90edd0008252089411e9ca82a3a762b4b5bd264d4173a242e7a770648080c08504a817c800f8a5a0012ec3d6f66766bedb002a190126b3549fce0047de0d4c25cffce0dc1c57921aa00152d8e24762ff22b1cfd9f8c0683786a7ca63ba49973818b3d1e9512cd2cec4a0013b98c6c83e066d5b14af2b85199e3d4fc7d1e778dd53130d180f5077e2d1c7a001148b495d6e859114e670ca54fb6e2657f0cbae5b08063605093a4b3dc9f8f1a0011ac212f13c5dff2b2c6b600a79635103d6f580a4221079951181b25c7e654901a0c8de4cced43169f9aa3d36506363b2d2c44f6c49fc1fd91ea114c86f3757077ea01e11fdd0d1934eda0492606ee0bb80a7bf8f35cc5f86ec60fe5031ba48bfd544").unwrap();
let res = FoundryTxEnvelope::decode(&mut raw_tx.as_slice()).unwrap();
assert!(res.is_type(3));
let tx = match res {
FoundryTxEnvelope::Eip4844(tx) => tx,
_ => unreachable!(),
};
assert_eq!(tx.tx().tx().to, address!("0x11E9CA82A3a762b4B5bd264d4173a242e7a77064"));
assert_eq!(
tx.tx().tx().blob_versioned_hashes,
vec![
b256!("0x012ec3d6f66766bedb002a190126b3549fce0047de0d4c25cffce0dc1c57921a"),
b256!("0x0152d8e24762ff22b1cfd9f8c0683786a7ca63ba49973818b3d1e9512cd2cec4"),
b256!("0x013b98c6c83e066d5b14af2b85199e3d4fc7d1e778dd53130d180f5077e2d1c7"),
b256!("0x01148b495d6e859114e670ca54fb6e2657f0cbae5b08063605093a4b3dc9f8f1"),
b256!("0x011ac212f13c5dff2b2c6b600a79635103d6f580a4221079951181b25c7e6549")
]
);
let from = tx.recover_signer().unwrap();
assert_eq!(from, address!("0xA83C816D4f9b2783761a22BA6FADB0eB0606D7B2"));
}
#[test]
fn test_decode_encode_deposit_tx() {
    // Round-trips a live OP-stack deposit transaction (type 0x7e) through
    // decode -> encode_2718 and checks the hash matches the on-chain tx.
    // https://sepolia-optimism.etherscan.io/tx/0xbf8b5f08c43e4b860715cd64fc0849bbce0d0ea20a76b269e7bc8886d112fca7
    let tx_hash: TxHash = "0xbf8b5f08c43e4b860715cd64fc0849bbce0d0ea20a76b269e7bc8886d112fca7"
        .parse::<TxHash>()
        .unwrap();
    // https://sepolia-optimism.etherscan.io/getRawTx?tx=0xbf8b5f08c43e4b860715cd64fc0849bbce0d0ea20a76b269e7bc8886d112fca7
    let raw_tx = alloy_primitives::hex::decode(
        "7ef861a0dfd7ae78bf3c414cfaa77f13c0205c82eb9365e217b2daa3448c3156b69b27ac94778f2146f48179643473b82931c4cd7b8f153efd94778f2146f48179643473b82931c4cd7b8f153efd872386f26fc10000872386f26fc10000830186a08080",
    )
    .unwrap();
    let dep_tx = FoundryTxEnvelope::decode(&mut raw_tx.as_slice()).unwrap();
    // Re-encoding must reproduce the exact raw bytes (lossless round-trip).
    let mut encoded = Vec::new();
    dep_tx.encode_2718(&mut encoded);
    assert_eq!(raw_tx, encoded);
    assert_eq!(tx_hash, dep_tx.hash());
}
#[test]
fn can_recover_sender_not_normalized() {
    // Decodes a legacy transaction and verifies every decoded field plus
    // sender recovery. ("not normalized" presumably refers to the signature
    // values in this vector not being in canonical form — TODO confirm.)
    let bytes = hex::decode("f85f800182520894095e7baea6a6c7c4c2dfeb977efac326af552d870a801ba048b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353a0efffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804").unwrap();
    let Ok(FoundryTxEnvelope::Legacy(tx)) = FoundryTxEnvelope::decode(&mut &bytes[..]) else {
        panic!("decoding FoundryTxEnvelope failed");
    };
    assert_eq!(tx.tx().input, Bytes::from(b""));
    assert_eq!(tx.tx().gas_price, 1);
    assert_eq!(tx.tx().gas_limit, 21000);
    assert_eq!(tx.tx().nonce, 0);
    // Legacy `to` is a TxKind: must be a call, not a create.
    if let TxKind::Call(to) = tx.tx().to {
        assert_eq!(
            to,
            "0x095e7baea6a6c7c4c2dfeb977efac326af552d87".parse::<Address>().unwrap()
        );
    } else {
        panic!("expected a call transaction");
    }
    assert_eq!(tx.tx().value, U256::from(0x0au64));
    assert_eq!(
        tx.recover_signer().unwrap(),
        "0f65fe9276bc9a24ae7083ae28e2660ef72df99e".parse::<Address>().unwrap()
    );
}
#[test]
fn deser_to_type_tx() {
    // A `"type": "0x2"` (EIP-1559) RPC-style JSON transaction must
    // deserialize into the envelope without error.
    let tx = r#"
    {
        "type": "0x2",
        "chainId": "0x7a69",
        "nonce": "0x0",
        "gas": "0x5209",
        "maxFeePerGas": "0x77359401",
        "maxPriorityFeePerGas": "0x1",
        "to": "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
        "value": "0x0",
        "accessList": [],
        "input": "0x",
        "r": "0x85c2794a580da137e24ccc823b45ae5cea99371ae23ee13860fcc6935f8305b0",
        "s": "0x41de7fa4121dab284af4453d30928241208bafa90cdb701fe9bc7054759fe3cd",
        "yParity": "0x0",
        "hash": "0x8c9b68e8947ace33028dba167354fde369ed7bbe34911b772d09b3c64b861515"
    }"#;
    let _typed_tx: FoundryTxEnvelope = serde_json::from_str(tx).unwrap();
}
#[test]
fn test_from_recovered_tx_legacy() {
    // Deserializes a legacy JSON transaction, recovers the sender, and
    // checks the `FromRecoveredTx` conversions into both `TxEnv` and
    // `OpTransaction<TxEnv>` preserve caller / gas fields.
    let tx = r#"
    {
        "type": "0x0",
        "chainId": "0x1",
        "nonce": "0x0",
        "gas": "0x5208",
        "gasPrice": "0x1",
        "to": "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
        "value": "0x1",
        "input": "0x",
        "r": "0x85c2794a580da137e24ccc823b45ae5cea99371ae23ee13860fcc6935f8305b0",
        "s": "0x41de7fa4121dab284af4453d30928241208bafa90cdb701fe9bc7054759fe3cd",
        "v": "0x1b",
        "hash": "0x8c9b68e8947ace33028dba167354fde369ed7bbe34911b772d09b3c64b861515"
    }"#;
    let typed_tx: FoundryTxEnvelope = serde_json::from_str(tx).unwrap();
    let sender = typed_tx.recover().unwrap();
    // Test TxEnv conversion via FromRecoveredTx trait
    let tx_env = TxEnv::from_recovered_tx(&typed_tx, sender);
    assert_eq!(tx_env.caller, sender);
    assert_eq!(tx_env.gas_limit, 0x5208);
    assert_eq!(tx_env.gas_price, 1);
    // Test OpTransaction<TxEnv> conversion via FromRecoveredTx trait
    let op_tx = OpTransaction::<TxEnv>::from_recovered_tx(&typed_tx, sender);
    assert_eq!(op_tx.base.caller, sender);
    assert_eq!(op_tx.base.gas_limit, 0x5208);
}
// Test vector from Tempo testnet:
// https://explorer.testnet.tempo.xyz/tx/0x6d6d8c102064e6dee44abad2024a8b1d37959230baab80e70efbf9b0c739c4fd
#[test]
fn test_decode_encode_tempo_tx() {
    use alloy_primitives::address;
    use tempo_primitives::TEMPO_TX_TYPE_ID;
    let tx_hash: TxHash = "0x6d6d8c102064e6dee44abad2024a8b1d37959230baab80e70efbf9b0c739c4fd"
        .parse::<TxHash>()
        .unwrap();
    // Raw transaction from Tempo testnet via eth_getRawTransactionByHash
    let raw_tx = hex::decode(
        "76f9025e82a5bd808502cb4178008302d178f8fcf85c9420c000000000000000000000000000000000000080b844095ea7b3000000000000000000000000dec00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000989680f89c94dec000000000000000000000000000000000000080b884f8856c0f00000000000000000000000020c000000000000000000000000000000000000000000000000000000000000020c00000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000989680000000000000000000000000000000000000000000000000000000000097d330c0808080809420c000000000000000000000000000000000000180c0b90133027b98b7a8e6c68d7eac741a52e6fdae0560ce3c16ef5427ad46d7a54d0ed86dd41d000000007b2274797065223a22776562617574686e2e676574222c226368616c6c656e6765223a2238453071464a7a50585167546e645473643649456659457776323173516e626966374c4741776e4b43626b222c226f726967696e223a2268747470733a2f2f74656d706f2d6465782e76657263656c2e617070222c2263726f73734f726967696e223a66616c73657dcfd45c3b19745a42f80b134dcb02a8ba099a0e4e7be1984da54734aa81d8f29f74bb9170ae6d25bd510c83fe35895ee5712efe13980a5edc8094c534e23af85eaacc80b21e45fb11f349424dce3a2f23547f60c0ff2f8bcaede2a247545ce8dd87abf0dbb7a5c9507efae2e43833356651b45ac576c2e61cec4e9c0f41fcbf6e",
    )
    .unwrap();
    let tempo_tx = FoundryTxEnvelope::decode(&mut raw_tx.as_slice()).unwrap();
    // Verify it's a Tempo transaction (type 0x76)
    assert!(tempo_tx.is_type(TEMPO_TX_TYPE_ID));
    let FoundryTxEnvelope::Tempo(ref aa_signed) = tempo_tx else {
        panic!("Expected Tempo transaction");
    };
    // Verify the chain ID
    assert_eq!(aa_signed.tx().chain_id, 42429);
    // Verify the fee token
    assert_eq!(
        aa_signed.tx().fee_token,
        Some(address!("0x20C0000000000000000000000000000000000001"))
    );
    // Verify gas limit
    assert_eq!(aa_signed.tx().gas_limit, 184696);
    // Verify we have 2 calls
    assert_eq!(aa_signed.tx().calls.len(), 2);
    // Verify the hash
    assert_eq!(tx_hash, tempo_tx.hash());
    // Verify round-trip encoding
    let mut encoded = Vec::new();
    tempo_tx.encode_2718(&mut encoded);
    assert_eq!(raw_tx, encoded);
    // Verify sender recovery (WebAuthn signature)
    let sender = tempo_tx.recover().unwrap();
    assert_eq!(sender, address!("0x566Ff0f4a6114F8072ecDC8A7A8A13d8d0C6B45F"));
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/primitives/src/transaction/receipt.rs | crates/primitives/src/transaction/receipt.rs | use alloy_consensus::{
Eip658Value, Receipt, ReceiptEnvelope, ReceiptWithBloom, TxReceipt, Typed2718,
};
use alloy_network::eip2718::{
Decodable2718, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, EIP7702_TX_TYPE_ID,
Eip2718Error, Encodable2718, LEGACY_TX_TYPE_ID,
};
use alloy_primitives::{Bloom, Log, TxHash, logs_bloom};
use alloy_rlp::{BufMut, Decodable, Encodable, Header, bytes};
use alloy_rpc_types::{BlockNumHash, trace::otterscan::OtsReceipt};
use op_alloy_consensus::{DEPOSIT_TX_TYPE_ID, OpDepositReceipt, OpDepositReceiptWithBloom};
use serde::{Deserialize, Serialize};
use tempo_primitives::TEMPO_TX_TYPE_ID;
use crate::FoundryTxType;
/// Receipt envelope covering every transaction type Foundry handles: the
/// standard Ethereum types, the OP-stack deposit type (0x7e), and the Tempo
/// type (0x76).
///
/// Serialized with a `"type"` tag so the JSON shape matches RPC receipts.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum FoundryReceiptEnvelope<T = Log> {
    /// Legacy (pre-EIP-2718) receipt.
    #[serde(rename = "0x0", alias = "0x00")]
    Legacy(ReceiptWithBloom<Receipt<T>>),
    /// EIP-2930 (access-list) receipt.
    #[serde(rename = "0x1", alias = "0x01")]
    Eip2930(ReceiptWithBloom<Receipt<T>>),
    /// EIP-1559 (dynamic-fee) receipt.
    #[serde(rename = "0x2", alias = "0x02")]
    Eip1559(ReceiptWithBloom<Receipt<T>>),
    /// EIP-4844 (blob) receipt.
    #[serde(rename = "0x3", alias = "0x03")]
    Eip4844(ReceiptWithBloom<Receipt<T>>),
    /// EIP-7702 (set-code) receipt.
    #[serde(rename = "0x4", alias = "0x04")]
    Eip7702(ReceiptWithBloom<Receipt<T>>),
    /// OP-stack deposit receipt; carries extra deposit nonce/version fields.
    #[serde(rename = "0x7E", alias = "0x7e")]
    Deposit(OpDepositReceiptWithBloom<T>),
    /// Tempo receipt.
    #[serde(rename = "0x76")]
    Tempo(ReceiptWithBloom<Receipt<T>>),
}
impl FoundryReceiptEnvelope<alloy_rpc_types::Log> {
    /// Creates a new [`FoundryReceiptEnvelope`] from the given parts.
    ///
    /// The logs bloom is recomputed from the supplied logs. The deposit
    /// nonce/version are only used for [`FoundryTxType::Deposit`].
    pub fn from_parts(
        status: bool,
        cumulative_gas_used: u64,
        logs: impl IntoIterator<Item = alloy_rpc_types::Log>,
        tx_type: FoundryTxType,
        deposit_nonce: Option<u64>,
        deposit_receipt_version: Option<u64>,
    ) -> Self {
        let logs: Vec<_> = logs.into_iter().collect();
        let logs_bloom = logs_bloom(logs.iter().map(|l| &l.inner).collect::<Vec<_>>());
        let receipt = Receipt { status: Eip658Value::Eip658(status), cumulative_gas_used, logs };
        // Deposit receipts carry extra OP-stack fields, so handle them first.
        if matches!(tx_type, FoundryTxType::Deposit) {
            return Self::Deposit(OpDepositReceiptWithBloom {
                receipt: OpDepositReceipt {
                    inner: receipt,
                    deposit_nonce,
                    deposit_receipt_version,
                },
                logs_bloom,
            });
        }
        // Every remaining variant wraps a plain `ReceiptWithBloom`.
        let inner = ReceiptWithBloom { receipt, logs_bloom };
        match tx_type {
            FoundryTxType::Legacy => Self::Legacy(inner),
            FoundryTxType::Eip2930 => Self::Eip2930(inner),
            FoundryTxType::Eip1559 => Self::Eip1559(inner),
            FoundryTxType::Eip4844 => Self::Eip4844(inner),
            FoundryTxType::Eip7702 => Self::Eip7702(inner),
            FoundryTxType::Tempo => Self::Tempo(inner),
            FoundryTxType::Deposit => unreachable!("handled above"),
        }
    }
}
impl FoundryReceiptEnvelope<Log> {
    /// Converts the consensus-level logs of this receipt into RPC logs,
    /// attaching the given block/transaction metadata, and returns the
    /// receipt re-built over [`alloy_rpc_types::Log`].
    ///
    /// `next_log_index` is the log index of the first log of this receipt
    /// within its block; subsequent logs are numbered consecutively.
    pub fn convert_logs_rpc(
        self,
        block_numhash: BlockNumHash,
        block_timestamp: u64,
        transaction_hash: TxHash,
        transaction_index: u64,
        next_log_index: usize,
    ) -> FoundryReceiptEnvelope<alloy_rpc_types::Log> {
        let mut rpc_logs = Vec::with_capacity(self.logs().len());
        for (offset, log) in self.logs().iter().enumerate() {
            rpc_logs.push(alloy_rpc_types::Log {
                inner: log.clone(),
                block_hash: Some(block_numhash.hash),
                block_number: Some(block_numhash.number),
                block_timestamp: Some(block_timestamp),
                transaction_hash: Some(transaction_hash),
                transaction_index: Some(transaction_index),
                log_index: Some((next_log_index + offset) as u64),
                removed: false,
            });
        }
        FoundryReceiptEnvelope::<alloy_rpc_types::Log>::from_parts(
            self.status(),
            self.cumulative_gas_used(),
            rpc_logs,
            self.tx_type(),
            self.deposit_nonce(),
            self.deposit_receipt_version(),
        )
    }
}
impl<T> FoundryReceiptEnvelope<T> {
    /// Return the [`FoundryTxType`] of the inner receipt.
    pub const fn tx_type(&self) -> FoundryTxType {
        match self {
            Self::Legacy(_) => FoundryTxType::Legacy,
            Self::Eip2930(_) => FoundryTxType::Eip2930,
            Self::Eip1559(_) => FoundryTxType::Eip1559,
            Self::Eip4844(_) => FoundryTxType::Eip4844,
            Self::Eip7702(_) => FoundryTxType::Eip7702,
            Self::Deposit(_) => FoundryTxType::Deposit,
            Self::Tempo(_) => FoundryTxType::Tempo,
        }
    }
    /// Returns the success status of the receipt's transaction.
    pub const fn status(&self) -> bool {
        // `coerce_status` maps pre-EIP-658 state roots to `true` as well.
        self.as_receipt().status.coerce_status()
    }
    /// Returns the cumulative gas used at this receipt.
    pub const fn cumulative_gas_used(&self) -> u64 {
        self.as_receipt().cumulative_gas_used
    }
    /// Converts the receipt's log type by applying a function to each log.
    ///
    /// Returns the receipt with the new log type.
    pub fn map_logs<U>(self, f: impl FnMut(T) -> U) -> FoundryReceiptEnvelope<U> {
        match self {
            Self::Legacy(r) => FoundryReceiptEnvelope::Legacy(r.map_logs(f)),
            Self::Eip2930(r) => FoundryReceiptEnvelope::Eip2930(r.map_logs(f)),
            Self::Eip1559(r) => FoundryReceiptEnvelope::Eip1559(r.map_logs(f)),
            Self::Eip4844(r) => FoundryReceiptEnvelope::Eip4844(r.map_logs(f)),
            Self::Eip7702(r) => FoundryReceiptEnvelope::Eip7702(r.map_logs(f)),
            // Deposit wraps an extra layer, so map through the inner receipt.
            Self::Deposit(r) => FoundryReceiptEnvelope::Deposit(r.map_receipt(|r| r.map_logs(f))),
            Self::Tempo(r) => FoundryReceiptEnvelope::Tempo(r.map_logs(f)),
        }
    }
    /// Return the receipt logs.
    pub fn logs(&self) -> &[T] {
        &self.as_receipt().logs
    }
    /// Consumes the type and returns the logs.
    pub fn into_logs(self) -> Vec<T> {
        self.into_receipt().logs
    }
    /// Return the receipt's bloom.
    pub const fn logs_bloom(&self) -> &Bloom {
        match self {
            Self::Legacy(t) => &t.logs_bloom,
            Self::Eip2930(t) => &t.logs_bloom,
            Self::Eip1559(t) => &t.logs_bloom,
            Self::Eip4844(t) => &t.logs_bloom,
            Self::Eip7702(t) => &t.logs_bloom,
            Self::Deposit(t) => &t.logs_bloom,
            Self::Tempo(t) => &t.logs_bloom,
        }
    }
    /// Return the receipt's deposit_nonce if it is a deposit receipt.
    ///
    /// Returns `None` for non-deposit receipts and for deposit receipts
    /// without a nonce.
    pub fn deposit_nonce(&self) -> Option<u64> {
        self.as_deposit_receipt().and_then(|r| r.deposit_nonce)
    }
    /// Return the receipt's deposit version if it is a deposit receipt.
    pub fn deposit_receipt_version(&self) -> Option<u64> {
        self.as_deposit_receipt().and_then(|r| r.deposit_receipt_version)
    }
    /// Returns the deposit receipt if it is a deposit receipt.
    pub const fn as_deposit_receipt_with_bloom(&self) -> Option<&OpDepositReceiptWithBloom<T>> {
        match self {
            Self::Deposit(t) => Some(t),
            _ => None,
        }
    }
    /// Returns the deposit receipt if it is a deposit receipt.
    pub const fn as_deposit_receipt(&self) -> Option<&OpDepositReceipt<T>> {
        match self {
            Self::Deposit(t) => Some(&t.receipt),
            _ => None,
        }
    }
    /// Consumes the type and returns the underlying [`Receipt`].
    ///
    /// For deposit receipts the OP-specific fields are discarded.
    pub fn into_receipt(self) -> Receipt<T> {
        match self {
            Self::Legacy(t)
            | Self::Eip2930(t)
            | Self::Eip1559(t)
            | Self::Eip4844(t)
            | Self::Eip7702(t)
            | Self::Tempo(t) => t.receipt,
            Self::Deposit(t) => t.receipt.into_inner(),
        }
    }
    /// Return the inner receipt.
    pub const fn as_receipt(&self) -> &Receipt<T> {
        match self {
            Self::Legacy(t)
            | Self::Eip2930(t)
            | Self::Eip1559(t)
            | Self::Eip4844(t)
            | Self::Eip7702(t)
            | Self::Tempo(t) => &t.receipt,
            Self::Deposit(t) => &t.receipt.inner,
        }
    }
}
/// [`TxReceipt`] implementation that delegates to the inherent accessors
/// defined above.
impl<T> TxReceipt for FoundryReceiptEnvelope<T>
where
    T: Clone + core::fmt::Debug + PartialEq + Eq + Send + Sync,
{
    type Log = T;
    fn status_or_post_state(&self) -> Eip658Value {
        self.as_receipt().status
    }
    fn status(&self) -> bool {
        // Delegates to the inherent `status` (coerced to bool).
        self.status()
    }
    /// Return the receipt's bloom.
    fn bloom(&self) -> Bloom {
        *self.logs_bloom()
    }
    fn bloom_cheap(&self) -> Option<Bloom> {
        // The bloom is stored, not recomputed, so it is always cheap.
        Some(self.bloom())
    }
    /// Returns the cumulative gas used at this receipt.
    fn cumulative_gas_used(&self) -> u64 {
        self.cumulative_gas_used()
    }
    /// Return the receipt logs.
    fn logs(&self) -> &[T] {
        self.logs()
    }
}
impl Encodable for FoundryReceiptEnvelope {
    /// RLP-encodes the receipt (network encoding, e.g. as an element of a
    /// receipts list).
    ///
    /// Legacy receipts are encoded as a plain RLP list. Typed receipts are
    /// encoded as an RLP *string* whose payload is the EIP-2718 envelope
    /// (`type-byte || rlp(receipt)`) — this is the shape the [`Decodable`]
    /// impl below expects for its non-legacy (string) branch.
    fn encode(&self, out: &mut dyn bytes::BufMut) {
        match self {
            // Legacy receipts carry no type byte and are not string-wrapped.
            Self::Legacy(r) => r.encode(out),
            // BUGFIX: this previously wrapped typed receipts with
            // `Header { list: true, .. }`, producing a list prefix (>= 0xc0)
            // that the `Decodable` impl would misinterpret as a legacy
            // receipt; typed receipts must be wrapped as an RLP string.
            Self::Deposit(r) => {
                Header { list: false, payload_length: 1 + r.length() }.encode(out);
                DEPOSIT_TX_TYPE_ID.encode(out);
                r.encode(out);
            }
            // All remaining typed variants share the same inner receipt type.
            Self::Eip2930(r)
            | Self::Eip1559(r)
            | Self::Eip4844(r)
            | Self::Eip7702(r)
            | Self::Tempo(r) => {
                Header { list: false, payload_length: 1 + r.length() }.encode(out);
                // Type ids are all < 0x80, so `u8::encode` writes the byte as-is.
                self.ty().encode(out);
                r.encode(out);
            }
        }
    }
}
impl Decodable for FoundryReceiptEnvelope {
fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
use bytes::Buf;
use std::cmp::Ordering;
// a receipt is either encoded as a string (non legacy) or a list (legacy).
// We should not consume the buffer if we are decoding a legacy receipt, so let's
// check if the first byte is between 0x80 and 0xbf.
let rlp_type = *buf
.first()
.ok_or(alloy_rlp::Error::Custom("cannot decode a receipt from empty bytes"))?;
match rlp_type.cmp(&alloy_rlp::EMPTY_LIST_CODE) {
Ordering::Less => {
// strip out the string header
let _header = Header::decode(buf)?;
let receipt_type = *buf.first().ok_or(alloy_rlp::Error::Custom(
"typed receipt cannot be decoded from an empty slice",
))?;
if receipt_type == EIP2930_TX_TYPE_ID {
buf.advance(1);
<ReceiptWithBloom as Decodable>::decode(buf)
.map(FoundryReceiptEnvelope::Eip2930)
} else if receipt_type == EIP1559_TX_TYPE_ID {
buf.advance(1);
<ReceiptWithBloom as Decodable>::decode(buf)
.map(FoundryReceiptEnvelope::Eip1559)
} else if receipt_type == EIP4844_TX_TYPE_ID {
buf.advance(1);
<ReceiptWithBloom as Decodable>::decode(buf)
.map(FoundryReceiptEnvelope::Eip4844)
} else if receipt_type == EIP7702_TX_TYPE_ID {
buf.advance(1);
<ReceiptWithBloom as Decodable>::decode(buf)
.map(FoundryReceiptEnvelope::Eip7702)
} else if receipt_type == DEPOSIT_TX_TYPE_ID {
buf.advance(1);
<OpDepositReceiptWithBloom as Decodable>::decode(buf)
.map(FoundryReceiptEnvelope::Deposit)
} else if receipt_type == TEMPO_TX_TYPE_ID {
buf.advance(1);
<ReceiptWithBloom as Decodable>::decode(buf).map(FoundryReceiptEnvelope::Tempo)
} else {
Err(alloy_rlp::Error::Custom("invalid receipt type"))
}
}
Ordering::Equal => {
Err(alloy_rlp::Error::Custom("an empty list is not a valid receipt encoding"))
}
Ordering::Greater => {
<ReceiptWithBloom as Decodable>::decode(buf).map(FoundryReceiptEnvelope::Legacy)
}
}
}
}
impl Typed2718 for FoundryReceiptEnvelope {
    /// Returns the EIP-2718 type byte for the receipt's transaction type.
    fn ty(&self) -> u8 {
        match self {
            Self::Legacy(_) => LEGACY_TX_TYPE_ID,
            Self::Eip2930(_) => EIP2930_TX_TYPE_ID,
            Self::Eip1559(_) => EIP1559_TX_TYPE_ID,
            Self::Eip4844(_) => EIP4844_TX_TYPE_ID,
            Self::Eip7702(_) => EIP7702_TX_TYPE_ID,
            Self::Deposit(_) => DEPOSIT_TX_TYPE_ID,
            Self::Tempo(_) => TEMPO_TX_TYPE_ID,
        }
    }
}
impl Encodable2718 for FoundryReceiptEnvelope {
    /// Length of the EIP-2718 encoding: the inner RLP length, plus one byte
    /// for the type flag on non-legacy receipts.
    ///
    /// Computed directly from the inner receipt; the previous version cloned
    /// four variants into a temporary [`ReceiptEnvelope`] just to measure
    /// them, which allocated for no reason and was inconsistent with the
    /// other arms.
    fn encode_2718_len(&self) -> usize {
        match self {
            // Legacy receipts have no type byte.
            Self::Legacy(r) => r.length(),
            Self::Eip2930(r)
            | Self::Eip1559(r)
            | Self::Eip4844(r)
            | Self::Eip7702(r)
            | Self::Tempo(r) => 1 + r.length(),
            Self::Deposit(r) => 1 + r.length(),
        }
    }
    /// Writes the EIP-2718 encoding: the optional type byte followed by the
    /// RLP of the receipt-with-bloom.
    fn encode_2718(&self, out: &mut dyn BufMut) {
        // `type_flag` is `None` for legacy receipts.
        if let Some(ty) = self.type_flag() {
            out.put_u8(ty);
        }
        match self {
            Self::Legacy(r)
            | Self::Eip2930(r)
            | Self::Eip1559(r)
            | Self::Eip4844(r)
            | Self::Eip7702(r)
            | Self::Tempo(r) => r.encode(out),
            Self::Deposit(r) => r.encode(out),
        }
    }
}
impl Decodable2718 for FoundryReceiptEnvelope {
fn typed_decode(ty: u8, buf: &mut &[u8]) -> Result<Self, Eip2718Error> {
if ty == DEPOSIT_TX_TYPE_ID {
return Ok(Self::Deposit(OpDepositReceiptWithBloom::decode(buf)?));
}
if ty == TEMPO_TX_TYPE_ID {
return Ok(Self::Tempo(ReceiptWithBloom::decode(buf)?));
}
match ReceiptEnvelope::typed_decode(ty, buf)? {
ReceiptEnvelope::Eip2930(tx) => Ok(Self::Eip2930(tx)),
ReceiptEnvelope::Eip1559(tx) => Ok(Self::Eip1559(tx)),
ReceiptEnvelope::Eip4844(tx) => Ok(Self::Eip4844(tx)),
ReceiptEnvelope::Eip7702(tx) => Ok(Self::Eip7702(tx)),
_ => Err(Eip2718Error::RlpError(alloy_rlp::Error::Custom("unexpected tx type"))),
}
}
fn fallback_decode(buf: &mut &[u8]) -> Result<Self, Eip2718Error> {
match ReceiptEnvelope::fallback_decode(buf)? {
ReceiptEnvelope::Legacy(tx) => Ok(Self::Legacy(tx)),
_ => Err(Eip2718Error::RlpError(alloy_rlp::Error::Custom("unexpected tx type"))),
}
}
}
/// Converts the receipt into the Otterscan RPC receipt shape, copying the
/// status, gas, logs, bloom, and type byte.
impl From<FoundryReceiptEnvelope<alloy_rpc_types::Log>> for OtsReceipt {
    fn from(receipt: FoundryReceiptEnvelope<alloy_rpc_types::Log>) -> Self {
        let status = receipt.status();
        let cumulative_gas_used = receipt.cumulative_gas_used();
        let r#type = receipt.tx_type() as u8;
        Self {
            status,
            cumulative_gas_used,
            logs: Some(receipt.logs().to_vec()),
            logs_bloom: Some(*receipt.logs_bloom()),
            r#type,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_primitives::{Address, B256, Bytes, LogData, hex};
    use std::str::FromStr;
    // Encoding a legacy receipt must produce a plain RLP list (no type byte)
    // and `length()` must agree with the encoded size.
    #[test]
    fn encode_legacy_receipt() {
        let expected = hex::decode("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff").unwrap();
        let mut data = vec![];
        let receipt = FoundryReceiptEnvelope::Legacy(ReceiptWithBloom {
            receipt: Receipt {
                status: false.into(),
                cumulative_gas_used: 0x1,
                logs: vec![Log {
                    address: Address::from_str("0000000000000000000000000000000000000011").unwrap(),
                    data: LogData::new_unchecked(
                        vec![
                            B256::from_str(
                                "000000000000000000000000000000000000000000000000000000000000dead",
                            )
                            .unwrap(),
                            B256::from_str(
                                "000000000000000000000000000000000000000000000000000000000000beef",
                            )
                            .unwrap(),
                        ],
                        Bytes::from_str("0100ff").unwrap(),
                    ),
                }],
            },
            logs_bloom: [0; 256].into(),
        });
        receipt.encode(&mut data);
        // check that the rlp length equals the length of the expected rlp
        assert_eq!(receipt.length(), expected.len());
        assert_eq!(data, expected);
    }
    // Mirror of `encode_legacy_receipt`: decoding the same bytes must yield
    // the identical envelope value.
    #[test]
    fn decode_legacy_receipt() {
        let data = hex::decode("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff").unwrap();
        let expected = FoundryReceiptEnvelope::Legacy(ReceiptWithBloom {
            receipt: Receipt {
                status: false.into(),
                cumulative_gas_used: 0x1,
                logs: vec![Log {
                    address: Address::from_str("0000000000000000000000000000000000000011").unwrap(),
                    data: LogData::new_unchecked(
                        vec![
                            B256::from_str(
                                "000000000000000000000000000000000000000000000000000000000000dead",
                            )
                            .unwrap(),
                            B256::from_str(
                                "000000000000000000000000000000000000000000000000000000000000beef",
                            )
                            .unwrap(),
                        ],
                        Bytes::from_str("0100ff").unwrap(),
                    ),
                }],
            },
            logs_bloom: [0; 256].into(),
        });
        let receipt = FoundryReceiptEnvelope::decode(&mut &data[..]).unwrap();
        assert_eq!(receipt, expected);
    }
    // A Tempo receipt with one log: accessors, 2718 encoding (type byte
    // first), and a decode round-trip back to the same value.
    #[test]
    fn encode_tempo_receipt() {
        use alloy_network::eip2718::Encodable2718;
        use tempo_primitives::TEMPO_TX_TYPE_ID;
        let receipt = FoundryReceiptEnvelope::Tempo(ReceiptWithBloom {
            receipt: Receipt {
                status: true.into(),
                cumulative_gas_used: 157716,
                logs: vec![Log {
                    address: Address::from_str("20c0000000000000000000000000000000000000").unwrap(),
                    data: LogData::new_unchecked(
                        vec![
                            B256::from_str(
                                "8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925",
                            )
                            .unwrap(),
                            B256::from_str(
                                "000000000000000000000000566ff0f4a6114f8072ecdc8a7a8a13d8d0c6b45f",
                            )
                            .unwrap(),
                            B256::from_str(
                                "000000000000000000000000dec0000000000000000000000000000000000000",
                            )
                            .unwrap(),
                        ],
                        Bytes::from_str(
                            "0000000000000000000000000000000000000000000000000000000000989680",
                        )
                        .unwrap(),
                    ),
                }],
            },
            logs_bloom: [0; 256].into(),
        });
        assert_eq!(receipt.tx_type(), FoundryTxType::Tempo);
        assert_eq!(receipt.ty(), TEMPO_TX_TYPE_ID);
        assert!(receipt.status());
        assert_eq!(receipt.cumulative_gas_used(), 157716);
        assert_eq!(receipt.logs().len(), 1);
        // Encode and decode round-trip
        let mut encoded = Vec::new();
        receipt.encode_2718(&mut encoded);
        // First byte should be the Tempo type ID
        assert_eq!(encoded[0], TEMPO_TX_TYPE_ID);
        // Decode it back
        let decoded = FoundryReceiptEnvelope::decode(&mut &encoded[..]).unwrap();
        assert_eq!(receipt, decoded);
    }
    // Minimal (log-free) Tempo receipt round-trip through encode_2718 /
    // decode_2718.
    #[test]
    fn decode_tempo_receipt() {
        use alloy_network::eip2718::Encodable2718;
        use tempo_primitives::TEMPO_TX_TYPE_ID;
        let receipt = FoundryReceiptEnvelope::Tempo(ReceiptWithBloom {
            receipt: Receipt { status: true.into(), cumulative_gas_used: 21000, logs: vec![] },
            logs_bloom: [0; 256].into(),
        });
        // Encode and decode via 2718
        let mut encoded = Vec::new();
        receipt.encode_2718(&mut encoded);
        assert_eq!(encoded[0], TEMPO_TX_TYPE_ID);
        use alloy_network::eip2718::Decodable2718;
        let decoded = FoundryReceiptEnvelope::decode_2718(&mut &encoded[..]).unwrap();
        assert_eq!(receipt, decoded);
    }
    // `from_parts` with the Tempo type: deposit-specific fields must remain
    // unset since they only apply to deposit receipts.
    #[test]
    fn tempo_receipt_from_parts() {
        let receipt = FoundryReceiptEnvelope::<alloy_rpc_types::Log>::from_parts(
            true,
            100000,
            vec![],
            FoundryTxType::Tempo,
            None,
            None,
        );
        assert_eq!(receipt.tx_type(), FoundryTxType::Tempo);
        assert!(receipt.status());
        assert_eq!(receipt.cumulative_gas_used(), 100000);
        assert!(receipt.logs().is_empty());
        assert!(receipt.deposit_nonce().is_none());
        assert!(receipt.deposit_receipt_version().is_none());
    }
    // `map_logs` must preserve the variant and log count for Tempo receipts.
    #[test]
    fn tempo_receipt_map_logs() {
        let receipt = FoundryReceiptEnvelope::Tempo(ReceiptWithBloom {
            receipt: Receipt {
                status: true.into(),
                cumulative_gas_used: 21000,
                logs: vec![Log {
                    address: Address::from_str("20c0000000000000000000000000000000000000").unwrap(),
                    data: LogData::new_unchecked(vec![], Bytes::default()),
                }],
            },
            logs_bloom: [0; 256].into(),
        });
        // Map logs to a different type (just clone in this case)
        let mapped = receipt.map_logs(|log| log);
        assert_eq!(mapped.logs().len(), 1);
        assert_eq!(mapped.tx_type(), FoundryTxType::Tempo);
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/primitives/src/transaction/mod.rs | crates/primitives/src/transaction/mod.rs | mod envelope;
mod receipt;
mod request;
pub use envelope::{FoundryTxEnvelope, FoundryTxType, FoundryTypedTx};
pub use receipt::FoundryReceiptEnvelope;
pub use request::{FoundryTransactionRequest, get_deposit_tx_parts};
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/primitives/src/transaction/request.rs | crates/primitives/src/transaction/request.rs | use alloy_consensus::EthereumTypedTransaction;
use alloy_network::{
BuildResult, NetworkWallet, TransactionBuilder, TransactionBuilder4844, TransactionBuilderError,
};
use alloy_primitives::{Address, B256, ChainId, TxKind, U256};
use alloy_rpc_types::{AccessList, TransactionInputKind, TransactionRequest};
use alloy_serde::{OtherFields, WithOtherFields};
use derive_more::{AsMut, AsRef, From, Into};
use op_alloy_consensus::{DEPOSIT_TX_TYPE_ID, TxDeposit};
use op_revm::transaction::deposit::DepositTransactionParts;
use serde::{Deserialize, Serialize};
use tempo_primitives::{TEMPO_TX_TYPE_ID, TempoTransaction, transaction::Call};
use super::{FoundryTxEnvelope, FoundryTxType, FoundryTypedTx};
use crate::FoundryNetwork;
/// Foundry transaction request builder.
///
/// This is implemented as a wrapper around [`WithOtherFields<TransactionRequest>`],
/// which provides handling of deposit transactions.
// Newtype so Foundry-specific builder logic (deposit / Tempo handling) can be
// layered on top of the standard request; `From`/`Into`/`AsRef`/`AsMut` give
// cheap conversion to and from the inner type.
#[derive(Clone, Debug, Default, PartialEq, Eq, From, Into, AsRef, AsMut)]
pub struct FoundryTransactionRequest(WithOtherFields<TransactionRequest>);
/// Serializes transparently as the inner [`WithOtherFields<TransactionRequest>`],
/// so the JSON shape is identical to a plain RPC transaction request.
impl Serialize for FoundryTransactionRequest {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.as_ref().serialize(serializer)
    }
}
/// Deserializes transparently from the inner [`WithOtherFields<TransactionRequest>`]
/// representation (mirror of the [`Serialize`] impl above).
impl<'de> Deserialize<'de> for FoundryTransactionRequest {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        WithOtherFields::<TransactionRequest>::deserialize(deserializer).map(Self)
    }
}
impl FoundryTransactionRequest {
/// Create a new [`FoundryTransactionRequest`] from given
/// [`WithOtherFields<TransactionRequest>`].
#[inline]
pub fn new(inner: WithOtherFields<TransactionRequest>) -> Self {
    Self(inner)
}
/// Consume self and return the inner [`WithOtherFields<TransactionRequest>`].
#[inline]
pub fn into_inner(self) -> WithOtherFields<TransactionRequest> {
    self.0
}
/// Check if this is a deposit transaction.
///
/// True iff the transaction type is explicitly set to the OP-stack deposit
/// type id (0x7e).
#[inline]
pub fn is_deposit(&self) -> bool {
    self.as_ref().transaction_type == Some(DEPOSIT_TX_TYPE_ID)
}
/// Check if this is a Tempo transaction.
///
/// Returns true if the transaction type is explicitly set to Tempo (0x76), or if
/// a `feeToken` or `nonceKey` field (both Tempo-specific) is present in
/// OtherFields.
#[inline]
pub fn is_tempo(&self) -> bool {
    self.as_ref().transaction_type == Some(TEMPO_TX_TYPE_ID)
        || self.as_ref().other.contains_key("feeToken")
        || self.as_ref().other.contains_key("nonceKey")
}
/// Get the Tempo fee token from OtherFields if present.
///
/// Returns `None` when the key is absent or fails to deserialize as an
/// [`Address`].
fn get_tempo_fee_token(&self) -> Option<Address> {
    match self.as_ref().other.get_deserialized::<Address>("feeToken") {
        Some(Ok(token)) => Some(token),
        _ => None,
    }
}
/// Get the Tempo nonce sequence key from OtherFields if present.
fn get_tempo_nonce_key(&self) -> U256 {
self.as_ref()
.other
.get_deserialized::<U256>("nonceKey")
.transpose()
.ok()
.flatten()
.unwrap_or_default()
}
/// Check if all necessary keys are present to build a Tempo transaction, returning a list of
/// keys that are missing.
pub fn complete_tempo(&self) -> Result<(), Vec<&'static str>> {
    // Each required field, paired with whether it is absent.
    let checks = [
        ("chain_id", self.chain_id().is_none()),
        ("gas_limit", self.gas_limit().is_none()),
        ("max_fee_per_gas", self.max_fee_per_gas().is_none()),
        ("max_priority_fee_per_gas", self.max_priority_fee_per_gas().is_none()),
        ("nonce", self.nonce().is_none()),
    ];
    let missing: Vec<&'static str> =
        checks.iter().filter(|(_, absent)| *absent).map(|(key, _)| *key).collect();
    if missing.is_empty() { Ok(()) } else { Err(missing) }
}
/// Returns the minimal transaction type this request can be converted into based on the fields
/// that are set. See [`TransactionRequest::preferred_type`].
///
/// Deposit and Tempo markers take precedence over the inner request's own
/// preference.
pub fn preferred_type(&self) -> FoundryTxType {
    if self.is_deposit() {
        FoundryTxType::Deposit
    } else if self.is_tempo() {
        FoundryTxType::Tempo
    } else {
        self.as_ref().preferred_type().into()
    }
}
/// Check if all necessary keys are present to build a 4844 transaction,
/// returning a list of keys that are missing.
///
/// **NOTE:** Inner [`TransactionRequest::complete_4844`] method but "sidecar" key is filtered
/// from error.
pub fn complete_4844(&self) -> Result<(), Vec<&'static str>> {
    self.as_ref().complete_4844().or_else(|missing| {
        // A missing sidecar is tolerated; the tx can be built without one.
        let remaining: Vec<_> = missing.into_iter().filter(|&key| key != "sidecar").collect();
        if remaining.is_empty() { Ok(()) } else { Err(remaining) }
    })
}
/// Check if all necessary keys are present to build a Deposit transaction, returning a list of
/// keys that are missing.
///
/// Delegates to [`get_deposit_tx_parts`] and discards the parsed parts.
pub fn complete_deposit(&self) -> Result<(), Vec<&'static str>> {
    get_deposit_tx_parts(&self.as_ref().other).map(|_| ())
}
/// Return the tx type this request can be built as. Computed by checking
/// the preferred type, and then checking for completeness.
///
/// Returns `None` when required fields for the preferred type are missing.
pub fn buildable_type(&self) -> Option<FoundryTxType> {
    let pref = self.preferred_type();
    match pref {
        FoundryTxType::Legacy => self.as_ref().complete_legacy().ok(),
        FoundryTxType::Eip2930 => self.as_ref().complete_2930().ok(),
        FoundryTxType::Eip1559 => self.as_ref().complete_1559().ok(),
        // BUGFIX: use the sidecar-tolerant wrapper (`Self::complete_4844`)
        // rather than the inner `TransactionRequest::complete_4844`, matching
        // `missing_keys` and `build_typed_tx`, which both support building a
        // 4844 tx without a sidecar attached yet.
        FoundryTxType::Eip4844 => self.complete_4844().ok(),
        FoundryTxType::Eip7702 => self.as_ref().complete_7702().ok(),
        FoundryTxType::Deposit => self.complete_deposit().ok(),
        FoundryTxType::Tempo => self.complete_tempo().ok(),
    }?;
    Some(pref)
}
/// Check if all necessary keys are present to build a transaction.
///
/// # Returns
///
/// - Ok(type) if all necessary keys are present to build the preferred type.
/// - Err((type, missing)) if some keys are missing to build the preferred type.
pub fn missing_keys(&self) -> Result<FoundryTxType, (FoundryTxType, Vec<&'static str>)> {
    let pref = self.preferred_type();
    // Run the completeness check matching the preferred type.
    let completeness = match pref {
        FoundryTxType::Legacy => self.as_ref().complete_legacy(),
        FoundryTxType::Eip2930 => self.as_ref().complete_2930(),
        FoundryTxType::Eip1559 => self.as_ref().complete_1559(),
        FoundryTxType::Eip4844 => self.complete_4844(),
        FoundryTxType::Eip7702 => self.as_ref().complete_7702(),
        FoundryTxType::Deposit => self.complete_deposit(),
        FoundryTxType::Tempo => self.complete_tempo(),
    };
    completeness.map(|()| pref).map_err(|missing| (pref, missing))
}
/// Build a typed transaction from this request.
///
/// Converts the request into a `FoundryTypedTx`, handling all Ethereum and OP-stack transaction
/// types.
///
/// # Errors
///
/// Returns the request back as `Err` when the inner builder cannot assemble
/// the transaction, so callers can inspect or retry it.
pub fn build_typed_tx(self) -> Result<FoundryTypedTx, Self> {
        // Handle deposit transactions
        // (presence of valid deposit extra fields takes priority over
        // everything else)
        if let Ok(deposit_tx_parts) = get_deposit_tx_parts(&self.as_ref().other) {
            Ok(FoundryTypedTx::Deposit(TxDeposit {
                from: self.from().unwrap_or_default(),
                source_hash: deposit_tx_parts.source_hash,
                to: self.kind().unwrap_or_default(),
                mint: deposit_tx_parts.mint.unwrap_or_default(),
                value: self.value().unwrap_or_default(),
                gas_limit: self.gas_limit().unwrap_or_default(),
                is_system_transaction: deposit_tx_parts.is_system_transaction,
                input: self.input().cloned().unwrap_or_default(),
            }))
        } else if self.is_tempo() {
            // Build Tempo transaction from request fields; the flat request
            // maps onto a single-call Tempo transaction.
            Ok(FoundryTypedTx::Tempo(TempoTransaction {
                chain_id: self.chain_id().unwrap_or_default(),
                fee_token: self.get_tempo_fee_token(),
                max_fee_per_gas: self.max_fee_per_gas().unwrap_or_default(),
                max_priority_fee_per_gas: self.max_priority_fee_per_gas().unwrap_or_default(),
                gas_limit: self.gas_limit().unwrap_or_default(),
                nonce_key: self.get_tempo_nonce_key(),
                nonce: self.nonce().unwrap_or_default(),
                calls: vec![Call {
                    to: self.kind().unwrap_or_default(),
                    value: self.value().unwrap_or_default(),
                    input: self.input().cloned().unwrap_or_default(),
                }],
                access_list: self.access_list().cloned().unwrap_or_default(),
                ..Default::default()
            }))
        } else if self.as_ref().has_eip4844_fields() && self.as_ref().blob_sidecar().is_none() {
            // if request has eip4844 fields but no blob sidecar, try to build to eip4844 without
            // sidecar
            self.0
                .into_inner()
                .build_4844_without_sidecar()
                .map_err(|e| Self(e.into_value().into()))
                .map(|tx| FoundryTypedTx::Eip4844(tx.into()))
        } else {
            // Use the inner transaction request to build EthereumTypedTransaction
            let typed_tx = self.0.into_inner().build_typed_tx().map_err(|tx| Self(tx.into()))?;
            // Convert EthereumTypedTransaction to FoundryTypedTx
            Ok(match typed_tx {
                EthereumTypedTransaction::Legacy(tx) => FoundryTypedTx::Legacy(tx),
                EthereumTypedTransaction::Eip2930(tx) => FoundryTypedTx::Eip2930(tx),
                EthereumTypedTransaction::Eip1559(tx) => FoundryTypedTx::Eip1559(tx),
                EthereumTypedTransaction::Eip4844(tx) => FoundryTypedTx::Eip4844(tx),
                EthereumTypedTransaction::Eip7702(tx) => FoundryTypedTx::Eip7702(tx),
            })
        }
    }
}
impl From<FoundryTypedTx> for FoundryTransactionRequest {
    /// Converts a typed transaction back into a request.
    ///
    /// Deposit extras are stored under the `sourceHash`/`mint`/`isSystemTx`
    /// keys; Tempo extras under `feeToken`/`nonceKey`.
    fn from(tx: FoundryTypedTx) -> Self {
        match tx {
            FoundryTypedTx::Legacy(tx) => Self(Into::<TransactionRequest>::into(tx).into()),
            FoundryTypedTx::Eip2930(tx) => Self(Into::<TransactionRequest>::into(tx).into()),
            FoundryTypedTx::Eip1559(tx) => Self(Into::<TransactionRequest>::into(tx).into()),
            FoundryTypedTx::Eip4844(tx) => Self(Into::<TransactionRequest>::into(tx).into()),
            FoundryTypedTx::Eip7702(tx) => Self(Into::<TransactionRequest>::into(tx).into()),
            FoundryTypedTx::Deposit(tx) => {
                // NOTE(review): these extras are stored as stringified JSON
                // values (e.g. the bool becomes "true"); confirm that
                // `get_deposit_tx_parts` can deserialize them back.
                let other = OtherFields::from_iter([
                    ("sourceHash", tx.source_hash.to_string().into()),
                    ("mint", tx.mint.to_string().into()),
                    ("isSystemTx", tx.is_system_transaction.to_string().into()),
                ]);
                WithOtherFields { inner: Into::<TransactionRequest>::into(tx), other }.into()
            }
            FoundryTypedTx::Tempo(tx) => {
                let mut other = OtherFields::default();
                if let Some(fee_token) = tx.fee_token {
                    other.insert("feeToken".to_string(), serde_json::to_value(fee_token).unwrap());
                }
                other.insert("nonceKey".to_string(), serde_json::to_value(tx.nonce_key).unwrap());
                // NOTE(review): only the first call of a Tempo transaction is
                // representable in a flat `TransactionRequest`; any further
                // calls are dropped here — confirm that is acceptable.
                let first_call = tx.calls.first();
                let mut inner = TransactionRequest::default()
                    .with_chain_id(tx.chain_id)
                    .with_nonce(tx.nonce)
                    .with_gas_limit(tx.gas_limit)
                    .with_max_fee_per_gas(tx.max_fee_per_gas)
                    .with_max_priority_fee_per_gas(tx.max_priority_fee_per_gas)
                    .with_kind(first_call.map(|c| c.to).unwrap_or_default())
                    .with_value(first_call.map(|c| c.value).unwrap_or_default())
                    .with_input(first_call.map(|c| c.input.clone()).unwrap_or_default())
                    .with_access_list(tx.access_list);
                inner.transaction_type = Some(TEMPO_TX_TYPE_ID);
                WithOtherFields { inner, other }.into()
            }
        }
    }
}
impl From<FoundryTxEnvelope> for FoundryTransactionRequest {
    /// Converts a signed envelope into a request by first stripping it down
    /// to its unsigned typed-transaction form.
    fn from(tx: FoundryTxEnvelope) -> Self {
        let unsigned = FoundryTypedTx::from(tx);
        Self::from(unsigned)
    }
}
// TransactionBuilder trait implementation for FoundryNetwork.
// The field accessors/setters all delegate to the wrapped `TransactionRequest`
// via `as_ref`/`as_mut`.
impl TransactionBuilder<FoundryNetwork> for FoundryTransactionRequest {
    fn chain_id(&self) -> Option<ChainId> {
        self.as_ref().chain_id
    }
    fn set_chain_id(&mut self, chain_id: ChainId) {
        self.as_mut().chain_id = Some(chain_id);
    }
    fn nonce(&self) -> Option<u64> {
        self.as_ref().nonce
    }
    fn set_nonce(&mut self, nonce: u64) {
        self.as_mut().nonce = Some(nonce);
    }
    fn take_nonce(&mut self) -> Option<u64> {
        self.as_mut().nonce.take()
    }
    fn input(&self) -> Option<&alloy_primitives::Bytes> {
        self.as_ref().input.input()
    }
    fn set_input<T: Into<alloy_primitives::Bytes>>(&mut self, input: T) {
        self.as_mut().input.input = Some(input.into())
    }
    fn set_input_kind<T: Into<alloy_primitives::Bytes>>(
        &mut self,
        input: T,
        kind: TransactionInputKind,
    ) {
        // Calldata may be carried in `input`, legacy `data`, or both;
        // `kind` selects which field(s) to populate.
        let inner = self.as_mut();
        match kind {
            TransactionInputKind::Input => inner.input.input = Some(input.into()),
            TransactionInputKind::Data => inner.input.data = Some(input.into()),
            TransactionInputKind::Both => {
                let bytes = input.into();
                inner.input.input = Some(bytes.clone());
                inner.input.data = Some(bytes);
            }
        }
    }
    fn from(&self) -> Option<Address> {
        self.as_ref().from
    }
    fn set_from(&mut self, from: Address) {
        self.as_mut().from = Some(from);
    }
    fn kind(&self) -> Option<TxKind> {
        self.as_ref().to
    }
    fn clear_kind(&mut self) {
        self.as_mut().to = None;
    }
    fn set_kind(&mut self, kind: TxKind) {
        self.as_mut().to = Some(kind);
    }
    fn value(&self) -> Option<U256> {
        self.as_ref().value
    }
    fn set_value(&mut self, value: U256) {
        self.as_mut().value = Some(value);
    }
    fn gas_price(&self) -> Option<u128> {
        self.as_ref().gas_price
    }
    fn set_gas_price(&mut self, gas_price: u128) {
        self.as_mut().gas_price = Some(gas_price);
    }
    fn max_fee_per_gas(&self) -> Option<u128> {
        self.as_ref().max_fee_per_gas
    }
    fn set_max_fee_per_gas(&mut self, max_fee_per_gas: u128) {
        self.as_mut().max_fee_per_gas = Some(max_fee_per_gas);
    }
    fn max_priority_fee_per_gas(&self) -> Option<u128> {
        self.as_ref().max_priority_fee_per_gas
    }
    fn set_max_priority_fee_per_gas(&mut self, max_priority_fee_per_gas: u128) {
        self.as_mut().max_priority_fee_per_gas = Some(max_priority_fee_per_gas);
    }
    fn gas_limit(&self) -> Option<u64> {
        self.as_ref().gas
    }
    fn set_gas_limit(&mut self, gas_limit: u64) {
        self.as_mut().gas = Some(gas_limit);
    }
    fn access_list(&self) -> Option<&AccessList> {
        self.as_ref().access_list.as_ref()
    }
    fn set_access_list(&mut self, access_list: AccessList) {
        self.as_mut().access_list = Some(access_list);
    }
    fn complete_type(&self, ty: FoundryTxType) -> Result<(), Vec<&'static str>> {
        // NOTE(review): the EIP-4844 arm uses the inner check (which requires
        // a blob sidecar), whereas `missing_keys` uses the sidecar-optional
        // wrapper `Self::complete_4844` — confirm the stricter behavior is
        // intended here.
        match ty {
            FoundryTxType::Legacy => self.as_ref().complete_legacy(),
            FoundryTxType::Eip2930 => self.as_ref().complete_2930(),
            FoundryTxType::Eip1559 => self.as_ref().complete_1559(),
            FoundryTxType::Eip4844 => self.as_ref().complete_4844(),
            FoundryTxType::Eip7702 => self.as_ref().complete_7702(),
            FoundryTxType::Deposit => self.complete_deposit(),
            FoundryTxType::Tempo => self.complete_tempo(),
        }
    }
    fn can_submit(&self) -> bool {
        // Only a sender is required for submission; the node fills the rest.
        self.from().is_some()
    }
    fn can_build(&self) -> bool {
        // Buildable as a standard Ethereum tx, a deposit (valid extra
        // fields), or a Tempo tx.
        self.as_ref().can_build()
            || get_deposit_tx_parts(&self.as_ref().other).is_ok()
            || self.is_tempo()
    }
    fn output_tx_type(&self) -> FoundryTxType {
        self.preferred_type()
    }
    fn output_tx_type_checked(&self) -> Option<FoundryTxType> {
        self.buildable_type()
    }
    /// Prepares [`FoundryTransactionRequest`] by trimming conflicting fields, and filling with
    /// default values the mandatory fields.
    ///
    /// Which fields are defaulted depends on the preferred tx type; Deposit
    /// and Tempo requests skip conflicting-key trimming and blob-hash
    /// population entirely.
    fn prep_for_submission(&mut self) {
        let preferred_type = self.preferred_type();
        let inner = self.as_mut();
        inner.transaction_type = Some(preferred_type as u8);
        // Gas limit is mandatory for every type.
        inner.gas_limit().is_none().then(|| inner.set_gas_limit(Default::default()));
        if !matches!(preferred_type, FoundryTxType::Deposit | FoundryTxType::Tempo) {
            inner.trim_conflicting_keys();
            inner.populate_blob_hashes();
        }
        // Deposit transactions carry no nonce.
        if preferred_type != FoundryTxType::Deposit {
            inner.nonce().is_none().then(|| inner.set_nonce(Default::default()));
        }
        // Pre-EIP-1559 types use a single gas price.
        if matches!(preferred_type, FoundryTxType::Legacy | FoundryTxType::Eip2930) {
            inner.gas_price().is_none().then(|| inner.set_gas_price(Default::default()));
        }
        if preferred_type == FoundryTxType::Eip2930 {
            inner.access_list().is_none().then(|| inner.set_access_list(Default::default()));
        }
        // Fee-market types need both max fee fields.
        if matches!(
            preferred_type,
            FoundryTxType::Eip1559
                | FoundryTxType::Eip4844
                | FoundryTxType::Eip7702
                | FoundryTxType::Tempo
        ) {
            inner
                .max_priority_fee_per_gas()
                .is_none()
                .then(|| inner.set_max_priority_fee_per_gas(Default::default()));
            inner
                .max_fee_per_gas()
                .is_none()
                .then(|| inner.set_max_fee_per_gas(Default::default()));
        }
        if preferred_type == FoundryTxType::Eip4844 {
            inner
                .as_ref()
                .max_fee_per_blob_gas()
                .is_none()
                .then(|| inner.as_mut().set_max_fee_per_blob_gas(Default::default()));
        }
    }
    fn build_unsigned(self) -> BuildResult<FoundryTypedTx, FoundryNetwork> {
        // Validate completeness first so the `expect` below cannot fire.
        if let Err((tx_type, missing)) = self.missing_keys() {
            return Err(TransactionBuilderError::InvalidTransactionRequest(tx_type, missing)
                .into_unbuilt(self));
        }
        Ok(self.build_typed_tx().expect("checked by missing_keys"))
    }
    async fn build<W: NetworkWallet<FoundryNetwork>>(
        self,
        wallet: &W,
    ) -> Result<FoundryTxEnvelope, TransactionBuilderError<FoundryNetwork>> {
        // Delegate signing entirely to the wallet.
        Ok(wallet.sign_request(self).await?)
    }
}
/// Converts `OtherFields` to `DepositTransactionParts`, produces error with missing fields
///
/// `sourceHash` and `isSystemTx` are required: absent or unparsable values
/// mark the key as missing. `mint` is optional: an absent key yields
/// `None`, and only an unparsable value marks it missing.
pub fn get_deposit_tx_parts(
    other: &OtherFields,
) -> Result<DepositTransactionParts, Vec<&'static str>> {
    let mut missing = Vec::new();
    // Missing key OR deserialization failure both count as "missing".
    let source_hash =
        other.get_deserialized::<B256>("sourceHash").transpose().ok().flatten().unwrap_or_else(
            || {
                missing.push("sourceHash");
                Default::default()
            },
        );
    // Only a deserialization failure is an error; an absent key is fine.
    let mint = other
        .get_deserialized::<U256>("mint")
        .transpose()
        .unwrap_or_else(|_| {
            missing.push("mint");
            Default::default()
        })
        .map(|value| value.to::<u128>());
    let is_system_transaction =
        other.get_deserialized::<bool>("isSystemTx").transpose().ok().flatten().unwrap_or_else(
            || {
                missing.push("isSystemTx");
                Default::default()
            },
        );
    if missing.is_empty() {
        Ok(DepositTransactionParts { source_hash, mint, is_system_transaction })
    } else {
        Err(missing)
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/primitives/src/network/receipt.rs | crates/primitives/src/network/receipt.rs | use alloy_consensus::{Receipt, TxReceipt};
use alloy_network::{AnyReceiptEnvelope, AnyTransactionReceipt, ReceiptResponse};
use alloy_primitives::{Address, B256, BlockHash, TxHash, U64};
use alloy_rpc_types::{ConversionError, Log, TransactionReceipt};
use alloy_serde::WithOtherFields;
use derive_more::AsRef;
use op_alloy_consensus::{OpDepositReceipt, OpDepositReceiptWithBloom};
use serde::{Deserialize, Serialize};
use crate::FoundryReceiptEnvelope;
/// RPC transaction receipt for the Foundry network: a [`TransactionReceipt`]
/// over [`FoundryReceiptEnvelope`], preserving any extra JSON fields.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, AsRef)]
pub struct FoundryTxReceipt(pub WithOtherFields<TransactionReceipt<FoundryReceiptEnvelope<Log>>>);
impl FoundryTxReceipt {
    /// Wraps the given receipt with no additional (`other`) fields.
    pub fn new(inner: TransactionReceipt<FoundryReceiptEnvelope<Log>>) -> Self {
        Self(WithOtherFields::new(inner))
    }
}
// Every accessor simply delegates to the wrapped `TransactionReceipt`.
impl ReceiptResponse for FoundryTxReceipt {
    fn contract_address(&self) -> Option<Address> {
        self.0.contract_address
    }
    fn status(&self) -> bool {
        self.0.inner.status()
    }
    fn block_hash(&self) -> Option<BlockHash> {
        self.0.block_hash
    }
    fn block_number(&self) -> Option<u64> {
        self.0.block_number
    }
    fn transaction_hash(&self) -> TxHash {
        self.0.transaction_hash
    }
    fn transaction_index(&self) -> Option<u64> {
        self.0.transaction_index()
    }
    fn gas_used(&self) -> u64 {
        self.0.gas_used()
    }
    fn effective_gas_price(&self) -> u128 {
        self.0.effective_gas_price()
    }
    fn blob_gas_used(&self) -> Option<u64> {
        self.0.blob_gas_used()
    }
    fn blob_gas_price(&self) -> Option<u128> {
        self.0.blob_gas_price()
    }
    fn from(&self) -> Address {
        self.0.from()
    }
    fn to(&self) -> Option<Address> {
        self.0.to()
    }
    fn cumulative_gas_used(&self) -> u64 {
        self.0.cumulative_gas_used()
    }
    fn state_root(&self) -> Option<B256> {
        self.0.state_root()
    }
}
impl TryFrom<AnyTransactionReceipt> for FoundryTxReceipt {
    type Error = ConversionError;
    /// Re-tags an untyped catch-all receipt as a typed Foundry receipt,
    /// dispatching on the numeric transaction type byte. Unknown types fail
    /// with a [`ConversionError::Custom`].
    fn try_from(receipt: AnyTransactionReceipt) -> Result<Self, Self::Error> {
        // Destructure fully so every field is moved, not cloned.
        let WithOtherFields {
            inner:
                TransactionReceipt {
                    transaction_hash,
                    transaction_index,
                    block_hash,
                    block_number,
                    gas_used,
                    contract_address,
                    effective_gas_price,
                    from,
                    to,
                    blob_gas_price,
                    blob_gas_used,
                    inner: AnyReceiptEnvelope { inner: receipt_with_bloom, r#type },
                },
            other,
        } = receipt;
        Ok(Self(WithOtherFields {
            inner: TransactionReceipt {
                transaction_hash,
                transaction_index,
                block_hash,
                block_number,
                gas_used,
                contract_address,
                effective_gas_price,
                from,
                to,
                blob_gas_price,
                blob_gas_used,
                inner: match r#type {
                    0x00 => FoundryReceiptEnvelope::Legacy(receipt_with_bloom),
                    0x01 => FoundryReceiptEnvelope::Eip2930(receipt_with_bloom),
                    0x02 => FoundryReceiptEnvelope::Eip1559(receipt_with_bloom),
                    0x03 => FoundryReceiptEnvelope::Eip4844(receipt_with_bloom),
                    0x04 => FoundryReceiptEnvelope::Eip7702(receipt_with_bloom),
                    0x7E => {
                        // Construct the deposit receipt, extracting optional deposit fields
                        // These fields may not be present in all receipts, so missing/invalid
                        // values are None
                        let deposit_nonce = other
                            .get_deserialized::<U64>("depositNonce")
                            .transpose()
                            .ok()
                            .flatten()
                            .map(|v| v.to::<u64>());
                        let deposit_receipt_version = other
                            .get_deserialized::<U64>("depositReceiptVersion")
                            .transpose()
                            .ok()
                            .flatten()
                            .map(|v| v.to::<u64>());
                        FoundryReceiptEnvelope::Deposit(OpDepositReceiptWithBloom {
                            receipt: OpDepositReceipt {
                                inner: Receipt {
                                    status: alloy_consensus::Eip658Value::Eip658(
                                        receipt_with_bloom.status(),
                                    ),
                                    cumulative_gas_used: receipt_with_bloom.cumulative_gas_used(),
                                    logs: receipt_with_bloom.receipt.logs,
                                },
                                deposit_nonce,
                                deposit_receipt_version,
                            },
                            logs_bloom: receipt_with_bloom.logs_bloom,
                        })
                    }
                    _ => {
                        let tx_type = r#type;
                        return Err(ConversionError::Custom(format!(
                            "Unknown transaction receipt type: 0x{tx_type:02X}"
                        )));
                    }
                },
            },
            other,
        }))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // <https://github.com/foundry-rs/foundry/issues/10852>
    // Regression test: a `"type":"0x4"` (EIP-7702) mainnet receipt must
    // convert into a `FoundryTxReceipt` without error.
    #[test]
    fn test_receipt_convert() {
        let s = r#"{"type":"0x4","status":"0x1","cumulativeGasUsed":"0x903fd1","logs":[{"address":"0x0000d9fcd47bf761e7287d8ee09917d7e2100000","topics":["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef","0x0000000000000000000000000000000000000000000000000000000000000000","0x000000000000000000000000234ce51365b9c417171b6dad280f49143e1b0547"],"data":"0x00000000000000000000000000000000000000000000032139b42c3431700000","blockHash":"0xd26b59c1d8b5bfa9362d19eb0da3819dfe0b367987a71f6d30908dd45e0d7a60","blockNumber":"0x159663e","blockTimestamp":"0x68411f7b","transactionHash":"0x17a6af73d1317e69cfc3cac9221bd98261d40f24815850a44dbfbf96652ae52a","transactionIndex":"0x22","logIndex":"0x158","removed":false}],"logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000008100000000000000000000000000000000000000000000000020000200000000000000800000000800000000000000010000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000","transactionHash":"0x17a6af73d1317e69cfc3cac9221bd98261d40f24815850a44dbfbf96652ae52a","transactionIndex":"0x22","blockHash":"0xd26b59c1d8b5bfa9362d19eb0da3819dfe0b367987a71f6d30908dd45e0d7a60","blockNumber":"0x159663e","gasUsed":"0x28ee7","effectiveGasPrice":"0x4bf02090","from":"0x234ce51365b9c417171b6dad280f49143e1b0547","to":"0x234ce51365b9c417171b6dad280f49143e1b0547","contractAddress":null}"#;
        let receipt: AnyTransactionReceipt = serde_json::from_str(s).unwrap();
        let _converted = FoundryTxReceipt::try_from(receipt).unwrap();
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/primitives/src/network/mod.rs | crates/primitives/src/network/mod.rs | use alloy_network::Network;
mod receipt;
pub use receipt::*;
/// Foundry network type.
///
/// This network type supports Foundry-specific transaction types, including
/// op-stack deposit transactions, alongside standard Ethereum transaction types.
///
/// Note: This is a basic implementation ("for now") that provides the core Network
/// trait definitions. Full Foundry-specific RPC types will be implemented in future work.
/// Currently, this uses Ethereum's Network configuration as a compatibility layer.
#[derive(Debug, Clone, Copy)]
pub struct FoundryNetwork {
    // Zero-sized private field: prevents construction outside this module
    // while keeping the type usable purely as a `Network` marker.
    _private: (),
}
// Use Ethereum's Network trait implementation as the basis.
// This provides compatibility with the alloy-network ecosystem while we build
// out Foundry-specific RPC types.
impl Network for FoundryNetwork {
    // Transaction-related types are the Foundry-specific ones (which carry
    // Deposit/Tempo variants); header and block responses reuse standard
    // Ethereum RPC types.
    type TxType = crate::FoundryTxType;
    type TxEnvelope = crate::FoundryTxEnvelope;
    type UnsignedTx = crate::FoundryTypedTx;
    type ReceiptEnvelope = crate::FoundryReceiptEnvelope;
    type Header = alloy_consensus::Header;
    type TransactionRequest = crate::FoundryTransactionRequest;
    type TransactionResponse = op_alloy_rpc_types::Transaction<crate::FoundryTxEnvelope>;
    type ReceiptResponse = crate::FoundryTxReceipt;
    type HeaderResponse = alloy_rpc_types_eth::Header;
    type BlockResponse =
        alloy_rpc_types_eth::Block<Self::TransactionResponse, Self::HeaderResponse>;
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/build.rs | crates/common/build.rs | #![expect(clippy::disallowed_macros)]
use chrono::DateTime;
use std::{error::Error, path::PathBuf};
use vergen::EmitBuilder;
/// Build-script entry point: embeds version metadata (semver string, short
/// and long version banners) into the binary via `cargo:rustc-env` lines.
///
/// # Errors
///
/// Fails if vergen cannot emit its git/build metadata or the build timestamp
/// is not valid RFC 3339.
fn main() -> Result<(), Box<dyn Error>> {
    println!("cargo:rerun-if-changed=build.rs");
    EmitBuilder::builder()
        .build_date()
        .build_timestamp()
        .git_describe(false, true, None)
        .git_sha(false)
        .emit_and_set()?;
    let sha = env_var("VERGEN_GIT_SHA");
    // Abbreviate the commit hash; fall back to the full value instead of
    // panicking if it is ever shorter than 10 characters.
    let sha_short = sha.get(..10).unwrap_or(sha.as_str());
    let tag_name = try_env_var("TAG_NAME").unwrap_or_else(|| String::from("dev"));
    let is_nightly = tag_name.contains("nightly");
    let version_suffix = if is_nightly { "nightly" } else { &tag_name };
    if is_nightly {
        println!("cargo:rustc-env=FOUNDRY_IS_NIGHTLY_VERSION=true");
    }
    let pkg_version = env_var("CARGO_PKG_VERSION");
    let version = format!("{pkg_version}-{version_suffix}");
    // `PROFILE` captures only release or debug. Get the actual name from the out directory.
    // NOTE(review): this assumes OUT_DIR's `target/<profile>/build/<pkg>/out`
    // layout — revisit if the target directory structure ever changes.
    let out_dir = PathBuf::from(env_var("OUT_DIR"));
    let profile = out_dir.components().rev().nth(3).unwrap().as_os_str().to_str().unwrap();
    let build_timestamp = env_var("VERGEN_BUILD_TIMESTAMP");
    let build_timestamp_unix = DateTime::parse_from_rfc3339(&build_timestamp)?.timestamp();
    // The SemVer compatible version information for Foundry.
    // - The latest version from Cargo.toml.
    // - The short SHA of the latest commit.
    // - The UNIX formatted build timestamp.
    // - The build profile.
    // Example: forge 0.3.0-nightly+3cb96bde9b.1737036656.debug
    println!(
        "cargo:rustc-env=FOUNDRY_SEMVER_VERSION={version}+{sha_short}.{build_timestamp_unix}.{profile}"
    );
    // The short version information for the Foundry CLI.
    // - The latest version from Cargo.toml
    // - The short SHA of the latest commit.
    // Example: 0.3.0-dev (3cb96bde9b)
    println!("cargo:rustc-env=FOUNDRY_SHORT_VERSION={version} ({sha_short} {build_timestamp})");
    // The long version information for the Foundry CLI.
    // - The latest version from Cargo.toml.
    // - The long SHA of the latest commit.
    // - The build timestamp in RFC3339 format and UNIX format in seconds.
    // - The build profile.
    //
    // Example:
    //
    // ```text
    // <BIN>
    // Version: 0.3.0-dev
    // Commit SHA: 5186142d3bb4d1be7bb4ade548b77c8e2270717e
    // Build Timestamp: 2025-01-16T15:04:03.522021223Z (1737039843)
    // Build Profile: debug
    // ```
    let long_version = format!(
        "\
Version: {version}
Commit SHA: {sha}
Build Timestamp: {build_timestamp} ({build_timestamp_unix})
Build Profile: {profile}"
    );
    assert_eq!(long_version.lines().count(), 4);
    for (i, line) in long_version.lines().enumerate() {
        println!("cargo:rustc-env=FOUNDRY_LONG_VERSION_{i}={line}");
    }
    Ok(())
}
/// Reads a required environment variable, panicking with the variable's name
/// when it is not set (a bare `unwrap` would hide which one was missing).
fn env_var(name: &str) -> String {
    try_env_var(name)
        .unwrap_or_else(|| panic!("build-time environment variable `{name}` is not set"))
}
/// Reads an environment variable as an `Option`, first registering it with
/// Cargo so the build script reruns whenever the variable changes.
fn try_env_var(name: &str) -> Option<String> {
    println!("cargo:rerun-if-env-changed={name}");
    match std::env::var(name) {
        Ok(value) => Some(value),
        Err(_) => None,
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/lib.rs | crates/common/src/lib.rs | //! # foundry-common
//!
//! Common utilities for building and using foundry's tools.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[expect(unused_extern_crates)] // Used by `ConsoleFmt`.
extern crate self as foundry_common;
#[macro_use]
extern crate tracing;
#[macro_use]
pub mod io;
pub use foundry_common_fmt as fmt;
pub mod abi;
pub mod calc;
pub mod comments;
pub mod compile;
pub mod constants;
pub mod contracts;
pub mod errors;
pub mod fs;
pub mod iter;
pub mod mapping_slots;
mod preprocessor;
pub mod provider;
pub mod retry;
pub mod selectors;
pub mod serde_helpers;
pub mod slot_identifier;
pub mod term;
pub mod traits;
pub mod transactions;
mod utils;
pub mod version;
pub use compile::Analysis;
pub use constants::*;
pub use contracts::*;
pub use io::{Shell, shell, stdin};
pub use traits::*;
pub use transactions::*;
pub use utils::*;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/slot_identifier.rs | crates/common/src/slot_identifier.rs | //! Storage slot identification and decoding utilities for Solidity storage layouts.
//!
//! This module provides functionality to identify and decode storage slots based on
//! Solidity storage layout information from the compiler.
use crate::mapping_slots::MappingSlots;
use alloy_dyn_abi::{DynSolType, DynSolValue};
use alloy_primitives::{B256, U256, hex, keccak256, map::B256Map};
use foundry_common_fmt::format_token_raw;
use foundry_compilers::artifacts::{Storage, StorageLayout, StorageType};
use serde::Serialize;
use std::{collections::BTreeMap, str::FromStr, sync::Arc};
use tracing::trace;
/// "inplace" encoding type for variables that fit in one storage slot i.e 32 bytes
pub const ENCODING_INPLACE: &str = "inplace";
/// "mapping" encoding type for Solidity mappings, which use keccak256 hash-based storage
pub const ENCODING_MAPPING: &str = "mapping";
/// "bytes" encoding type for bytes and string types, which use either inplace or keccak256
/// hash-based storage depending on length
pub const ENCODING_BYTES: &str = "bytes";
/// "dynamic_array" encoding type for dynamic arrays, which uses keccak256 hash-based storage
pub const ENCODING_DYN_ARRAY: &str = "dynamic_array";
/// Information about a storage slot including its label, type, and decoded values.
#[derive(Serialize, Debug)]
pub struct SlotInfo {
    /// The variable name from the storage layout.
    ///
    /// For top-level variables: just the variable name (e.g., "myVariable")
    /// For struct members: dotted path (e.g., "myStruct.memberName")
    /// For array elements: name with indices (e.g., "myArray\[0\]", "matrix\[1\]\[2\]")
    /// For nested structures: full path (e.g., "outer.inner.field")
    /// For mappings: base name with keys (e.g., "balances\[0x1234...\]")
    pub label: String,
    /// The Solidity type information
    #[serde(rename = "type", serialize_with = "serialize_slot_type")]
    pub slot_type: StorageTypeInfo,
    /// Byte offset within the storage slot (for packed storage), counted from
    /// the least-significant byte of the word.
    pub offset: i64,
    /// The storage slot number as a string
    pub slot: String,
    /// For struct members, contains nested SlotInfo for each member
    ///
    /// This is populated when a struct's members / fields are packed in a single slot.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub members: Option<Vec<Self>>,
    /// Decoded values (if available) - used for struct members
    #[serde(skip_serializing_if = "Option::is_none")]
    pub decoded: Option<DecodedSlotValues>,
    /// Decoded mapping keys (serialized as "key" for single, "keys" for multiple)
    #[serde(
        skip_serializing_if = "Option::is_none",
        flatten,
        serialize_with = "serialize_mapping_keys"
    )]
    pub keys: Option<Vec<String>>,
}
/// Wrapper type that holds both the original type label and the parsed DynSolType.
///
/// We need both because:
/// - `label`: Used for serialization to ensure output matches user expectations
/// - `dyn_sol_type`: The parsed type used for actual value decoding
///
/// Only `label` ever appears in serialized output (see `serialize_slot_type`).
#[derive(Debug)]
pub struct StorageTypeInfo {
    /// The original type label from storage layout (e.g., "uint256", "address", "mapping(address
    /// => uint256)")
    pub label: String,
    /// The parsed dynamic Solidity type used for decoding
    pub dyn_sol_type: DynSolType,
}
impl SlotInfo {
    /// Decodes a single storage value based on the slot's type information.
    ///
    /// Note: For decoding [`DynSolType::Bytes`] or [`DynSolType::String`] that span multiple slots,
    /// use [`SlotInfo::decode_bytes_or_string`].
    ///
    /// Returns `None` when the word cannot be decoded as the slot's type, or
    /// when it is the base slot of a long bytes/string whose payload lives
    /// in other slots.
    pub fn decode(&self, value: B256) -> Option<DynSolValue> {
        // Storage values are always 32 bytes, stored as a single word
        let mut actual_type = &self.slot_type.dyn_sol_type;
        // Unwrap nested arrays to get to the base element type.
        while let DynSolType::FixedArray(elem_type, _) = actual_type {
            actual_type = elem_type.as_ref();
        }
        // Special handling for bytes and string types
        match actual_type {
            DynSolType::Bytes | DynSolType::String => {
                // Decode bytes/string from storage
                // The last byte contains the length * 2 for short strings/bytes
                // or length * 2 + 1 for long strings/bytes
                let length_byte = value.0[31];
                if length_byte & 1 == 0 {
                    // Short string/bytes (less than 32 bytes)
                    let length = (length_byte >> 1) as usize;
                    // Extract data
                    let data = if length == 0 { Vec::new() } else { value.0[0..length].to_vec() };
                    // Create the appropriate value based on type
                    if matches!(actual_type, DynSolType::String) {
                        let str_val = if data.is_empty() {
                            String::new()
                        } else {
                            String::from_utf8(data).unwrap_or_default()
                        };
                        Some(DynSolValue::String(str_val))
                    } else {
                        Some(DynSolValue::Bytes(data))
                    }
                } else {
                    // Long string/bytes (32 bytes or more)
                    // The actual data is stored at keccak256(slot)
                    // Return None for long values - they need decode_bytes_or_string()
                    None
                }
            }
            _ => {
                // Decode based on the actual type
                actual_type.abi_decode(&value.0).ok()
            }
        }
    }
    /// Slot is of type [`DynSolType::Bytes`] or [`DynSolType::String`]
    pub fn is_bytes_or_string(&self) -> bool {
        matches!(self.slot_type.dyn_sol_type, DynSolType::Bytes | DynSolType::String)
    }
    /// Decodes a [`DynSolType::Bytes`] or [`DynSolType::String`] value
    /// that spans across multiple slots.
    ///
    /// Also populates `self.members` with one entry per data slot (see
    /// `aggregate_bytes_or_strings`). Returns `None` for non-bytes/string
    /// slots or when the value is not in long form.
    pub fn decode_bytes_or_string(
        &mut self,
        base_slot: &B256,
        storage_values: &B256Map<B256>,
    ) -> Option<DynSolValue> {
        // Only process bytes/string types
        if !self.is_bytes_or_string() {
            return None;
        }
        // Try to handle as long bytes/string
        self.aggregate_bytes_or_strings(base_slot, storage_values).map(|data| {
            match self.slot_type.dyn_sol_type {
                DynSolType::String => {
                    DynSolValue::String(String::from_utf8(data).unwrap_or_default())
                }
                DynSolType::Bytes => DynSolValue::Bytes(data),
                _ => unreachable!(),
            }
        })
    }
    /// Decodes both previous and new [`DynSolType::Bytes`] or [`DynSolType::String`] values
    /// that span across multiple slots using state diff data.
    ///
    /// Accepts a mapping of storage_slot to (previous_value, new_value).
    /// On success, stores the pair in `self.decoded`; if either side fails to
    /// decode, `self.decoded` is left untouched.
    pub fn decode_bytes_or_string_values(
        &mut self,
        base_slot: &B256,
        storage_accesses: &BTreeMap<B256, (B256, B256)>,
    ) {
        // Only process bytes/string types
        if !self.is_bytes_or_string() {
            return;
        }
        // Get both previous and new values from the storage accesses
        if let Some((prev_base_value, new_base_value)) = storage_accesses.get(base_slot) {
            // Reusable closure to decode bytes/string based on length encoding
            // (`is_new` selects which side of each (prev, new) pair to read).
            let mut decode_value = |base_value: B256, is_new: bool| {
                let length_byte = base_value.0[31];
                if length_byte & 1 == 1 {
                    // Long bytes/string - aggregate from multiple slots
                    let value_map = storage_accesses
                        .iter()
                        .map(|(slot, (prev, new))| (*slot, if is_new { *new } else { *prev }))
                        .collect::<B256Map<_>>();
                    self.decode_bytes_or_string(base_slot, &value_map)
                } else {
                    // Short bytes/string - decode directly from base slot
                    self.decode(base_value)
                }
            };
            // Decode previous value
            let prev_decoded = decode_value(*prev_base_value, false);
            // Decode new value
            let new_decoded = decode_value(*new_base_value, true);
            // Set decoded values if both were successfully decoded
            if let (Some(prev), Some(new)) = (prev_decoded, new_decoded) {
                self.decoded = Some(DecodedSlotValues { previous_value: prev, new_value: new });
            }
        }
    }
    /// Aggregates a [`DynSolType::Bytes`] or [`DynSolType::String`] value that spans across
    /// multiple slots by looking up the length in the base_slot.
    ///
    /// Returns the aggregated raw bytes.
    fn aggregate_bytes_or_strings(
        &mut self,
        base_slot: &B256,
        storage_values: &B256Map<B256>,
    ) -> Option<Vec<u8>> {
        if !self.is_bytes_or_string() {
            return None;
        }
        // Check if it's a long bytes/string by looking at the base value
        if let Some(base_value) = storage_values.get(base_slot) {
            let length_byte = base_value.0[31];
            // Check if value is long
            if length_byte & 1 == 1 {
                // Long bytes/string - populate members
                let length: U256 = U256::from_be_bytes(base_value.0) >> 1;
                // Cap the slot walk at 256 slots to bound work on corrupt
                // lengths. NOTE(review): data beyond the cap is silently
                // truncated — confirm 256 slots (8 KiB) is sufficient.
                let num_slots = length.to::<usize>().div_ceil(32).min(256);
                let data_start = U256::from_be_bytes(keccak256(base_slot.0).0);
                let mut members = Vec::new();
                let mut full_data = Vec::with_capacity(length.to::<usize>());
                for i in 0..num_slots {
                    let data_slot = B256::from(data_start + U256::from(i));
                    let data_slot_u256 = data_start + U256::from(i);
                    // Create member info for this data slot with indexed label
                    let member_info = Self {
                        label: format!("{}[{}]", self.label, i),
                        slot_type: StorageTypeInfo {
                            label: self.slot_type.label.clone(),
                            dyn_sol_type: DynSolType::FixedBytes(32),
                        },
                        offset: 0,
                        slot: data_slot_u256.to_string(),
                        members: None,
                        decoded: None,
                        keys: None,
                    };
                    if let Some(value) = storage_values.get(&data_slot) {
                        // Collect data
                        let bytes_to_take =
                            std::cmp::min(32, length.to::<usize>() - full_data.len());
                        full_data.extend_from_slice(&value.0[..bytes_to_take]);
                    }
                    members.push(member_info);
                }
                // Set the members field
                if !members.is_empty() {
                    self.members = Some(members);
                }
                return Some(full_data);
            }
        }
        None
    }
    /// Decodes storage values (previous and new) and populates the decoded field.
    /// For structs with members, it decodes each member individually.
    pub fn decode_values(&mut self, previous_value: B256, new_value: B256) {
        // If this is a struct with members, decode each member individually
        if let Some(members) = &mut self.members {
            for member in members.iter_mut() {
                let offset = member.offset as usize;
                // Width in bytes that this member occupies within the word.
                let size = match &member.slot_type.dyn_sol_type {
                    DynSolType::Uint(bits) | DynSolType::Int(bits) => bits / 8,
                    DynSolType::Address => 20,
                    DynSolType::Bool => 1,
                    DynSolType::FixedBytes(size) => *size,
                    _ => 32, // Default to full word
                };
                // Extract and decode member values
                let mut prev_bytes = [0u8; 32];
                let mut new_bytes = [0u8; 32];
                if offset + size <= 32 {
                    // In Solidity storage, values are right-aligned
                    // For offset 0, we want the rightmost bytes
                    // For offset 16 (for a uint128), we want bytes 0-16
                    // For packed storage: offset 0 is at the rightmost position
                    // offset 0, size 16 -> read bytes 16-32 (rightmost)
                    // offset 16, size 16 -> read bytes 0-16 (leftmost)
                    let byte_start = 32 - offset - size;
                    prev_bytes[32 - size..]
                        .copy_from_slice(&previous_value.0[byte_start..byte_start + size]);
                    new_bytes[32 - size..]
                        .copy_from_slice(&new_value.0[byte_start..byte_start + size]);
                }
                // Decode the member values
                if let (Ok(prev_val), Ok(new_val)) = (
                    member.slot_type.dyn_sol_type.abi_decode(&prev_bytes),
                    member.slot_type.dyn_sol_type.abi_decode(&new_bytes),
                ) {
                    member.decoded =
                        Some(DecodedSlotValues { previous_value: prev_val, new_value: new_val });
                }
            }
            // For structs with members, we don't need a top-level decoded value
        } else {
            // For non-struct types, decode directly
            // Note: decode() returns None for long bytes/strings, which will be handled by
            // decode_bytes_or_string()
            if let (Some(prev), Some(new)) = (self.decode(previous_value), self.decode(new_value)) {
                self.decoded = Some(DecodedSlotValues { previous_value: prev, new_value: new });
            }
        }
    }
}
/// Custom serializer for StorageTypeInfo that only outputs the label
fn serialize_slot_type<S>(info: &StorageTypeInfo, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&info.label)
}
/// Custom serializer for mapping keys
fn serialize_mapping_keys<S>(keys: &Option<Vec<String>>, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
use serde::ser::SerializeMap;
if let Some(keys) = keys {
let len = if keys.is_empty() { 0 } else { 1 };
let mut map = serializer.serialize_map(Some(len))?;
if keys.len() == 1 {
map.serialize_entry("key", &keys[0])?;
} else if keys.len() > 1 {
map.serialize_entry("keys", keys)?;
}
map.end()
} else {
serializer.serialize_none()
}
}
/// Decoded storage slot values
#[derive(Debug)]
pub struct DecodedSlotValues {
/// Initial decoded storage value
pub previous_value: DynSolValue,
/// Current decoded storage value
pub new_value: DynSolValue,
}
impl Serialize for DecodedSlotValues {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
use serde::ser::SerializeStruct;
let mut state = serializer.serialize_struct("DecodedSlotValues", 2)?;
state.serialize_field("previousValue", &format_token_raw(&self.previous_value))?;
state.serialize_field("newValue", &format_token_raw(&self.new_value))?;
state.end()
}
}
/// Storage slot identifier that uses Solidity [`StorageLayout`] to identify storage slots.
pub struct SlotIdentifier {
storage_layout: Arc<StorageLayout>,
}
impl SlotIdentifier {
/// Creates a new SlotIdentifier with the given storage layout.
pub fn new(storage_layout: Arc<StorageLayout>) -> Self {
Self { storage_layout }
}
/// Identifies a storage slots type using the [`StorageLayout`].
///
/// It can also identify whether a slot belongs to a mapping if provided with [`MappingSlots`].
pub fn identify(&self, slot: &B256, mapping_slots: Option<&MappingSlots>) -> Option<SlotInfo> {
trace!(?slot, "identifying slot");
let slot_u256 = U256::from_be_bytes(slot.0);
let slot_str = slot_u256.to_string();
for storage in &self.storage_layout.storage {
let storage_type = self.storage_layout.types.get(&storage.storage_type)?;
let dyn_type = DynSolType::parse(&storage_type.label).ok();
// Check if we're able to match on a slot from the layout i.e any of the base slots.
// This will always be the case for primitive types that fit in a single slot.
if storage.slot == slot_str
&& let Some(parsed_type) = dyn_type
{
// Successfully parsed - handle arrays or simple types
let label = if let DynSolType::FixedArray(_, _) = &parsed_type {
format!("{}{}", storage.label, get_array_base_indices(&parsed_type))
} else {
storage.label.clone()
};
return Some(SlotInfo {
label,
slot_type: StorageTypeInfo {
label: storage_type.label.clone(),
dyn_sol_type: parsed_type,
},
offset: storage.offset,
slot: storage.slot.clone(),
members: None,
decoded: None,
keys: None,
});
}
// Encoding types: <https://docs.soliditylang.org/en/latest/internals/layout_in_storage.html#json-output>
if storage_type.encoding == ENCODING_INPLACE {
// Can be of type FixedArrays or Structs
// Handles the case where the accessed `slot` is maybe different from the base slot.
let array_start_slot = U256::from_str(&storage.slot).ok()?;
if let Some(parsed_type) = dyn_type
&& let DynSolType::FixedArray(_, _) = parsed_type
&& let Some(slot_info) = self.handle_array_slot(
storage,
storage_type,
slot_u256,
array_start_slot,
&slot_str,
)
{
return Some(slot_info);
}
// If type parsing fails and the label is a struct
if is_struct(&storage_type.label) {
let struct_start_slot = U256::from_str(&storage.slot).ok()?;
if let Some(slot_info) = self.handle_struct(
&storage.label,
storage_type,
slot_u256,
struct_start_slot,
storage.offset,
&slot_str,
0,
) {
return Some(slot_info);
}
}
} else if storage_type.encoding == ENCODING_MAPPING
&& let Some(mapping_slots) = mapping_slots
&& let Some(info) =
self.handle_mapping(storage, storage_type, slot, &slot_str, mapping_slots)
{
return Some(info);
}
}
None
}
/// Identifies a bytes or string storage slot by checking all bytes/string variables
/// in the storage layout and using their base slot values from the provided storage changes.
///
/// # Arguments
/// * `slot` - The slot being identified
/// * `storage_values` - Map of storage slots to their current values
pub fn identify_bytes_or_string(
&self,
slot: &B256,
storage_values: &B256Map<B256>,
) -> Option<SlotInfo> {
let slot_u256 = U256::from_be_bytes(slot.0);
let slot_str = slot_u256.to_string();
// Search through all bytes/string variables in the storage layout
for storage in &self.storage_layout.storage {
if let Some(storage_type) = self.storage_layout.types.get(&storage.storage_type)
&& storage_type.encoding == ENCODING_BYTES
{
let Some(base_slot) = U256::from_str(&storage.slot).map(B256::from).ok() else {
continue;
};
// Get the base slot value from storage_values
if let Some(base_value) = storage_values.get(&base_slot)
&& let Some(info) = self.handle_bytes_string(
storage,
storage_type,
slot_u256,
&slot_str,
base_value,
)
{
return Some(info);
}
}
}
None
}
/// Handles identification of array slots.
///
/// # Arguments
/// * `storage` - The storage metadata from the layout
/// * `storage_type` - Type information for the storage slot
/// * `slot` - The target slot being identified
/// * `array_start_slot` - The starting slot of the array in storage i.e base_slot
/// * `slot_str` - String representation of the slot for output
fn handle_array_slot(
&self,
storage: &Storage,
storage_type: &StorageType,
slot: U256,
array_start_slot: U256,
slot_str: &str,
) -> Option<SlotInfo> {
// Check if slot is within array bounds
let total_bytes = storage_type.number_of_bytes.parse::<u64>().ok()?;
let total_slots = total_bytes.div_ceil(32);
if slot >= array_start_slot && slot < array_start_slot + U256::from(total_slots) {
let parsed_type = DynSolType::parse(&storage_type.label).ok()?;
let index = (slot - array_start_slot).to::<u64>();
// Format the array element label based on array dimensions
let label = match &parsed_type {
DynSolType::FixedArray(inner, _) => {
if let DynSolType::FixedArray(_, inner_size) = inner.as_ref() {
// 2D array: calculate row and column
let row = index / (*inner_size as u64);
let col = index % (*inner_size as u64);
format!("{}[{row}][{col}]", storage.label)
} else {
// 1D array
format!("{}[{index}]", storage.label)
}
}
_ => storage.label.clone(),
};
return Some(SlotInfo {
label,
slot_type: StorageTypeInfo {
label: storage_type.label.clone(),
dyn_sol_type: parsed_type,
},
offset: 0,
slot: slot_str.to_string(),
members: None,
decoded: None,
keys: None,
});
}
None
}
/// Handles identification of struct slots.
///
/// Recursively resolves struct members to find the exact member corresponding
/// to the target slot. Handles both single-slot (packed) and multi-slot structs.
///
/// # Arguments
/// * `base_label` - The label/name for this struct or member
/// * `storage_type` - Type information for the storage
/// * `target_slot` - The target slot being identified
/// * `struct_start_slot` - The starting slot of this struct
/// * `offset` - Offset within the slot (for packed storage)
/// * `slot_str` - String representation of the slot for output
/// * `depth` - Current recursion depth
#[allow(clippy::too_many_arguments)]
fn handle_struct(
&self,
base_label: &str,
storage_type: &StorageType,
target_slot: U256,
struct_start_slot: U256,
offset: i64,
slot_str: &str,
depth: usize,
) -> Option<SlotInfo> {
// Limit recursion depth to prevent stack overflow
const MAX_DEPTH: usize = 10;
if depth > MAX_DEPTH {
return None;
}
let members = storage_type
.other
.get("members")
.and_then(|v| serde_json::from_value::<Vec<Storage>>(v.clone()).ok())?;
// If this is the exact slot we're looking for (struct's base slot)
if struct_start_slot == target_slot
// Find the member at slot offset 0 (the member that starts at this slot)
&& let Some(first_member) = members.iter().find(|m| m.slot == "0")
{
let member_type_info = self.storage_layout.types.get(&first_member.storage_type)?;
// Check if we have a single-slot struct (all members have slot "0")
let is_single_slot = members.iter().all(|m| m.slot == "0");
if is_single_slot {
// Build member info for single-slot struct
let mut member_infos = Vec::new();
for member in &members {
if let Some(member_type_info) =
self.storage_layout.types.get(&member.storage_type)
&& let Some(member_type) = DynSolType::parse(&member_type_info.label).ok()
{
member_infos.push(SlotInfo {
label: member.label.clone(),
slot_type: StorageTypeInfo {
label: member_type_info.label.clone(),
dyn_sol_type: member_type,
},
offset: member.offset,
slot: slot_str.to_string(),
members: None,
decoded: None,
keys: None,
});
}
}
// Build the CustomStruct type
let struct_name =
storage_type.label.strip_prefix("struct ").unwrap_or(&storage_type.label);
let prop_names: Vec<String> = members.iter().map(|m| m.label.clone()).collect();
let member_types: Vec<DynSolType> =
member_infos.iter().map(|info| info.slot_type.dyn_sol_type.clone()).collect();
let parsed_type = DynSolType::CustomStruct {
name: struct_name.to_string(),
prop_names,
tuple: member_types,
};
return Some(SlotInfo {
label: base_label.to_string(),
slot_type: StorageTypeInfo {
label: storage_type.label.clone(),
dyn_sol_type: parsed_type,
},
offset,
slot: slot_str.to_string(),
decoded: None,
members: if member_infos.is_empty() { None } else { Some(member_infos) },
keys: None,
});
} else {
// Multi-slot struct - return the first member.
let member_label = format!("{}.{}", base_label, first_member.label);
// If the first member is itself a struct, recurse
if is_struct(&member_type_info.label) {
return self.handle_struct(
&member_label,
member_type_info,
target_slot,
struct_start_slot,
first_member.offset,
slot_str,
depth + 1,
);
}
// Return the first member as a primitive
return Some(SlotInfo {
label: member_label,
slot_type: StorageTypeInfo {
label: member_type_info.label.clone(),
dyn_sol_type: DynSolType::parse(&member_type_info.label).ok()?,
},
offset: first_member.offset,
slot: slot_str.to_string(),
decoded: None,
members: None,
keys: None,
});
}
}
// Not the base slot - search through members
for member in &members {
let member_slot_offset = U256::from_str(&member.slot).ok()?;
let member_slot = struct_start_slot + member_slot_offset;
let member_type_info = self.storage_layout.types.get(&member.storage_type)?;
let member_label = format!("{}.{}", base_label, member.label);
// If this member is a struct, recurse into it
if is_struct(&member_type_info.label) {
let slot_info = self.handle_struct(
&member_label,
member_type_info,
target_slot,
member_slot,
member.offset,
slot_str,
depth + 1,
);
if member_slot == target_slot || slot_info.is_some() {
return slot_info;
}
}
if member_slot == target_slot {
// Found the exact member slot
// Regular member
let member_type = DynSolType::parse(&member_type_info.label).ok()?;
return Some(SlotInfo {
label: member_label,
slot_type: StorageTypeInfo {
label: member_type_info.label.clone(),
dyn_sol_type: member_type,
},
offset: member.offset,
slot: slot_str.to_string(),
members: None,
decoded: None,
keys: None,
});
}
}
None
}
/// Handles identification of mapping slots.
///
/// Identifies mapping entries by walking up the parent chain to find the base slot,
/// then decodes the keys and builds the appropriate label.
///
/// # Arguments
/// * `storage` - The storage metadata from the layout
/// * `storage_type` - Type information for the storage
/// * `slot` - The accessed slot being identified
/// * `slot_str` - String representation of the slot for output
/// * `mapping_slots` - Tracked mapping slot accesses for key resolution
fn handle_mapping(
&self,
storage: &Storage,
storage_type: &StorageType,
slot: &B256,
slot_str: &str,
mapping_slots: &MappingSlots,
) -> Option<SlotInfo> {
trace!(
"handle_mapping: storage.slot={}, slot={:?}, has_keys={}, has_parents={}",
storage.slot,
slot,
mapping_slots.keys.contains_key(slot),
mapping_slots.parent_slots.contains_key(slot)
);
// Verify it's actually a mapping type
if storage_type.encoding != ENCODING_MAPPING {
return None;
}
// Check if this slot is a known mapping entry
if !mapping_slots.keys.contains_key(slot) {
return None;
}
// Convert storage.slot to B256 for comparison
let storage_slot_b256 = B256::from(U256::from_str(&storage.slot).ok()?);
// Walk up the parent chain to collect keys and validate the base slot
let mut current_slot = *slot;
let mut keys_to_decode = Vec::new();
let mut found_base = false;
while let Some((key, parent)) =
mapping_slots.keys.get(¤t_slot).zip(mapping_slots.parent_slots.get(¤t_slot))
{
keys_to_decode.push(*key);
// Check if the parent is our base storage slot
if *parent == storage_slot_b256 {
found_base = true;
break;
}
// Move up to the parent for the next iteration
current_slot = *parent;
}
if !found_base {
trace!("Mapping slot {} does not match any parent in chain", storage.slot);
return None;
}
// Resolve the mapping type to get all key types and the final value type
let (key_types, value_type_label, full_type_label) =
self.resolve_mapping_type(&storage.storage_type)?;
// Reverse keys to process from outermost to innermost
keys_to_decode.reverse();
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/version.rs | crates/common/src/version.rs | //! Foundry version information.
/// The SemVer compatible version information for Foundry.
pub const SEMVER_VERSION: &str = env!("FOUNDRY_SEMVER_VERSION");
/// The short version message information for the Foundry CLI.
pub const SHORT_VERSION: &str = env!("FOUNDRY_SHORT_VERSION");
/// The long version message information for the Foundry CLI.
pub const LONG_VERSION: &str = concat!(
env!("FOUNDRY_LONG_VERSION_0"),
"\n",
env!("FOUNDRY_LONG_VERSION_1"),
"\n",
env!("FOUNDRY_LONG_VERSION_2"),
"\n",
env!("FOUNDRY_LONG_VERSION_3"),
);
/// Whether the version is a nightly build.
pub const IS_NIGHTLY_VERSION: bool = option_env!("FOUNDRY_IS_NIGHTLY_VERSION").is_some();
/// The warning message for nightly versions.
pub const NIGHTLY_VERSION_WARNING_MESSAGE: &str = "This is a nightly build of Foundry. It is recommended to use the latest stable version. \
To mute this warning set `FOUNDRY_DISABLE_NIGHTLY_WARNING` in your environment. \n";
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/calc.rs | crates/common/src/calc.rs | //! Commonly used calculations.
/// Returns the mean of the slice.
pub fn mean(values: &[u64]) -> u64 {
if values.is_empty() {
return 0;
}
(values.iter().map(|x| *x as u128).sum::<u128>() / values.len() as u128) as u64
}
/// Returns the median of a _sorted_ slice.
pub fn median_sorted(values: &[u64]) -> u64 {
if values.is_empty() {
return 0;
}
let len = values.len();
let mid = len / 2;
if len.is_multiple_of(2) { (values[mid - 1] + values[mid]) / 2 } else { values[mid] }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn calc_mean_empty() {
let m = mean(&[]);
assert_eq!(m, 0);
}
#[test]
fn calc_mean() {
let m = mean(&[0, 1, 2, 3, 4, 5, 6]);
assert_eq!(m, 3);
}
#[test]
fn calc_mean_overflow() {
let m = mean(&[0, 1, 2, u32::MAX as u64, 3, u16::MAX as u64, u64::MAX, 6]);
assert_eq!(m, 2305843009750573057);
}
#[test]
fn calc_median_empty() {
let m = median_sorted(&[]);
assert_eq!(m, 0);
}
#[test]
fn calc_median() {
let mut values = vec![29, 30, 31, 40, 59, 61, 71];
values.sort();
let m = median_sorted(&values);
assert_eq!(m, 40);
}
#[test]
fn calc_median_even() {
let mut values = vec![80, 90, 30, 40, 50, 60, 10, 20];
values.sort();
let m = median_sorted(&values);
assert_eq!(m, 45);
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/transactions.rs | crates/common/src/transactions.rs | //! Wrappers for transactions.
use alloy_consensus::{Transaction, TxEnvelope, transaction::SignerRecoverable};
use alloy_eips::eip7702::SignedAuthorization;
use alloy_network::AnyTransactionReceipt;
use alloy_primitives::{Address, Bytes, TxKind, U256};
use alloy_provider::{
Provider,
network::{AnyNetwork, ReceiptResponse, TransactionBuilder},
};
use alloy_rpc_types::{BlockId, TransactionRequest};
use alloy_serde::WithOtherFields;
use eyre::Result;
use foundry_common_fmt::UIfmt;
use serde::{Deserialize, Serialize};
/// Helper type to carry a transaction along with an optional revert reason
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct TransactionReceiptWithRevertReason {
/// The underlying transaction receipt
#[serde(flatten)]
pub receipt: AnyTransactionReceipt,
/// The revert reason string if the transaction status is failed
#[serde(skip_serializing_if = "Option::is_none", rename = "revertReason")]
pub revert_reason: Option<String>,
}
impl TransactionReceiptWithRevertReason {
/// Returns if the status of the transaction is 0 (failure)
pub fn is_failure(&self) -> bool {
!self.receipt.inner.inner.inner.receipt.status.coerce_status()
}
/// Updates the revert reason field using `eth_call` and returns an Err variant if the revert
/// reason was not successfully updated
pub async fn update_revert_reason<P: Provider<AnyNetwork>>(
&mut self,
provider: &P,
) -> Result<()> {
self.revert_reason = self.fetch_revert_reason(provider).await?;
Ok(())
}
async fn fetch_revert_reason<P: Provider<AnyNetwork>>(
&self,
provider: &P,
) -> Result<Option<String>> {
if !self.is_failure() {
return Ok(None);
}
let transaction = provider
.get_transaction_by_hash(self.receipt.transaction_hash)
.await
.map_err(|err| eyre::eyre!("unable to fetch transaction: {err}"))?
.ok_or_else(|| eyre::eyre!("transaction not found"))?;
if let Some(block_hash) = self.receipt.block_hash {
let mut call_request: WithOtherFields<TransactionRequest> =
transaction.inner.inner.clone_inner().into();
call_request.set_from(transaction.inner.inner.signer());
match provider.call(call_request).block(BlockId::Hash(block_hash.into())).await {
Err(e) => return Ok(extract_revert_reason(e.to_string())),
Ok(_) => eyre::bail!("no revert reason as transaction succeeded"),
}
}
eyre::bail!("unable to fetch block_hash")
}
}
impl From<AnyTransactionReceipt> for TransactionReceiptWithRevertReason {
fn from(receipt: AnyTransactionReceipt) -> Self {
Self { receipt, revert_reason: None }
}
}
impl From<TransactionReceiptWithRevertReason> for AnyTransactionReceipt {
fn from(receipt_with_reason: TransactionReceiptWithRevertReason) -> Self {
receipt_with_reason.receipt
}
}
impl UIfmt for TransactionReceiptWithRevertReason {
fn pretty(&self) -> String {
if let Some(revert_reason) = &self.revert_reason {
format!(
"{}
revertReason {}",
self.receipt.pretty(),
revert_reason
)
} else {
self.receipt.pretty()
}
}
}
impl UIfmt for TransactionMaybeSigned {
fn pretty(&self) -> String {
match self {
Self::Signed { tx, .. } => tx.pretty(),
Self::Unsigned(tx) => format!(
"
accessList {}
chainId {}
gasLimit {}
gasPrice {}
input {}
maxFeePerBlobGas {}
maxFeePerGas {}
maxPriorityFeePerGas {}
nonce {}
to {}
type {}
value {}",
tx.access_list
.as_ref()
.map(|a| a.iter().collect::<Vec<_>>())
.unwrap_or_default()
.pretty(),
tx.chain_id.pretty(),
tx.gas_limit().unwrap_or_default(),
tx.gas_price.pretty(),
tx.input.input.pretty(),
tx.max_fee_per_blob_gas.pretty(),
tx.max_fee_per_gas.pretty(),
tx.max_priority_fee_per_gas.pretty(),
tx.nonce.pretty(),
tx.to.as_ref().map(|a| a.to()).unwrap_or_default().pretty(),
tx.transaction_type.unwrap_or_default(),
tx.value.pretty(),
),
}
}
}
fn extract_revert_reason<S: AsRef<str>>(error_string: S) -> Option<String> {
let message_substr = "execution reverted: ";
error_string
.as_ref()
.find(message_substr)
.map(|index| error_string.as_ref().split_at(index + message_substr.len()).1.to_string())
}
/// Returns the `UiFmt::pretty()` formatted attribute of the transaction receipt
pub fn get_pretty_tx_receipt_attr(
receipt: &TransactionReceiptWithRevertReason,
attr: &str,
) -> Option<String> {
match attr {
"blockHash" | "block_hash" => Some(receipt.receipt.block_hash.pretty()),
"blockNumber" | "block_number" => Some(receipt.receipt.block_number.pretty()),
"contractAddress" | "contract_address" => Some(receipt.receipt.contract_address.pretty()),
"cumulativeGasUsed" | "cumulative_gas_used" => {
Some(receipt.receipt.inner.inner.inner.receipt.cumulative_gas_used.pretty())
}
"effectiveGasPrice" | "effective_gas_price" => {
Some(receipt.receipt.effective_gas_price.to_string())
}
"gasUsed" | "gas_used" => Some(receipt.receipt.gas_used.to_string()),
"logs" => Some(receipt.receipt.inner.inner.inner.receipt.logs.as_slice().pretty()),
"logsBloom" | "logs_bloom" => Some(receipt.receipt.inner.inner.inner.logs_bloom.pretty()),
"root" | "stateRoot" | "state_root " => Some(receipt.receipt.state_root().pretty()),
"status" | "statusCode" | "status_code" => {
Some(receipt.receipt.inner.inner.inner.receipt.status.pretty())
}
"transactionHash" | "transaction_hash" => Some(receipt.receipt.transaction_hash.pretty()),
"transactionIndex" | "transaction_index" => {
Some(receipt.receipt.transaction_index.pretty())
}
"type" | "transaction_type" => Some(receipt.receipt.inner.inner.r#type.to_string()),
"revertReason" | "revert_reason" => Some(receipt.revert_reason.pretty()),
_ => None,
}
}
/// Used for broadcasting transactions
/// A transaction can either be a [`TransactionRequest`] waiting to be signed
/// or a [`TxEnvelope`], already signed
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum TransactionMaybeSigned {
Signed {
#[serde(flatten)]
tx: TxEnvelope,
from: Address,
},
Unsigned(WithOtherFields<TransactionRequest>),
}
impl TransactionMaybeSigned {
/// Creates a new (unsigned) transaction for broadcast
pub fn new(tx: WithOtherFields<TransactionRequest>) -> Self {
Self::Unsigned(tx)
}
/// Creates a new signed transaction for broadcast.
pub fn new_signed(
tx: TxEnvelope,
) -> core::result::Result<Self, alloy_consensus::crypto::RecoveryError> {
let from = tx.recover_signer()?;
Ok(Self::Signed { tx, from })
}
pub fn is_unsigned(&self) -> bool {
matches!(self, Self::Unsigned(_))
}
pub fn as_unsigned_mut(&mut self) -> Option<&mut WithOtherFields<TransactionRequest>> {
match self {
Self::Unsigned(tx) => Some(tx),
_ => None,
}
}
pub fn from(&self) -> Option<Address> {
match self {
Self::Signed { from, .. } => Some(*from),
Self::Unsigned(tx) => tx.from,
}
}
pub fn input(&self) -> Option<&Bytes> {
match self {
Self::Signed { tx, .. } => Some(tx.input()),
Self::Unsigned(tx) => tx.input.input(),
}
}
pub fn to(&self) -> Option<TxKind> {
match self {
Self::Signed { tx, .. } => Some(tx.kind()),
Self::Unsigned(tx) => tx.to,
}
}
pub fn value(&self) -> Option<U256> {
match self {
Self::Signed { tx, .. } => Some(tx.value()),
Self::Unsigned(tx) => tx.value,
}
}
pub fn gas(&self) -> Option<u128> {
match self {
Self::Signed { tx, .. } => Some(tx.gas_limit() as u128),
Self::Unsigned(tx) => tx.gas_limit().map(|g| g as u128),
}
}
pub fn nonce(&self) -> Option<u64> {
match self {
Self::Signed { tx, .. } => Some(tx.nonce()),
Self::Unsigned(tx) => tx.nonce,
}
}
pub fn authorization_list(&self) -> Option<Vec<SignedAuthorization>> {
match self {
Self::Signed { tx, .. } => tx.authorization_list().map(|auths| auths.to_vec()),
Self::Unsigned(tx) => tx.authorization_list.as_deref().map(|auths| auths.to_vec()),
}
.filter(|auths| !auths.is_empty())
}
}
impl From<TransactionRequest> for TransactionMaybeSigned {
fn from(tx: TransactionRequest) -> Self {
Self::new(WithOtherFields::new(tx))
}
}
impl TryFrom<TxEnvelope> for TransactionMaybeSigned {
type Error = alloy_consensus::crypto::RecoveryError;
fn try_from(tx: TxEnvelope) -> core::result::Result<Self, Self::Error> {
Self::new_signed(tx)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_extract_revert_reason() {
let error_string_1 = "server returned an error response: error code 3: execution reverted: Transaction too old";
let error_string_2 = "server returned an error response: error code 3: Invalid signature";
assert_eq!(extract_revert_reason(error_string_1), Some("Transaction too old".to_string()));
assert_eq!(extract_revert_reason(error_string_2), None);
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/compile.rs | crates/common/src/compile.rs | //! Support for compiling [foundry_compilers::Project]
use crate::{
TestFunctionExt, preprocessor::DynamicTestLinkingPreprocessor, shell, term::SpinnerReporter,
};
use comfy_table::{Cell, Color, Table, modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN};
use eyre::Result;
use foundry_block_explorers::contract::Metadata;
use foundry_compilers::{
Artifact, Project, ProjectBuilder, ProjectCompileOutput, ProjectPathsConfig, SolcConfig,
artifacts::{BytecodeObject, Contract, Source, remappings::Remapping},
compilers::{
Compiler,
solc::{Solc, SolcCompiler},
},
info::ContractInfo as CompilerContractInfo,
multi::{MultiCompiler, MultiCompilerSettings},
project::Preprocessor,
report::{BasicStdoutReporter, NoReporter, Report},
solc::SolcSettings,
};
use num_format::{Locale, ToFormattedString};
use std::{
collections::BTreeMap,
fmt::Display,
io::IsTerminal,
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
time::Instant,
};
/// A Solar compiler instance, to grant syntactic and semantic analysis capabilities.
pub type Analysis = Arc<solar::sema::Compiler>;
/// Builder type to configure how to compile a project.
///
/// This is merely a wrapper for [`Project::compile()`] which also prints to stdout depending on its
/// settings.
#[must_use = "ProjectCompiler does nothing unless you call a `compile*` method"]
pub struct ProjectCompiler {
/// The root of the project.
project_root: PathBuf,
/// Whether to also print contract names.
print_names: Option<bool>,
/// Whether to also print contract sizes.
print_sizes: Option<bool>,
/// Whether to print anything at all. Overrides other `print` options.
quiet: Option<bool>,
/// Whether to bail on compiler errors.
bail: Option<bool>,
/// Whether to ignore the contract initcode size limit introduced by EIP-3860.
ignore_eip_3860: bool,
/// Extra files to include, that are not necessarily in the project's source directory.
files: Vec<PathBuf>,
/// Whether to compile with dynamic linking tests and scripts.
dynamic_test_linking: bool,
}
impl Default for ProjectCompiler {
#[inline]
fn default() -> Self {
Self::new()
}
}
impl ProjectCompiler {
/// Create a new builder with the default settings.
#[inline]
pub fn new() -> Self {
Self {
project_root: PathBuf::new(),
print_names: None,
print_sizes: None,
quiet: Some(crate::shell::is_quiet()),
bail: None,
ignore_eip_3860: false,
files: Vec::new(),
dynamic_test_linking: false,
}
}
/// Sets whether to print contract names.
#[inline]
pub fn print_names(mut self, yes: bool) -> Self {
self.print_names = Some(yes);
self
}
/// Sets whether to print contract sizes.
#[inline]
pub fn print_sizes(mut self, yes: bool) -> Self {
self.print_sizes = Some(yes);
self
}
/// Sets whether to print anything at all. Overrides other `print` options.
#[inline]
#[doc(alias = "silent")]
pub fn quiet(mut self, yes: bool) -> Self {
self.quiet = Some(yes);
self
}
/// Sets whether to bail on compiler errors.
#[inline]
pub fn bail(mut self, yes: bool) -> Self {
self.bail = Some(yes);
self
}
/// Sets whether to ignore EIP-3860 initcode size limits.
#[inline]
pub fn ignore_eip_3860(mut self, yes: bool) -> Self {
self.ignore_eip_3860 = yes;
self
}
/// Sets extra files to include, that are not necessarily in the project's source dir.
#[inline]
pub fn files(mut self, files: impl IntoIterator<Item = PathBuf>) -> Self {
self.files.extend(files);
self
}
/// Sets if tests should be dynamically linked.
#[inline]
pub fn dynamic_test_linking(mut self, preprocess: bool) -> Self {
self.dynamic_test_linking = preprocess;
self
}
/// Compiles the project.
#[instrument(target = "forge::compile", skip_all)]
pub fn compile<C: Compiler<CompilerContract = Contract>>(
mut self,
project: &Project<C>,
) -> Result<ProjectCompileOutput<C>>
where
DynamicTestLinkingPreprocessor: Preprocessor<C>,
{
self.project_root = project.root().to_path_buf();
// TODO: Avoid using std::process::exit(0).
// Replacing this with a return (e.g., Ok(ProjectCompileOutput::default())) would be more
// idiomatic, but it currently requires a `Default` bound on `C::Language`, which
// breaks compatibility with downstream crates like `foundry-cli`. This would need a
// broader refactor across the call chain. Leaving it as-is for now until a larger
// refactor is feasible.
if !project.paths.has_input_files() && self.files.is_empty() {
sh_println!("Nothing to compile")?;
std::process::exit(0);
}
// Taking is fine since we don't need these in `compile_with`.
let files = std::mem::take(&mut self.files);
let preprocess = self.dynamic_test_linking;
self.compile_with(|| {
let sources = if !files.is_empty() {
Source::read_all(files)?
} else {
project.paths.read_input_files()?
};
let mut compiler =
foundry_compilers::project::ProjectCompiler::with_sources(project, sources)?;
if preprocess {
compiler = compiler.with_preprocessor(DynamicTestLinkingPreprocessor);
}
compiler.compile().map_err(Into::into)
})
}
/// Compiles the project with the given closure
fn compile_with<C: Compiler<CompilerContract = Contract>, F>(
self,
f: F,
) -> Result<ProjectCompileOutput<C>>
where
F: FnOnce() -> Result<ProjectCompileOutput<C>>,
{
let quiet = self.quiet.unwrap_or(false);
let bail = self.bail.unwrap_or(true);
let output = with_compilation_reporter(quiet, Some(self.project_root.clone()), || {
tracing::debug!("compiling project");
let timer = Instant::now();
let r = f();
let elapsed = timer.elapsed();
tracing::debug!("finished compiling in {:.3}s", elapsed.as_secs_f64());
r
})?;
if bail && output.has_compiler_errors() {
eyre::bail!("{output}")
}
if !quiet {
if !shell::is_json() {
if output.is_unchanged() {
sh_println!("No files changed, compilation skipped")?;
} else {
// print the compiler output / warnings
sh_println!("{output}")?;
}
}
self.handle_output(&output)?;
}
Ok(output)
}
/// If configured, this will print sizes or names
fn handle_output<C: Compiler<CompilerContract = Contract>>(
&self,
output: &ProjectCompileOutput<C>,
) -> Result<()> {
let print_names = self.print_names.unwrap_or(false);
let print_sizes = self.print_sizes.unwrap_or(false);
// print any sizes or names
if print_names {
let mut artifacts: BTreeMap<_, Vec<_>> = BTreeMap::new();
for (name, (_, version)) in output.versioned_artifacts() {
artifacts.entry(version).or_default().push(name);
}
if shell::is_json() {
sh_println!("{}", serde_json::to_string(&artifacts).unwrap())?;
} else {
for (version, names) in artifacts {
sh_println!(
" compiler version: {}.{}.{}",
version.major,
version.minor,
version.patch
)?;
for name in names {
sh_println!(" - {name}")?;
}
}
}
}
if print_sizes {
// add extra newline if names were already printed
if print_names && !shell::is_json() {
sh_println!()?;
}
let mut size_report = SizeReport { contracts: BTreeMap::new() };
let mut artifacts: BTreeMap<String, Vec<_>> = BTreeMap::new();
for (id, artifact) in output.artifact_ids().filter(|(id, _)| {
// filter out forge-std specific contracts
!id.source.to_string_lossy().contains("/forge-std/src/")
}) {
artifacts.entry(id.name.clone()).or_default().push((id.source.clone(), artifact));
}
for (name, artifact_list) in artifacts {
for (path, artifact) in &artifact_list {
let runtime_size = contract_size(*artifact, false).unwrap_or_default();
let init_size = contract_size(*artifact, true).unwrap_or_default();
let is_dev_contract = artifact
.abi
.as_ref()
.map(|abi| {
abi.functions().any(|f| {
f.test_function_kind().is_known()
|| matches!(f.name.as_str(), "IS_TEST" | "IS_SCRIPT")
})
})
.unwrap_or(false);
let unique_name = if artifact_list.len() > 1 {
format!(
"{} ({})",
name,
path.strip_prefix(&self.project_root).unwrap_or(path).display()
)
} else {
name.clone()
};
size_report.contracts.insert(
unique_name,
ContractInfo { runtime_size, init_size, is_dev_contract },
);
}
}
sh_println!("{size_report}")?;
eyre::ensure!(
!size_report.exceeds_runtime_size_limit(),
"some contracts exceed the runtime size limit \
(EIP-170: {CONTRACT_RUNTIME_SIZE_LIMIT} bytes)"
);
// Check size limits only if not ignoring EIP-3860
eyre::ensure!(
self.ignore_eip_3860 || !size_report.exceeds_initcode_size_limit(),
"some contracts exceed the initcode size limit \
(EIP-3860: {CONTRACT_INITCODE_SIZE_LIMIT} bytes)"
);
}
Ok(())
}
}
// https://eips.ethereum.org/EIPS/eip-170
const CONTRACT_RUNTIME_SIZE_LIMIT: usize = 24576;
// https://eips.ethereum.org/EIPS/eip-3860
const CONTRACT_INITCODE_SIZE_LIMIT: usize = 49152;
/// Contracts with info about their size
pub struct SizeReport {
/// `contract name -> info`
pub contracts: BTreeMap<String, ContractInfo>,
}
impl SizeReport {
/// Returns the maximum runtime code size, excluding dev contracts.
pub fn max_runtime_size(&self) -> usize {
self.contracts
.values()
.filter(|c| !c.is_dev_contract)
.map(|c| c.runtime_size)
.max()
.unwrap_or(0)
}
/// Returns the maximum initcode size, excluding dev contracts.
pub fn max_init_size(&self) -> usize {
self.contracts
.values()
.filter(|c| !c.is_dev_contract)
.map(|c| c.init_size)
.max()
.unwrap_or(0)
}
/// Returns true if any contract exceeds the runtime size limit, excluding dev contracts.
pub fn exceeds_runtime_size_limit(&self) -> bool {
self.max_runtime_size() > CONTRACT_RUNTIME_SIZE_LIMIT
}
/// Returns true if any contract exceeds the initcode size limit, excluding dev contracts.
pub fn exceeds_initcode_size_limit(&self) -> bool {
self.max_init_size() > CONTRACT_INITCODE_SIZE_LIMIT
}
}
impl Display for SizeReport {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
if shell::is_json() {
writeln!(f, "{}", self.format_json_output())?;
} else {
writeln!(f, "\n{}", self.format_table_output())?;
}
Ok(())
}
}
impl SizeReport {
fn format_json_output(&self) -> String {
let contracts = self
.contracts
.iter()
.filter(|(_, c)| !c.is_dev_contract && (c.runtime_size > 0 || c.init_size > 0))
.map(|(name, contract)| {
(
name.clone(),
serde_json::json!({
"runtime_size": contract.runtime_size,
"init_size": contract.init_size,
"runtime_margin": CONTRACT_RUNTIME_SIZE_LIMIT as isize - contract.runtime_size as isize,
"init_margin": CONTRACT_INITCODE_SIZE_LIMIT as isize - contract.init_size as isize,
}),
)
})
.collect::<serde_json::Map<_, _>>();
serde_json::to_string(&contracts).unwrap()
}
fn format_table_output(&self) -> Table {
let mut table = Table::new();
if shell::is_markdown() {
table.load_preset(ASCII_MARKDOWN);
} else {
table.apply_modifier(UTF8_ROUND_CORNERS);
}
table.set_header(vec![
Cell::new("Contract"),
Cell::new("Runtime Size (B)"),
Cell::new("Initcode Size (B)"),
Cell::new("Runtime Margin (B)"),
Cell::new("Initcode Margin (B)"),
]);
// Filters out dev contracts (Test or Script)
let contracts = self
.contracts
.iter()
.filter(|(_, c)| !c.is_dev_contract && (c.runtime_size > 0 || c.init_size > 0));
for (name, contract) in contracts {
let runtime_margin =
CONTRACT_RUNTIME_SIZE_LIMIT as isize - contract.runtime_size as isize;
let init_margin = CONTRACT_INITCODE_SIZE_LIMIT as isize - contract.init_size as isize;
let runtime_color = match contract.runtime_size {
..18_000 => Color::Reset,
18_000..=CONTRACT_RUNTIME_SIZE_LIMIT => Color::Yellow,
_ => Color::Red,
};
let init_color = match contract.init_size {
..36_000 => Color::Reset,
36_000..=CONTRACT_INITCODE_SIZE_LIMIT => Color::Yellow,
_ => Color::Red,
};
let locale = &Locale::en;
table.add_row([
Cell::new(name),
Cell::new(contract.runtime_size.to_formatted_string(locale)).fg(runtime_color),
Cell::new(contract.init_size.to_formatted_string(locale)).fg(init_color),
Cell::new(runtime_margin.to_formatted_string(locale)).fg(runtime_color),
Cell::new(init_margin.to_formatted_string(locale)).fg(init_color),
]);
}
table
}
}
/// Returns the deployed or init size of the contract.
fn contract_size<T: Artifact>(artifact: &T, initcode: bool) -> Option<usize> {
let bytecode = if initcode {
artifact.get_bytecode_object()?
} else {
artifact.get_deployed_bytecode_object()?
};
let size = match bytecode.as_ref() {
BytecodeObject::Bytecode(bytes) => bytes.len(),
BytecodeObject::Unlinked(unlinked) => {
// we don't need to account for placeholders here, because library placeholders take up
// 40 characters: `__$<library hash>$__` which is the same as a 20byte address in hex.
let mut size = unlinked.len();
if unlinked.starts_with("0x") {
size -= 2;
}
// hex -> bytes
size / 2
}
};
Some(size)
}
/// How big the contract is and whether it is a dev contract where size limits can be neglected
#[derive(Clone, Copy, Debug)]
pub struct ContractInfo {
/// Size of the runtime code in bytes
pub runtime_size: usize,
/// Size of the initcode in bytes
pub init_size: usize,
/// A development contract is either a Script or a Test contract.
pub is_dev_contract: bool,
}
/// Compiles target file path.
///
/// If `quiet` no solc related output will be emitted to stdout.
///
/// **Note:** this expects the `target_path` to be absolute
pub fn compile_target<C: Compiler<CompilerContract = Contract>>(
target_path: &Path,
project: &Project<C>,
quiet: bool,
) -> Result<ProjectCompileOutput<C>>
where
DynamicTestLinkingPreprocessor: Preprocessor<C>,
{
ProjectCompiler::new().quiet(quiet).files([target_path.into()]).compile(project)
}
/// Creates a [Project] from an Etherscan source.
pub fn etherscan_project(metadata: &Metadata, target_path: &Path) -> Result<Project> {
let target_path = dunce::canonicalize(target_path)?;
let sources_path = target_path.join(&metadata.contract_name);
metadata.source_tree().write_to(&target_path)?;
let mut settings = metadata.settings()?;
// make remappings absolute with our root
for remapping in &mut settings.remappings {
let new_path = sources_path.join(remapping.path.trim_start_matches('/'));
remapping.path = new_path.display().to_string();
}
// add missing remappings
if !settings.remappings.iter().any(|remapping| remapping.name.starts_with("@openzeppelin/")) {
let oz = Remapping {
context: None,
name: "@openzeppelin/".into(),
path: sources_path.join("@openzeppelin").display().to_string(),
};
settings.remappings.push(oz);
}
// root/
// ContractName/
// [source code]
let paths = ProjectPathsConfig::builder()
.sources(sources_path.clone())
.remappings(settings.remappings.clone())
.build_with_root(sources_path);
// TODO: detect vyper
let v = metadata.compiler_version()?;
let solc = Solc::find_or_install(&v)?;
let compiler = MultiCompiler { solc: Some(SolcCompiler::Specific(solc)), vyper: None };
Ok(ProjectBuilder::<MultiCompiler>::default()
.settings(MultiCompilerSettings {
solc: SolcSettings {
settings: SolcConfig::builder().settings(settings).build(),
..Default::default()
},
..Default::default()
})
.paths(paths)
.ephemeral()
.no_artifacts()
.build(compiler)?)
}
/// Configures the reporter and runs the given closure.
pub fn with_compilation_reporter<O>(
quiet: bool,
project_root: Option<PathBuf>,
f: impl FnOnce() -> O,
) -> O {
#[expect(clippy::collapsible_else_if)]
let reporter = if quiet || shell::is_json() {
Report::new(NoReporter::default())
} else {
if std::io::stdout().is_terminal() {
Report::new(SpinnerReporter::spawn(project_root))
} else {
Report::new(BasicStdoutReporter::default())
}
};
foundry_compilers::report::with_scoped(&reporter, f)
}
/// Container type for parsing contract identifiers from CLI.
///
/// Passed string can be of the following forms:
/// - `src/Counter.sol` - path to the contract file, in the case where it only contains one contract
/// - `src/Counter.sol:Counter` - path to the contract file and the contract name
/// - `Counter` - contract name only
#[derive(Clone, PartialEq, Eq)]
pub enum PathOrContractInfo {
/// Non-canonicalized path provided via CLI.
Path(PathBuf),
/// Contract info provided via CLI.
ContractInfo(CompilerContractInfo),
}
impl PathOrContractInfo {
/// Returns the path to the contract file if provided.
pub fn path(&self) -> Option<PathBuf> {
match self {
Self::Path(path) => Some(path.to_path_buf()),
Self::ContractInfo(info) => info.path.as_ref().map(PathBuf::from),
}
}
/// Returns the contract name if provided.
pub fn name(&self) -> Option<&str> {
match self {
Self::Path(_) => None,
Self::ContractInfo(info) => Some(&info.name),
}
}
}
impl FromStr for PathOrContractInfo {
type Err = eyre::Error;
fn from_str(s: &str) -> Result<Self> {
if let Ok(contract) = CompilerContractInfo::from_str(s) {
return Ok(Self::ContractInfo(contract));
}
let path = PathBuf::from(s);
if path.extension().is_some_and(|ext| ext == "sol" || ext == "vy") {
return Ok(Self::Path(path));
}
Err(eyre::eyre!("Invalid contract identifier, file is not *.sol or *.vy: {}", s))
}
}
impl std::fmt::Debug for PathOrContractInfo {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Path(path) => write!(f, "Path({})", path.display()),
Self::ContractInfo(info) => {
write!(f, "ContractInfo({info})")
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_contract_identifiers() {
let t = ["src/Counter.sol", "src/Counter.sol:Counter", "Counter"];
let i1 = PathOrContractInfo::from_str(t[0]).unwrap();
assert_eq!(i1, PathOrContractInfo::Path(PathBuf::from(t[0])));
let i2 = PathOrContractInfo::from_str(t[1]).unwrap();
assert_eq!(
i2,
PathOrContractInfo::ContractInfo(CompilerContractInfo {
path: Some("src/Counter.sol".to_string()),
name: "Counter".to_string()
})
);
let i3 = PathOrContractInfo::from_str(t[2]).unwrap();
assert_eq!(
i3,
PathOrContractInfo::ContractInfo(CompilerContractInfo {
path: None,
name: "Counter".to_string()
})
);
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/fs.rs | crates/common/src/fs.rs | //! Contains various `std::fs` wrapper functions that also contain the target path in their errors.
use crate::errors::FsPathError;
use flate2::{Compression, read::GzDecoder, write::GzEncoder};
use serde::{Serialize, de::DeserializeOwned};
use std::{
fs::{self, File},
io::{BufReader, BufWriter, Read, Seek, SeekFrom, Write},
path::{Component, Path, PathBuf},
};
/// The [`fs`](self) result type.
pub type Result<T> = std::result::Result<T, FsPathError>;
/// Wrapper for [`File::create`].
pub fn create_file(path: impl AsRef<Path>) -> Result<fs::File> {
let path = path.as_ref();
File::create(path).map_err(|err| FsPathError::create_file(err, path))
}
/// Wrapper for [`std::fs::remove_file`].
pub fn remove_file(path: impl AsRef<Path>) -> Result<()> {
let path = path.as_ref();
fs::remove_file(path).map_err(|err| FsPathError::remove_file(err, path))
}
/// Wrapper for [`std::fs::read`].
pub fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
let path = path.as_ref();
fs::read(path).map_err(|err| FsPathError::read(err, path))
}
/// Wrapper for [`std::fs::read_link`].
pub fn read_link(path: impl AsRef<Path>) -> Result<PathBuf> {
let path = path.as_ref();
fs::read_link(path).map_err(|err| FsPathError::read_link(err, path))
}
/// Wrapper for [`std::fs::read_to_string`].
pub fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
let path = path.as_ref();
fs::read_to_string(path).map_err(|err| FsPathError::read(err, path))
}
/// Reads the JSON file and deserialize it into the provided type.
pub fn read_json_file<T: DeserializeOwned>(path: &Path) -> Result<T> {
// read the file into a byte array first
// https://github.com/serde-rs/json/issues/160
let s = read_to_string(path)?;
serde_json::from_str(&s).map_err(|source| FsPathError::ReadJson { source, path: path.into() })
}
/// Reads and decodes the json gzip file, then deserialize it into the provided type.
pub fn read_json_gzip_file<T: DeserializeOwned>(path: &Path) -> Result<T> {
let file = open(path)?;
let reader = BufReader::new(file);
let decoder = GzDecoder::new(reader);
serde_json::from_reader(decoder)
.map_err(|source| FsPathError::ReadJson { source, path: path.into() })
}
/// Reads the entire contents of a locked shared file into a string.
pub fn locked_read_to_string(path: impl AsRef<Path>) -> Result<String> {
let path = path.as_ref();
let mut file =
fs::OpenOptions::new().read(true).open(path).map_err(|err| FsPathError::open(err, path))?;
file.lock_shared().map_err(|err| FsPathError::lock(err, path))?;
let contents = read_inner(path, &mut file)?;
file.unlock().map_err(|err| FsPathError::unlock(err, path))?;
String::from_utf8(contents).map_err(|err| FsPathError::read(std::io::Error::other(err), path))
}
/// Reads the entire contents of a locked shared file into a bytes vector.
pub fn locked_read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
let path = path.as_ref();
let mut file =
fs::OpenOptions::new().read(true).open(path).map_err(|err| FsPathError::open(err, path))?;
file.lock_shared().map_err(|err| FsPathError::lock(err, path))?;
let contents = read_inner(path, &mut file)?;
file.unlock().map_err(|err| FsPathError::unlock(err, path))?;
Ok(contents)
}
fn read_inner(path: &Path, file: &mut File) -> Result<Vec<u8>> {
let file_len = file.metadata().map_err(|err| FsPathError::open(err, path))?.len() as usize;
let mut buffer = Vec::with_capacity(file_len);
file.read_to_end(&mut buffer).map_err(|err| FsPathError::read(err, path))?;
Ok(buffer)
}
/// Writes the object as a JSON object.
pub fn write_json_file<T: Serialize>(path: &Path, obj: &T) -> Result<()> {
let file = create_file(path)?;
let mut writer = BufWriter::new(file);
serde_json::to_writer(&mut writer, obj)
.map_err(|source| FsPathError::WriteJson { source, path: path.into() })?;
writer.flush().map_err(|e| FsPathError::write(e, path))
}
/// Writes the object as a pretty JSON object.
pub fn write_pretty_json_file<T: Serialize>(path: &Path, obj: &T) -> Result<()> {
let file = create_file(path)?;
let mut writer = BufWriter::new(file);
serde_json::to_writer_pretty(&mut writer, obj)
.map_err(|source| FsPathError::WriteJson { source, path: path.into() })?;
writer.flush().map_err(|e| FsPathError::write(e, path))
}
/// Writes the object as a gzip compressed file.
pub fn write_json_gzip_file<T: Serialize>(path: &Path, obj: &T) -> Result<()> {
let file = create_file(path)?;
let writer = BufWriter::new(file);
let mut encoder = GzEncoder::new(writer, Compression::default());
serde_json::to_writer(&mut encoder, obj)
.map_err(|source| FsPathError::WriteJson { source, path: path.into() })?;
// Ensure we surface any I/O errors on final gzip write and buffer flush.
let mut inner_writer = encoder.finish().map_err(|e| FsPathError::write(e, path))?;
inner_writer.flush().map_err(|e| FsPathError::write(e, path))?;
Ok(())
}
/// Wrapper for `std::fs::write`
pub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result<()> {
let path = path.as_ref();
fs::write(path, contents).map_err(|err| FsPathError::write(err, path))
}
/// Writes all content in an exclusive locked file.
pub fn locked_write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result<()> {
let path = path.as_ref();
let mut file = fs::OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(path)
.map_err(|err| FsPathError::open(err, path))?;
file.lock().map_err(|err| FsPathError::lock(err, path))?;
file.write_all(contents.as_ref()).map_err(|err| FsPathError::write(err, path))?;
file.unlock().map_err(|err| FsPathError::unlock(err, path))
}
/// Writes a line in an exclusive locked file.
pub fn locked_write_line(path: impl AsRef<Path>, line: &str) -> Result<()> {
let path = path.as_ref();
if cfg!(windows) {
return locked_write_line_windows(path, line);
}
let mut file = std::fs::OpenOptions::new()
.append(true)
.create(true)
.open(path)
.map_err(|err| FsPathError::open(err, path))?;
file.lock().map_err(|err| FsPathError::lock(err, path))?;
writeln!(file, "{line}").map_err(|err| FsPathError::write(err, path))?;
file.unlock().map_err(|err| FsPathError::unlock(err, path))
}
// Locking fails on Windows if the file is opened in append mode.
fn locked_write_line_windows(path: &Path, line: &str) -> Result<()> {
let mut file = std::fs::OpenOptions::new()
.write(true)
.truncate(false)
.create(true)
.open(path)
.map_err(|err| FsPathError::open(err, path))?;
file.lock().map_err(|err| FsPathError::lock(err, path))?;
file.seek(SeekFrom::End(0)).map_err(|err| FsPathError::write(err, path))?;
writeln!(file, "{line}").map_err(|err| FsPathError::write(err, path))?;
file.unlock().map_err(|err| FsPathError::unlock(err, path))
}
/// Wrapper for `std::fs::copy`
pub fn copy(from: impl AsRef<Path>, to: impl AsRef<Path>) -> Result<u64> {
let from = from.as_ref();
let to = to.as_ref();
fs::copy(from, to).map_err(|err| FsPathError::copy(err, from, to))
}
/// Wrapper for `std::fs::create_dir`
pub fn create_dir(path: impl AsRef<Path>) -> Result<()> {
let path = path.as_ref();
fs::create_dir(path).map_err(|err| FsPathError::create_dir(err, path))
}
/// Wrapper for `std::fs::create_dir_all`
pub fn create_dir_all(path: impl AsRef<Path>) -> Result<()> {
let path = path.as_ref();
fs::create_dir_all(path).map_err(|err| FsPathError::create_dir(err, path))
}
/// Wrapper for `std::fs::remove_dir`
pub fn remove_dir(path: impl AsRef<Path>) -> Result<()> {
let path = path.as_ref();
fs::remove_dir(path).map_err(|err| FsPathError::remove_dir(err, path))
}
/// Wrapper for `std::fs::remove_dir_all`
pub fn remove_dir_all(path: impl AsRef<Path>) -> Result<()> {
let path = path.as_ref();
fs::remove_dir_all(path).map_err(|err| FsPathError::remove_dir(err, path))
}
/// Wrapper for `std::fs::File::open`
pub fn open(path: impl AsRef<Path>) -> Result<fs::File> {
let path = path.as_ref();
fs::File::open(path).map_err(|err| FsPathError::open(err, path))
}
/// Normalize a path, removing things like `.` and `..`.
///
/// NOTE: This does not return symlinks and does not touch the filesystem at all (unlike
/// [`std::fs::canonicalize`])
///
/// ref: <https://github.com/rust-lang/cargo/blob/9ded34a558a900563b0acf3730e223c649cf859d/crates/cargo-util/src/paths.rs#L81>
pub fn normalize_path(path: &Path) -> PathBuf {
let mut components = path.components().peekable();
let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().copied() {
components.next();
PathBuf::from(c.as_os_str())
} else {
PathBuf::new()
};
for component in components {
match component {
Component::Prefix(..) => unreachable!(),
Component::RootDir => {
ret.push(component.as_os_str());
}
Component::CurDir => {}
Component::ParentDir => {
ret.pop();
}
Component::Normal(c) => {
ret.push(c);
}
}
}
ret
}
/// Returns an iterator over all files with the given extension under the `root` dir.
pub fn files_with_ext<'a>(root: &Path, ext: &'a str) -> impl Iterator<Item = PathBuf> + 'a {
walkdir::WalkDir::new(root)
.sort_by_file_name()
.into_iter()
.filter_map(walkdir::Result::ok)
.filter(|e| e.file_type().is_file() && e.path().extension() == Some(ext.as_ref()))
.map(walkdir::DirEntry::into_path)
}
/// Returns an iterator over all JSON files under the `root` dir.
pub fn json_files(root: &Path) -> impl Iterator<Item = PathBuf> {
files_with_ext(root, "json")
}
/// Canonicalize a path, returning an error if the path does not exist.
///
/// Mainly useful to apply canonicalization to paths obtained from project files but still error
/// properly instead of flattening the errors.
pub fn canonicalize_path(path: impl AsRef<Path>) -> std::io::Result<PathBuf> {
dunce::canonicalize(path)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_normalize_path() {
let p = Path::new("/a/../file.txt");
let normalized = normalize_path(p);
assert_eq!(normalized, PathBuf::from("/file.txt"));
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/abi.rs | crates/common/src/abi.rs | //! ABI related helper functions.
use alloy_chains::Chain;
use alloy_dyn_abi::{DynSolType, DynSolValue, FunctionExt, JsonAbiExt};
use alloy_json_abi::{Error, Event, Function, Param};
use alloy_primitives::{Address, LogData, hex};
use eyre::{Context, ContextCompat, Result};
use foundry_block_explorers::{Client, contract::ContractMetadata, errors::EtherscanError};
use std::pin::Pin;
pub fn encode_args<I, S>(inputs: &[Param], args: I) -> Result<Vec<DynSolValue>>
where
I: IntoIterator<Item = S>,
S: AsRef<str>,
{
let args: Vec<S> = args.into_iter().collect();
if inputs.len() != args.len() {
eyre::bail!("encode length mismatch: expected {} types, got {}", inputs.len(), args.len())
}
std::iter::zip(inputs, args)
.map(|(input, arg)| coerce_value(&input.selector_type(), arg.as_ref()))
.collect()
}
/// Given a function and a vector of string arguments, it proceeds to convert the args to alloy
/// [DynSolValue]s and then ABI encode them, prefixes the encoded data with the function selector.
pub fn encode_function_args<I, S>(func: &Function, args: I) -> Result<Vec<u8>>
where
I: IntoIterator<Item = S>,
S: AsRef<str>,
{
Ok(func.abi_encode_input(&encode_args(&func.inputs, args)?)?)
}
/// Given a function and a vector of string arguments, it proceeds to convert the args to alloy
/// [DynSolValue]s and then ABI encode them. Doesn't prefix the function selector.
pub fn encode_function_args_raw<I, S>(func: &Function, args: I) -> Result<Vec<u8>>
where
I: IntoIterator<Item = S>,
S: AsRef<str>,
{
Ok(func.abi_encode_input_raw(&encode_args(&func.inputs, args)?)?)
}
/// Given a function and a vector of string arguments, it proceeds to convert the args to alloy
/// [DynSolValue]s and encode them using the packed encoding.
pub fn encode_function_args_packed<I, S>(func: &Function, args: I) -> Result<Vec<u8>>
where
I: IntoIterator<Item = S>,
S: AsRef<str>,
{
let args: Vec<S> = args.into_iter().collect();
if func.inputs.len() != args.len() {
eyre::bail!(
"encode length mismatch: expected {} types, got {}",
func.inputs.len(),
args.len(),
);
}
let params: Vec<Vec<u8>> = std::iter::zip(&func.inputs, args)
.map(|(input, arg)| coerce_value(&input.selector_type(), arg.as_ref()))
.collect::<Result<Vec<_>>>()?
.into_iter()
.map(|v| v.abi_encode_packed())
.collect();
Ok(params.concat())
}
/// Decodes the calldata of the function
pub fn abi_decode_calldata(
sig: &str,
calldata: &str,
input: bool,
fn_selector: bool,
) -> Result<Vec<DynSolValue>> {
let func = get_func(sig)?;
let calldata = hex::decode(calldata)?;
let mut calldata = calldata.as_slice();
// If function selector is prefixed in "calldata", remove it (first 4 bytes)
if input && fn_selector && calldata.len() >= 4 {
calldata = &calldata[4..];
}
let res =
if input { func.abi_decode_input(calldata) } else { func.abi_decode_output(calldata) }?;
// in case the decoding worked but nothing was decoded
if res.is_empty() {
eyre::bail!("no data was decoded")
}
Ok(res)
}
/// Given a function signature string, it tries to parse it as a `Function`
pub fn get_func(sig: &str) -> Result<Function> {
Function::parse(sig).wrap_err("could not parse function signature")
}
/// Given an event signature string, it tries to parse it as a `Event`
pub fn get_event(sig: &str) -> Result<Event> {
Event::parse(sig).wrap_err("could not parse event signature")
}
/// Given an error signature string, it tries to parse it as a `Error`
pub fn get_error(sig: &str) -> Result<Error> {
Error::parse(sig).wrap_err("could not parse error signature")
}
/// Given an event without indexed parameters and a rawlog, it tries to return the event with the
/// proper indexed parameters. Otherwise, it returns the original event.
pub fn get_indexed_event(mut event: Event, raw_log: &LogData) -> Event {
if !event.anonymous && raw_log.topics().len() > 1 {
let indexed_params = raw_log.topics().len() - 1;
let num_inputs = event.inputs.len();
let num_address_params = event.inputs.iter().filter(|p| p.ty == "address").count();
event.inputs.iter_mut().enumerate().for_each(|(index, param)| {
if param.name.is_empty() {
param.name = format!("param{index}");
}
if num_inputs == indexed_params
|| (num_address_params == indexed_params && param.ty == "address")
{
param.indexed = true;
}
})
}
event
}
/// Given a function name, address, and args, tries to parse it as a `Function` by fetching the
/// abi from etherscan. If the address is a proxy, fetches the ABI of the implementation contract.
pub async fn get_func_etherscan(
function_name: &str,
contract: Address,
args: &[String],
chain: Chain,
etherscan_api_key: &str,
) -> Result<Function> {
let client = Client::new(chain, etherscan_api_key)?;
let source = find_source(client, contract).await?;
let metadata = source.items.first().wrap_err("etherscan returned empty metadata")?;
let mut abi = metadata.abi()?;
let funcs = abi.functions.remove(function_name).unwrap_or_default();
for func in funcs {
let res = encode_function_args(&func, args);
if res.is_ok() {
return Ok(func);
}
}
Err(eyre::eyre!("Function not found in abi"))
}
/// If the code at `address` is a proxy, recurse until we find the implementation.
pub fn find_source(
client: Client,
address: Address,
) -> Pin<Box<dyn Future<Output = Result<ContractMetadata>>>> {
Box::pin(async move {
trace!(%address, "find Etherscan source");
let source = client.contract_source_code(address).await?;
let metadata = source.items.first().wrap_err("Etherscan returned no data")?;
if metadata.proxy == 0 {
Ok(source)
} else {
let implementation = metadata.implementation.unwrap();
sh_println!(
"Contract at {address} is a proxy, trying to fetch source at {implementation}..."
)?;
match find_source(client, implementation).await {
impl_source @ Ok(_) => impl_source,
Err(e) => {
let err = EtherscanError::ContractCodeNotVerified(address).to_string();
if e.to_string() == err {
error!(%err);
Ok(source)
} else {
Err(e)
}
}
}
}
})
}
/// Helper function to coerce a value to a [DynSolValue] given a type string
pub fn coerce_value(ty: &str, arg: &str) -> Result<DynSolValue> {
let ty = DynSolType::parse(ty)?;
Ok(DynSolType::coerce_str(&ty, arg)?)
}
#[cfg(test)]
mod tests {
use super::*;
use alloy_dyn_abi::EventExt;
use alloy_primitives::{B256, U256};
#[test]
fn test_get_func() {
let func = get_func("function foo(uint256 a, uint256 b) returns (uint256)");
assert!(func.is_ok());
let func = func.unwrap();
assert_eq!(func.name, "foo");
assert_eq!(func.inputs.len(), 2);
assert_eq!(func.inputs[0].ty, "uint256");
assert_eq!(func.inputs[1].ty, "uint256");
// Stripped down function, which [Function] can parse.
let func = get_func("foo(bytes4 a, uint8 b)(bytes4)");
assert!(func.is_ok());
let func = func.unwrap();
assert_eq!(func.name, "foo");
assert_eq!(func.inputs.len(), 2);
assert_eq!(func.inputs[0].ty, "bytes4");
assert_eq!(func.inputs[1].ty, "uint8");
assert_eq!(func.outputs[0].ty, "bytes4");
}
#[test]
fn test_indexed_only_address() {
let event = get_event("event Ev(address,uint256,address)").unwrap();
let param0 = B256::random();
let param1 = vec![3; 32];
let param2 = B256::random();
let log = LogData::new_unchecked(vec![event.selector(), param0, param2], param1.into());
let event = get_indexed_event(event, &log);
assert_eq!(event.inputs.len(), 3);
// Only the address fields get indexed since total_params > num_indexed_params
let parsed = event.decode_log(&log).unwrap();
assert_eq!(event.inputs.iter().filter(|param| param.indexed).count(), 2);
assert_eq!(parsed.indexed[0], DynSolValue::Address(Address::from_word(param0)));
assert_eq!(parsed.body[0], DynSolValue::Uint(U256::from_be_bytes([3; 32]), 256));
assert_eq!(parsed.indexed[1], DynSolValue::Address(Address::from_word(param2)));
}
#[test]
fn test_indexed_all() {
let event = get_event("event Ev(address,uint256,address)").unwrap();
let param0 = B256::random();
let param1 = vec![3; 32];
let param2 = B256::random();
let log = LogData::new_unchecked(
vec![event.selector(), param0, B256::from_slice(¶m1), param2],
vec![].into(),
);
let event = get_indexed_event(event, &log);
assert_eq!(event.inputs.len(), 3);
// All parameters get indexed since num_indexed_params == total_params
assert_eq!(event.inputs.iter().filter(|param| param.indexed).count(), 3);
let parsed = event.decode_log(&log).unwrap();
assert_eq!(parsed.indexed[0], DynSolValue::Address(Address::from_word(param0)));
assert_eq!(parsed.indexed[1], DynSolValue::Uint(U256::from_be_bytes([3; 32]), 256));
assert_eq!(parsed.indexed[2], DynSolValue::Address(Address::from_word(param2)));
}
#[test]
fn test_encode_args_length_validation() {
use alloy_json_abi::Param;
let params = vec![
Param {
name: "a".to_string(),
ty: "uint256".to_string(),
internal_type: None,
components: vec![],
},
Param {
name: "b".to_string(),
ty: "address".to_string(),
internal_type: None,
components: vec![],
},
];
// Less arguments than parameters
let args = vec!["1"];
let res = encode_args(¶ms, &args);
assert!(res.is_err());
assert!(format!("{}", res.unwrap_err()).contains("encode length mismatch"));
// Exact number of arguments and parameters
let args = vec!["1", "0x0000000000000000000000000000000000000001"];
let res = encode_args(¶ms, &args);
assert!(res.is_ok());
let values = res.unwrap();
assert_eq!(values.len(), 2);
// More arguments than parameters
let args = vec!["1", "0x0000000000000000000000000000000000000001", "extra"];
let res = encode_args(¶ms, &args);
assert!(res.is_err());
assert!(format!("{}", res.unwrap_err()).contains("encode length mismatch"));
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/selectors.rs | crates/common/src/selectors.rs | //! Support for handling/identifying selectors.
#![allow(missing_docs)]
use crate::{abi::abi_decode_calldata, provider::runtime_transport::RuntimeTransportBuilder};
use alloy_json_abi::JsonAbi;
use alloy_primitives::{B256, Selector, map::HashMap};
use eyre::Context;
use itertools::Itertools;
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use std::{
fmt,
sync::{
Arc,
atomic::{AtomicBool, AtomicUsize, Ordering},
},
time::Duration,
};
/// Base URL of the Sourcify-hosted 4byte signature database.
const BASE_URL: &str = "https://api.4byte.sourcify.dev";
/// Endpoint used to look up signatures by selector.
const SELECTOR_LOOKUP_URL: &str = "https://api.4byte.sourcify.dev/signature-database/v1/lookup";
/// Endpoint used to upload (import) new signatures.
const SELECTOR_IMPORT_URL: &str = "https://api.4byte.sourcify.dev/signature-database/v1/import";

/// The standard request timeout for API requests.
const REQ_TIMEOUT: Duration = Duration::from_secs(15);

/// How many requests can time out before we decide this is a spurious connection.
const MAX_TIMEDOUT_REQ: usize = 4usize;

/// List of signatures for a given [`SelectorKind`].
pub type OpenChainSignatures = Vec<String>;
/// A client that can request API data from OpenChain.
#[derive(Clone, Debug)]
pub struct OpenChainClient {
    /// Underlying HTTP client used for all lookup/import requests.
    inner: reqwest::Client,
    /// Whether the connection is spurious, or API is down
    spurious_connection: Arc<AtomicBool>,
    /// How many requests timed out
    timedout_requests: Arc<AtomicUsize>,
    /// Max allowed requests that can time out before the connection is
    /// considered spurious
    max_timedout_requests: usize,
}
impl OpenChainClient {
    /// Creates a new client with default settings.
    pub fn new() -> eyre::Result<Self> {
        let inner = RuntimeTransportBuilder::new(BASE_URL.parse().unwrap())
            .with_timeout(REQ_TIMEOUT)
            .build()
            .reqwest_client()
            .wrap_err("failed to build OpenChain client")?;
        Ok(Self {
            inner,
            spurious_connection: Default::default(),
            timedout_requests: Default::default(),
            max_timedout_requests: MAX_TIMEDOUT_REQ,
        })
    }

    /// Sends a GET request and returns the response body as text.
    ///
    /// Connectivity errors are recorded via [`Self::on_reqwest_err`].
    async fn get_text(&self, url: impl reqwest::IntoUrl + fmt::Display) -> reqwest::Result<String> {
        trace!(%url, "GET");
        self.inner
            .get(url)
            .send()
            .await
            .inspect_err(|err| self.on_reqwest_err(err))?
            .text()
            .await
            .inspect_err(|err| self.on_reqwest_err(err))
    }

    /// Sends a new post request
    async fn post_json<T: Serialize + std::fmt::Debug, R: DeserializeOwned>(
        &self,
        url: &str,
        body: &T,
    ) -> reqwest::Result<R> {
        trace!(%url, body=?serde_json::to_string(body), "POST");
        self.inner
            .post(url)
            .json(body)
            .send()
            .await
            .inspect_err(|err| self.on_reqwest_err(err))?
            .json()
            .await
            .inspect_err(|err| self.on_reqwest_err(err))
    }

    /// Records a request failure; after `max_timedout_requests` connectivity
    /// failures the connection is marked as spurious and further requests are
    /// refused early.
    fn on_reqwest_err(&self, err: &reqwest::Error) {
        fn is_connectivity_err(err: &reqwest::Error) -> bool {
            if err.is_timeout() || err.is_connect() {
                return true;
            }
            // Error HTTP codes (5xx) are considered connectivity issues and will prompt retry
            if let Some(status) = err.status() {
                let code = status.as_u16();
                if (500..600).contains(&code) {
                    return true;
                }
            }
            false
        }
        if is_connectivity_err(err) {
            warn!("spurious network detected for OpenChain");
            let previous = self.timedout_requests.fetch_add(1, Ordering::SeqCst);
            if previous + 1 >= self.max_timedout_requests {
                self.set_spurious();
            }
        }
    }

    /// Returns whether the connection was marked as spurious
    fn is_spurious(&self) -> bool {
        self.spurious_connection.load(Ordering::Relaxed)
    }

    /// Marks the connection as spurious
    fn set_spurious(&self) {
        self.spurious_connection.store(true, Ordering::Relaxed)
    }

    /// Errors out if the connection was previously marked as spurious.
    fn ensure_not_spurious(&self) -> eyre::Result<()> {
        if self.is_spurious() {
            eyre::bail!("Spurious connection detected")
        }
        Ok(())
    }

    /// Decodes the given function or event selector using OpenChain
    pub async fn decode_selector(
        &self,
        selector: SelectorKind,
    ) -> eyre::Result<OpenChainSignatures> {
        // `decode_selectors` returns one entry per input, so `pop` cannot fail.
        Ok(self.decode_selectors(&[selector]).await?.pop().unwrap())
    }

    /// Decodes the given function, error or event selectors using OpenChain.
    ///
    /// Returns one list of candidate signatures per input selector, in order.
    pub async fn decode_selectors(
        &self,
        selectors: &[SelectorKind],
    ) -> eyre::Result<Vec<OpenChainSignatures>> {
        if selectors.is_empty() {
            return Ok(vec![]);
        }

        if enabled!(tracing::Level::TRACE) {
            trace!(?selectors, "decoding selectors");
        } else {
            debug!(len = selectors.len(), "decoding selectors");
        }

        // Exit early if spurious connection.
        self.ensure_not_spurious()?;

        // Build the URL with the query string: functions (and errors) are sent
        // under `function`, events under `event`, as comma-separated lists.
        let mut url: url::Url = SELECTOR_LOOKUP_URL.parse().unwrap();
        {
            let mut query = url.query_pairs_mut();
            let functions = selectors.iter().filter_map(SelectorKind::as_function);
            if functions.clone().next().is_some() {
                query.append_pair("function", &functions.format(",").to_string());
            }
            let events = selectors.iter().filter_map(SelectorKind::as_event);
            if events.clone().next().is_some() {
                query.append_pair("event", &events.format(",").to_string());
            }
            let _ = query.finish();
        }

        let text = self.get_text(url).await?;
        let SignatureResponse { ok, result } = match serde_json::from_str(&text) {
            Ok(response) => response,
            Err(err) => eyre::bail!("could not decode response: {err}: {text}"),
        };
        if !ok {
            eyre::bail!("OpenChain returned an error: {text}");
        }

        // Map each input selector back to its signatures; missing or null
        // entries yield an empty list.
        Ok(selectors
            .iter()
            .map(|selector| {
                let signatures = match selector {
                    SelectorKind::Function(selector) | SelectorKind::Error(selector) => {
                        result.function.get(selector)
                    }
                    SelectorKind::Event(hash) => result.event.get(hash),
                };
                signatures
                    .map(Option::as_deref)
                    .unwrap_or_default()
                    .unwrap_or_default()
                    .iter()
                    .map(|sig| sig.name.clone())
                    .collect()
            })
            .collect())
    }

    /// Fetches a function signature given the selector using OpenChain
    pub async fn decode_function_selector(
        &self,
        selector: Selector,
    ) -> eyre::Result<OpenChainSignatures> {
        self.decode_selector(SelectorKind::Function(selector)).await
    }

    /// Fetches all possible signatures and attempts to abi decode the calldata
    pub async fn decode_calldata(&self, calldata: &str) -> eyre::Result<OpenChainSignatures> {
        let calldata = calldata.strip_prefix("0x").unwrap_or(calldata);
        // 8 hex characters == the 4-byte function selector.
        if calldata.len() < 8 {
            eyre::bail!(
                "Calldata too short: expected at least 8 characters (excluding 0x prefix), got {}.",
                calldata.len()
            )
        }

        let mut sigs = self.decode_function_selector(calldata[..8].parse()?).await?;

        // Retain only signatures that can be decoded.
        sigs.retain(|sig| abi_decode_calldata(sig, calldata, true, true).is_ok());
        Ok(sigs)
    }

    /// Fetches an event signature given the 32 byte topic using OpenChain.
    pub async fn decode_event_topic(&self, topic: B256) -> eyre::Result<OpenChainSignatures> {
        self.decode_selector(SelectorKind::Event(topic)).await
    }

    /// Pretty print calldata and if available, fetch possible function signatures
    ///
    /// ```no_run
    /// use foundry_common::selectors::OpenChainClient;
    ///
    /// # async fn foo() -> eyre::Result<()> {
    /// let pretty_data = OpenChainClient::new()?
    ///     .pretty_calldata(
    ///         "0x70a08231000000000000000000000000d0074f4e6490ae3f888d1d4f7e3e43326bd3f0f5"
    ///             .to_string(),
    ///         false,
    ///     )
    ///     .await?;
    /// println!("{}", pretty_data);
    /// # Ok(())
    /// # }
    /// ```
    pub async fn pretty_calldata(
        &self,
        calldata: impl AsRef<str>,
        offline: bool,
    ) -> eyre::Result<PossibleSigs> {
        let mut possible_info = PossibleSigs::new();
        let calldata = calldata.as_ref().trim_start_matches("0x");

        // Fix: error message previously read "less that 4 bytes".
        let selector =
            calldata.get(..8).ok_or_else(|| eyre::eyre!("calldata cannot be less than 4 bytes"))?;

        let sigs = if offline {
            vec![]
        } else {
            let selector = selector.parse()?;
            self.decode_function_selector(selector).await.unwrap_or_default().into_iter().collect()
        };

        // The remainder must consist of whole 32-byte words (64 hex chars each).
        let (_, data) = calldata.split_at(8);
        if !data.len().is_multiple_of(64) {
            eyre::bail!("\nInvalid calldata size")
        }
        let row_length = data.len() / 64;

        for row in 0..row_length {
            possible_info.data.push(data[64 * row..64 * (row + 1)].to_string());
        }
        if sigs.is_empty() {
            possible_info.method = SelectorOrSig::Selector(selector.to_string());
        } else {
            possible_info.method = SelectorOrSig::Sig(sigs);
        }
        Ok(possible_info)
    }

    /// Uploads selectors to OpenChain using the given data.
    pub async fn import_selectors(
        &self,
        data: SelectorImportData,
    ) -> eyre::Result<SelectorImportResponse> {
        self.ensure_not_spurious()?;

        let request = match data {
            SelectorImportData::Abi(abis) => {
                // Functions and custom errors share the `function` import list.
                let functions_and_errors: OpenChainSignatures = abis
                    .iter()
                    .flat_map(|abi| {
                        abi.functions()
                            .map(|func| func.signature())
                            .chain(abi.errors().map(|error| error.signature()))
                            .collect::<Vec<_>>()
                    })
                    .collect();

                let events = abis
                    .iter()
                    .flat_map(|abi| abi.events().map(|event| event.signature()))
                    .collect::<Vec<_>>();

                SelectorImportRequest { function: functions_and_errors, event: events }
            }
            SelectorImportData::Raw(raw) => {
                let function_and_error =
                    raw.function.iter().chain(raw.error.iter()).cloned().collect::<Vec<_>>();
                SelectorImportRequest { function: function_and_error, event: raw.event }
            }
        };

        Ok(self.post_json(SELECTOR_IMPORT_URL, &request).await?)
    }
}
/// Either a raw selector string or a list of resolved candidate signatures.
pub enum SelectorOrSig {
    /// Raw hex selector, used when no signatures could be resolved.
    Selector(String),
    /// Possible resolved signatures for the selector.
    Sig(OpenChainSignatures),
}
/// Pretty-printable calldata: the resolved method (or raw selector) plus the
/// calldata body split into 32-byte words (as hex strings).
pub struct PossibleSigs {
    // Resolved signatures, or the raw selector when resolution failed/offline.
    method: SelectorOrSig,
    // Calldata body, one 64-hex-char (32-byte) word per entry.
    data: OpenChainSignatures,
}
impl PossibleSigs {
    /// Creates an empty instance with the all-zero selector placeholder.
    fn new() -> Self {
        let method = SelectorOrSig::Selector("0x00000000".to_string());
        Self { method, data: Vec::new() }
    }
}
impl fmt::Display for PossibleSigs {
    /// Renders the method (raw selector or candidate signatures) followed by
    /// each calldata word, labelled with its byte offset in hex.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.method {
            SelectorOrSig::Selector(selector) => writeln!(f, "\n Method: {selector}")?,
            SelectorOrSig::Sig(sigs) => {
                writeln!(f, "\n Possible methods:")?;
                for sig in sigs {
                    writeln!(f, " - {sig}")?;
                }
            }
        }

        writeln!(f, " ------------")?;
        for (i, row) in self.data.iter().enumerate() {
            // Each row is one 32-byte word; label it with its offset in hex.
            writeln!(f, " [{:03x}]: {row}", i * 32)?;
        }
        Ok(())
    }
}
/// The kind of selector to fetch from OpenChain.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum SelectorKind {
    /// A function selector.
    Function(Selector),
    /// A custom error selector. Behaves the same as a function selector and is
    /// looked up in the same `function` table.
    Error(Selector),
    /// An event selector (32-byte topic hash).
    Event(B256),
}
impl SelectorKind {
    /// Returns the 4-byte selector if this is a function OR a custom error.
    pub fn as_function(&self) -> Option<Selector> {
        if let Self::Function(selector) | Self::Error(selector) = *self {
            Some(selector)
        } else {
            None
        }
    }

    /// Returns the 32-byte topic hash if this is an event.
    pub fn as_event(&self) -> Option<B256> {
        if let Self::Event(hash) = *self { Some(hash) } else { None }
    }
}
/// Decodes the given function or event selector using OpenChain.
pub async fn decode_selector(selector: SelectorKind) -> eyre::Result<OpenChainSignatures> {
    let client = OpenChainClient::new()?;
    client.decode_selector(selector).await
}
/// Decodes the given function or event selectors using OpenChain.
pub async fn decode_selectors(
    selectors: &[SelectorKind],
) -> eyre::Result<Vec<OpenChainSignatures>> {
    let client = OpenChainClient::new()?;
    client.decode_selectors(selectors).await
}
/// Fetches a function signature given the selector using OpenChain.
pub async fn decode_function_selector(selector: Selector) -> eyre::Result<OpenChainSignatures> {
    let client = OpenChainClient::new()?;
    client.decode_function_selector(selector).await
}
/// Fetches all possible signatures and attempts to abi decode the calldata using OpenChain.
pub async fn decode_calldata(calldata: &str) -> eyre::Result<OpenChainSignatures> {
    let client = OpenChainClient::new()?;
    client.decode_calldata(calldata).await
}
/// Fetches an event signature given the 32 byte topic using OpenChain.
pub async fn decode_event_topic(topic: B256) -> eyre::Result<OpenChainSignatures> {
    let client = OpenChainClient::new()?;
    client.decode_event_topic(topic).await
}
/// Pretty print calldata and if available, fetch possible function signatures.
///
/// ```no_run
/// use foundry_common::selectors::pretty_calldata;
///
/// # async fn foo() -> eyre::Result<()> {
/// let pretty_data = pretty_calldata(
///     "0x70a08231000000000000000000000000d0074f4e6490ae3f888d1d4f7e3e43326bd3f0f5".to_string(),
///     false,
/// )
/// .await?;
/// println!("{}", pretty_data);
/// # Ok(())
/// # }
/// ```
pub async fn pretty_calldata(
    calldata: impl AsRef<str>,
    offline: bool,
) -> eyre::Result<PossibleSigs> {
    let client = OpenChainClient::new()?;
    client.pretty_calldata(calldata, offline).await
}
/// Raw function/event/error signature strings to import into OpenChain.
#[derive(Debug, Default, PartialEq, Eq, Serialize)]
pub struct RawSelectorImportData {
    /// Function signatures.
    pub function: OpenChainSignatures,
    /// Event signatures.
    pub event: OpenChainSignatures,
    /// Custom error signatures.
    pub error: OpenChainSignatures,
}
impl RawSelectorImportData {
    /// Returns `true` when no function, event, or error signatures are present.
    pub fn is_empty(&self) -> bool {
        [&self.function, &self.event, &self.error].iter().all(|sigs| sigs.is_empty())
    }
}
/// Payload accepted by the import endpoint: either whole ABIs or raw signature
/// lists.
#[derive(Serialize)]
#[serde(untagged)]
pub enum SelectorImportData {
    /// Import every function/error/event signature found in the given ABIs.
    Abi(Vec<JsonAbi>),
    /// Import explicitly listed raw signatures.
    Raw(RawSelectorImportData),
}

/// Request body for [`SELECTOR_IMPORT_URL`].
#[derive(Debug, Default, Serialize)]
struct SelectorImportRequest {
    function: OpenChainSignatures,
    event: OpenChainSignatures,
}

/// Per-kind import outcome: newly imported vs already-known signatures.
#[derive(Debug, Deserialize)]
struct SelectorImportEffect {
    imported: HashMap<String, String>,
    duplicated: HashMap<String, String>,
}

/// Import outcomes for functions and events.
#[derive(Debug, Deserialize)]
struct SelectorImportResult {
    function: SelectorImportEffect,
    event: SelectorImportEffect,
}

/// Response returned by the signature import endpoint.
#[derive(Debug, Deserialize)]
pub struct SelectorImportResponse {
    result: SelectorImportResult,
}
impl SelectorImportResponse {
    /// Print info about the functions which were uploaded or already known
    pub fn describe(&self) {
        for (k, v) in &self.result.function.imported {
            let _ = sh_println!("Imported: Function {k}: {v}");
        }
        for (k, v) in &self.result.event.imported {
            let _ = sh_println!("Imported: Event {k}: {v}");
        }
        for (k, v) in &self.result.function.duplicated {
            let _ = sh_println!("Duplicated: Function {k}: {v}");
        }
        for (k, v) in &self.result.event.duplicated {
            let _ = sh_println!("Duplicated: Event {k}: {v}");
        }

        let _ = sh_println!("Selectors successfully uploaded to OpenChain");
    }
}
/// Uploads selectors to OpenChain using the given data.
pub async fn import_selectors(data: SelectorImportData) -> eyre::Result<SelectorImportResponse> {
    let client = OpenChainClient::new()?;
    client.import_selectors(data).await
}
/// Result of [`parse_signatures`]: raw signatures plus ABIs parsed from
/// artifact files.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ParsedSignatures {
    /// Raw signatures parsed from plain tokens.
    pub signatures: RawSelectorImportData,
    /// ABIs read from `.json` artifact files.
    pub abis: Vec<JsonAbi>,
}

// Minimal artifact shape: only the ABI field is needed here.
#[derive(Deserialize)]
struct Artifact {
    abi: JsonAbi,
}
/// Parses a list of tokens into function, event, and error signatures.
/// Also handles JSON artifact files
/// Ignores invalid tokens
pub fn parse_signatures(tokens: Vec<String>) -> ParsedSignatures {
    // Tokens ending in `.json` are treated as artifact files: read each one and
    // extract its ABI. Unreadable or unparsable files are silently skipped.
    let abis = tokens
        .iter()
        .filter(|sig| sig.ends_with(".json"))
        .filter_map(|filename| std::fs::read_to_string(filename).ok())
        .filter_map(|file| serde_json::from_str(file.as_str()).ok())
        .map(|artifact: Artifact| artifact.abi)
        .collect();

    // Remaining tokens are raw signatures, optionally prefixed with their kind
    // (`function`, `event`, `error`); untagged tokens default to functions.
    let mut signatures = RawSelectorImportData::default();
    for token in tokens.iter().filter(|sig| !sig.ends_with(".json")) {
        let mut split = token.split(' ');
        match split.next() {
            Some("function") => {
                if let Some(sig) = split.next() {
                    signatures.function.push(sig.to_string())
                }
            }
            Some("event") => {
                if let Some(sig) = split.next() {
                    signatures.event.push(sig.to_string())
                }
            }
            Some("error") => {
                if let Some(sig) = split.next() {
                    signatures.error.push(sig.to_string())
                }
            }
            Some(signature) => {
                // if no type given, assume function
                signatures.function.push(signature.to_string());
            }
            None => {}
        }
    }

    ParsedSignatures { signatures, abis }
}
/// [`SELECTOR_LOOKUP_URL`] response.
#[derive(Deserialize)]
struct SignatureResponse {
    // `false` indicates the API reported an error.
    ok: bool,
    result: SignatureResult,
}

// Lookup results keyed by event topic / function selector; a key may map to
// `None` when the API has no entry for it.
#[derive(Deserialize)]
struct SignatureResult {
    event: HashMap<B256, Option<Vec<Signature>>>,
    function: HashMap<Selector, Option<Vec<Signature>>>,
}

// A single resolved signature entry.
#[derive(Deserialize)]
struct Signature {
    name: String,
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_signatures() {
        // A bare token defaults to a function signature.
        let result = parse_signatures(vec!["transfer(address,uint256)".to_string()]);
        assert_eq!(
            result,
            ParsedSignatures {
                signatures: RawSelectorImportData {
                    function: vec!["transfer(address,uint256)".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            }
        );

        // An explicit `function` prefix is stripped.
        let result = parse_signatures(vec![
            "transfer(address,uint256)".to_string(),
            "function approve(address,uint256)".to_string(),
        ]);
        assert_eq!(
            result,
            ParsedSignatures {
                signatures: RawSelectorImportData {
                    function: vec![
                        "transfer(address,uint256)".to_string(),
                        "approve(address,uint256)".to_string()
                    ],
                    ..Default::default()
                },
                ..Default::default()
            }
        );

        // `event` and `error` prefixes route signatures to their own lists.
        let result = parse_signatures(vec![
            "transfer(address,uint256)".to_string(),
            "event Approval(address,address,uint256)".to_string(),
            "error ERC20InsufficientBalance(address,uint256,uint256)".to_string(),
        ]);
        assert_eq!(
            result,
            ParsedSignatures {
                signatures: RawSelectorImportData {
                    function: vec!["transfer(address,uint256)".to_string()],
                    event: vec!["Approval(address,address,uint256)".to_string()],
                    error: vec!["ERC20InsufficientBalance(address,uint256,uint256)".to_string()]
                },
                ..Default::default()
            }
        );

        // skips invalid
        let result = parse_signatures(vec!["event".to_string()]);
        assert_eq!(
            result,
            ParsedSignatures { signatures: Default::default(), ..Default::default() }
        );
    }

    #[tokio::test]
    async fn spurious_marked_on_timeout_threshold() {
        // Use an unreachable local port to trigger a quick connect error.
        // NOTE(review): relies on connections to a closed local port failing
        // fast; this may be environment-dependent — confirm in CI.
        let client = OpenChainClient::new().expect("client must build");
        let url = "http://127.0.0.1:9"; // Discard port; typically closed and fails fast.

        // After MAX_TIMEDOUT_REQ - 1 failures we should NOT be spurious.
        for i in 0..(MAX_TIMEDOUT_REQ - 1) {
            let _ = client.get_text(url).await; // expect an error and internal counter increment
            assert!(!client.is_spurious(), "unexpected spurious after {} failed attempts", i + 1);
        }

        // The Nth failure (N == MAX_TIMEDOUT_REQ) should flip the spurious flag.
        let _ = client.get_text(url).await;
        assert!(client.is_spurious(), "expected spurious after threshold failures");
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/contracts.rs | crates/common/src/contracts.rs | //! Commonly used contract types and functions.
use crate::{compile::PathOrContractInfo, find_metadata_start, strip_bytecode_placeholders};
use alloy_dyn_abi::JsonAbiExt;
use alloy_json_abi::{Event, Function, JsonAbi};
use alloy_primitives::{Address, B256, Bytes, Selector, hex};
use eyre::{OptionExt, Result};
use foundry_compilers::{
ArtifactId, Project, ProjectCompileOutput,
artifacts::{
BytecodeObject, CompactBytecode, CompactContractBytecode, CompactContractBytecodeCow,
CompactDeployedBytecode, ConfigurableContractArtifact, ContractBytecodeSome, Offsets,
StorageLayout,
},
utils::canonicalized,
};
use std::{
collections::BTreeMap,
ops::Deref,
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
/// Libraries' runtime code always starts with the following instruction:
/// `PUSH20 0x0000000000000000000000000000000000000000`
///
/// See: <https://docs.soliditylang.org/en/latest/contracts.html#call-protection-for-libraries>
// 0x73 is the PUSH20 opcode; the 20 zero bytes are the placeholder for the
// library's own address, filled in at deployment.
const CALL_PROTECTION_BYTECODE_PREFIX: [u8; 21] =
    hex!("730000000000000000000000000000000000000000");
/// Subset of [CompactBytecode] excluding sourcemaps.
#[expect(missing_docs)]
#[derive(Debug, Clone)]
pub struct BytecodeData {
    // The bytecode object, if any; may be unlinked.
    pub object: Option<BytecodeObject>,
    // Nested map of placeholder offsets for unresolved library links
    // (presumably file -> library name -> offsets — matches solc output; verify).
    pub link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>,
    // Offsets of immutable values written into the runtime code at deploy time.
    pub immutable_references: BTreeMap<String, Vec<Offsets>>,
}
impl BytecodeData {
    /// Returns the underlying bytes of the bytecode object, if present.
    fn bytes(&self) -> Option<&Bytes> {
        self.object.as_ref()?.as_bytes()
    }
}
impl From<CompactBytecode> for BytecodeData {
    // Creation bytecode carries link references but never immutable references,
    // so the latter are left empty here.
    fn from(bytecode: CompactBytecode) -> Self {
        Self {
            object: Some(bytecode.object),
            link_references: bytecode.link_references,
            immutable_references: BTreeMap::new(),
        }
    }
}
impl From<CompactDeployedBytecode> for BytecodeData {
    fn from(bytecode: CompactDeployedBytecode) -> Self {
        // The inner compact bytecode may be absent; in that case there is no
        // object and no link references.
        let (object, link_references) = match bytecode.bytecode {
            Some(compact) => (Some(compact.object), compact.link_references),
            None => (None, BTreeMap::new()),
        };
        Self { object, link_references, immutable_references: bytecode.immutable_references }
    }
}
/// Container for commonly used contract data: name, ABI, creation and runtime
/// bytecode, and (optionally) the storage layout.
#[derive(Debug)]
pub struct ContractData {
    /// Contract name.
    pub name: String,
    /// Contract ABI.
    pub abi: JsonAbi,
    /// Contract creation code.
    pub bytecode: Option<BytecodeData>,
    /// Contract runtime code.
    pub deployed_bytecode: Option<BytecodeData>,
    /// Contract storage layout, if available.
    pub storage_layout: Option<Arc<StorageLayout>>,
}
impl ContractData {
    /// Returns reference to bytes of contract creation code, if present.
    /// An empty bytecode object is treated the same as a missing one.
    pub fn bytecode(&self) -> Option<&Bytes> {
        let bytes = self.bytecode.as_ref()?.bytes()?;
        (!bytes.is_empty()).then_some(bytes)
    }

    /// Returns reference to bytes of contract deployed code, if present.
    /// An empty bytecode object is treated the same as a missing one.
    pub fn deployed_bytecode(&self) -> Option<&Bytes> {
        let bytes = self.deployed_bytecode.as_ref()?.bytes()?;
        (!bytes.is_empty()).then_some(bytes)
    }

    /// Returns the creation bytecode without placeholders, if present.
    pub fn bytecode_without_placeholders(&self) -> Option<Bytes> {
        let object = self.bytecode.as_ref()?.object.as_ref()?;
        strip_bytecode_placeholders(object)
    }

    /// Returns the deployed bytecode without placeholders, if present.
    pub fn deployed_bytecode_without_placeholders(&self) -> Option<Bytes> {
        let object = self.deployed_bytecode.as_ref()?.object.as_ref()?;
        strip_bytecode_placeholders(object)
    }
}
/// Builder for creating a `ContractsByArtifact` instance, optionally including storage layouts
/// from project compile output.
pub struct ContractsByArtifactBuilder<'a> {
    /// All compiled artifact bytecodes (borrowed).
    artifacts: BTreeMap<ArtifactId, CompactContractBytecodeCow<'a>>,
    /// Optionally collected storage layouts for matching artifact IDs.
    storage_layouts: BTreeMap<ArtifactId, StorageLayout>,
}
impl<'a> ContractsByArtifactBuilder<'a> {
    /// Creates a new builder from artifacts with present bytecode iterator.
    pub fn new(
        artifacts: impl IntoIterator<Item = (ArtifactId, CompactContractBytecodeCow<'a>)>,
    ) -> Self {
        Self { artifacts: artifacts.into_iter().collect(), storage_layouts: BTreeMap::new() }
    }

    /// Add storage layouts from the given `ProjectCompileOutput` to known artifacts.
    pub fn with_output(self, output: &ProjectCompileOutput, base: &Path) -> Self {
        // Only artifacts that actually have a storage layout are kept; file
        // prefixes are stripped relative to `base` before the id is used as key.
        self.with_storage_layouts(output.artifact_ids().filter_map(|(id, artifact)| {
            artifact
                .storage_layout
                .as_ref()
                .map(|layout| (id.with_stripped_file_prefixes(base), layout.clone()))
        }))
    }

    /// Add storage layouts.
    pub fn with_storage_layouts(
        mut self,
        layouts: impl IntoIterator<Item = (ArtifactId, StorageLayout)>,
    ) -> Self {
        self.storage_layouts.extend(layouts);
        self
    }

    /// Builds `ContractsByArtifact`.
    pub fn build(self) -> ContractsByArtifact {
        let map = self
            .artifacts
            .into_iter()
            .filter_map(|(id, artifact)| {
                let name = id.name.clone();
                let CompactContractBytecodeCow { abi, bytecode, deployed_bytecode } = artifact;
                // Artifacts without an ABI are dropped here via `abi?`.
                Some((
                    id.clone(),
                    ContractData {
                        name,
                        abi: abi?.into_owned(),
                        bytecode: bytecode.map(|b| b.into_owned().into()),
                        deployed_bytecode: deployed_bytecode.map(|b| b.into_owned().into()),
                        storage_layout: self.storage_layouts.get(&id).map(|l| Arc::new(l.clone())),
                    },
                ))
            })
            .collect();
        ContractsByArtifact(Arc::new(map))
    }
}
// An artifact id paired with a reference to its contract data.
type ArtifactWithContractRef<'a> = (&'a ArtifactId, &'a ContractData);

/// Wrapper type that maps an artifact to a contract ABI and bytecode.
/// Cheap to clone: the inner map is shared behind an [`Arc`].
#[derive(Clone, Default, Debug)]
pub struct ContractsByArtifact(Arc<BTreeMap<ArtifactId, ContractData>>);
impl ContractsByArtifact {
    /// Creates a new instance by collecting all artifacts with present bytecode from an iterator.
    /// Artifacts without an ABI are skipped; storage layouts are not populated.
    pub fn new(artifacts: impl IntoIterator<Item = (ArtifactId, CompactContractBytecode)>) -> Self {
        let map = artifacts
            .into_iter()
            .filter_map(|(id, artifact)| {
                let name = id.name.clone();
                let CompactContractBytecode { abi, bytecode, deployed_bytecode } = artifact;
                Some((
                    id,
                    ContractData {
                        name,
                        abi: abi?,
                        bytecode: bytecode.map(Into::into),
                        deployed_bytecode: deployed_bytecode.map(Into::into),
                        storage_layout: None,
                    },
                ))
            })
            .collect();
        Self(Arc::new(map))
    }

    /// Clears all contracts.
    pub fn clear(&mut self) {
        *self = Self::default();
    }

    /// Finds a contract which has a similar bytecode as `code`.
    pub fn find_by_creation_code(&self, code: &[u8]) -> Option<ArtifactWithContractRef<'_>> {
        self.find_by_code(code, 0.1, true, ContractData::bytecode)
    }

    /// Finds a contract which has a similar deployed bytecode as `code`.
    pub fn find_by_deployed_code(&self, code: &[u8]) -> Option<ArtifactWithContractRef<'_>> {
        self.find_by_code(code, 0.15, false, ContractData::deployed_bytecode)
    }

    /// Finds a contract based on provided bytecode and accepted match score.
    /// If strip constructor args flag is true then removes args from bytecode to compare.
    /// Among all candidates under the threshold, the best (lowest) score wins.
    fn find_by_code(
        &self,
        code: &[u8],
        accepted_score: f64,
        strip_ctor_args: bool,
        get: impl Fn(&ContractData) -> Option<&Bytes>,
    ) -> Option<ArtifactWithContractRef<'_>> {
        self.iter()
            .filter_map(|(id, contract)| {
                if let Some(deployed_bytecode) = get(contract) {
                    let mut code = code;
                    if strip_ctor_args && code.len() > deployed_bytecode.len() {
                        // Try to decode ctor args with contract abi.
                        if let Some(constructor) = contract.abi.constructor() {
                            let constructor_args = &code[deployed_bytecode.len()..];
                            if constructor.abi_decode_input(constructor_args).is_ok() {
                                // If we can decode args with current abi then remove args from
                                // code to compare.
                                code = &code[..deployed_bytecode.len()]
                            }
                        }
                    };

                    let score = bytecode_diff_score(deployed_bytecode.as_ref(), code);
                    (score <= accepted_score).then_some((score, (id, contract)))
                } else {
                    None
                }
            })
            .min_by(|(score1, _), (score2, _)| score1.total_cmp(score2))
            .map(|(_, data)| data)
    }

    /// Finds a contract which deployed bytecode exactly matches the given code. Accounts for link
    /// references and immutables.
    ///
    /// Link-reference, immutable-reference, call-protection, and metadata byte
    /// ranges are ignored when comparing; if everything matches except the
    /// metadata, the contract is remembered as a partial match and returned
    /// only if no exact match is found.
    pub fn find_by_deployed_code_exact(&self, code: &[u8]) -> Option<ArtifactWithContractRef<'_>> {
        // Immediately return None if the code is empty.
        if code.is_empty() {
            return None;
        }

        let mut partial_match = None;
        self.iter()
            .find(|(id, contract)| {
                let Some(deployed_bytecode) = &contract.deployed_bytecode else {
                    return false;
                };
                let Some(deployed_code) = &deployed_bytecode.object else {
                    return false;
                };

                // Unlinked bytecode is a hex string, so its byte length is half
                // the string length.
                let len = match deployed_code {
                    BytecodeObject::Bytecode(bytes) => bytes.len(),
                    BytecodeObject::Unlinked(bytes) => bytes.len() / 2,
                };

                if len != code.len() {
                    return false;
                }

                // Collect ignored offsets by chaining link and immutable references.
                let mut ignored = deployed_bytecode
                    .immutable_references
                    .values()
                    .chain(deployed_bytecode.link_references.values().flat_map(|v| v.values()))
                    .flatten()
                    .cloned()
                    .collect::<Vec<_>>();

                // For libraries solidity adds a call protection prefix to the bytecode. We need to
                // ignore it as it includes library address determined at runtime.
                // See https://docs.soliditylang.org/en/latest/contracts.html#call-protection-for-libraries and
                // https://github.com/NomicFoundation/hardhat/blob/af7807cf38842a4f56e7f4b966b806e39631568a/packages/hardhat-verify/src/internal/solc/bytecode.ts#L172
                // NOTE(review): the unlinked-hex slice below assumes the string
                // is at least 42 chars; bytecodes shorter than 21 bytes would
                // panic here — confirm whether such inputs can occur.
                let has_call_protection = match deployed_code {
                    BytecodeObject::Bytecode(bytes) => {
                        bytes.starts_with(&CALL_PROTECTION_BYTECODE_PREFIX)
                    }
                    BytecodeObject::Unlinked(bytes) => {
                        if let Ok(bytes) =
                            Bytes::from_str(&bytes[..CALL_PROTECTION_BYTECODE_PREFIX.len() * 2])
                        {
                            bytes.starts_with(&CALL_PROTECTION_BYTECODE_PREFIX)
                        } else {
                            false
                        }
                    }
                };

                if has_call_protection {
                    // Skip the 20-byte library address (bytes 1..21).
                    ignored.push(Offsets { start: 1, length: 20 });
                }

                // Metadata (trailing CBOR blob) is compared separately below.
                let metadata_start = find_metadata_start(code);
                if let Some(metadata) = metadata_start {
                    ignored.push(Offsets {
                        start: metadata as u32,
                        length: (code.len() - metadata) as u32,
                    });
                }

                // Compare the segments *between* ignored ranges.
                ignored.sort_by_key(|o| o.start);

                let mut left = 0;
                for offset in ignored {
                    let right = offset.start as usize;

                    let matched = match deployed_code {
                        BytecodeObject::Bytecode(bytes) => bytes[left..right] == code[left..right],
                        BytecodeObject::Unlinked(bytes) => {
                            if let Ok(bytes) = Bytes::from_str(&bytes[left * 2..right * 2]) {
                                bytes == code[left..right]
                            } else {
                                false
                            }
                        }
                    };

                    if !matched {
                        return false;
                    }

                    left = right + offset.length as usize;
                }

                // Compare the tail after the last ignored range.
                let is_partial = if left < code.len() {
                    match deployed_code {
                        BytecodeObject::Bytecode(bytes) => bytes[left..] == code[left..],
                        BytecodeObject::Unlinked(bytes) => {
                            if let Ok(bytes) = Bytes::from_str(&bytes[left * 2..]) {
                                bytes == code[left..]
                            } else {
                                false
                            }
                        }
                    }
                } else {
                    true
                };

                if !is_partial {
                    return false;
                }

                // Everything but metadata matched; if the metadata also
                // matches this is an exact match, otherwise remember it as a
                // partial match and keep searching for an exact one.
                let Some(metadata) = metadata_start else { return true };
                let exact_match = match deployed_code {
                    BytecodeObject::Bytecode(bytes) => bytes[metadata..] == code[metadata..],
                    BytecodeObject::Unlinked(bytes) => {
                        if let Ok(bytes) = Bytes::from_str(&bytes[metadata * 2..]) {
                            bytes == code[metadata..]
                        } else {
                            false
                        }
                    }
                };

                if exact_match {
                    true
                } else {
                    partial_match = Some((*id, *contract));
                    false
                }
            })
            .or(partial_match)
    }

    /// Finds a contract which has the same contract name or identifier as `id`. If more than one is
    /// found, return error.
    pub fn find_by_name_or_identifier(
        &self,
        id: &str,
    ) -> Result<Option<ArtifactWithContractRef<'_>>> {
        let contracts = self
            .iter()
            .filter(|(artifact, _)| artifact.name == id || artifact.identifier() == id)
            .collect::<Vec<_>>();

        if contracts.len() > 1 {
            eyre::bail!("{id} has more than one implementation.");
        }

        Ok(contracts.first().copied())
    }

    /// Finds abi for contract which has the same contract name or identifier as `id`.
    pub fn find_abi_by_name_or_identifier(&self, id: &str) -> Option<JsonAbi> {
        self.iter()
            .find(|(artifact, _)| {
                // Compare against the name with any `.`-suffix stripped.
                artifact.name.split(".").next().unwrap() == id || artifact.identifier() == id
            })
            .map(|(_, contract)| contract.abi.clone())
    }

    /// Finds abi by name or source path
    ///
    /// Returns the abi and the contract name.
    pub fn find_abi_by_name_or_src_path(&self, name_or_path: &str) -> Option<(JsonAbi, String)> {
        self.iter()
            .find(|(artifact, _)| {
                artifact.name == name_or_path || artifact.source == Path::new(name_or_path)
            })
            .map(|(_, contract)| (contract.abi.clone(), contract.name.clone()))
    }

    /// Flattens the contracts into functions, events and errors.
    /// Later contracts overwrite earlier entries for duplicate selectors.
    pub fn flatten(&self) -> (BTreeMap<Selector, Function>, BTreeMap<B256, Event>, JsonAbi) {
        let mut funcs = BTreeMap::new();
        let mut events = BTreeMap::new();
        let mut errors_abi = JsonAbi::new();
        for (_name, contract) in self.iter() {
            for func in contract.abi.functions() {
                funcs.insert(func.selector(), func.clone());
            }
            for event in contract.abi.events() {
                events.insert(event.selector(), event.clone());
            }
            for error in contract.abi.errors() {
                errors_abi.errors.entry(error.name.clone()).or_default().push(error.clone());
            }
        }
        (funcs, events, errors_abi)
    }
}
impl From<ProjectCompileOutput> for ContractsByArtifact {
    // Collects all artifacts from a compile output. Storage layouts are not
    // retained here (see `ContractsByArtifactBuilder` for that).
    fn from(value: ProjectCompileOutput) -> Self {
        Self::new(value.into_artifacts().map(|(id, ar)| {
            (
                id,
                CompactContractBytecode {
                    abi: ar.abi,
                    bytecode: ar.bytecode,
                    deployed_bytecode: ar.deployed_bytecode,
                },
            )
        }))
    }
}
// Expose the inner map's API (iteration, lookup) directly on the wrapper.
impl Deref for ContractsByArtifact {
    type Target = BTreeMap<ArtifactId, ContractData>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// Wrapper type that maps an address to a contract identifier and contract ABI.
pub type ContractsByAddress = BTreeMap<Address, (String, JsonAbi)>;
/// Very simple fuzzy matching of contract bytecode.
///
/// Returns a value between `0.0` (identical) and `1.0` (completely different).
pub fn bytecode_diff_score<'a>(mut a: &'a [u8], mut b: &'a [u8]) -> f64 {
    // Canonicalize so that `a` is always the longer slice.
    if b.len() > a.len() {
        std::mem::swap(&mut a, &mut b);
    }

    // Bytes beyond `b`'s end count as differing.
    let len_diff = a.len() - b.len();

    // Heuristic early-out: when the length gap alone exceeds both 32 bytes
    // (one EVM word / metadata-hash size) and 10% of the longer bytecode,
    // treat the bytecodes as completely different without scanning bytes.
    if len_diff > 32 && len_diff * 10 > a.len() {
        return 1.0;
    }

    // Count differing bytes in the overlapping region.
    // SAFETY: `a` is at least as long as `b` after the swap above.
    let n_different_bytes = len_diff + unsafe { count_different_bytes(a, b) };
    n_different_bytes as f64 / a.len() as f64
}
/// Returns the amount of different bytes between two slices.
///
/// # Safety
///
/// `a` must be at least as long as `b`.
unsafe fn count_different_bytes(a: &[u8], b: &[u8]) -> usize {
    // Deliberately a primitive indexed loop rather than
    // `zip(a, b).filter(..).count()`: this function is hot and should stay
    // cheap even at low optimization levels.
    let a_ptr = a.as_ptr();
    let b_ptr = b.as_ptr();
    let mut sum = 0;
    for i in 0..b.len() {
        // SAFETY: the caller guarantees `a.len() >= b.len()` and `i < b.len()`,
        // so both reads are in bounds.
        sum += usize::from(unsafe { *a_ptr.add(i) != *b_ptr.add(i) });
    }
    sum
}
/// Returns contract name for a given contract identifier.
///
/// Artifact/Contract identifier can take the following form:
/// `<artifact file name>:<contract name>`, the `artifact file name` is the name of the json file of
/// the contract's artifact and the contract name is the name of the solidity contract, like
/// `SafeTransferLibTest.json:SafeTransferLibTest`
///
/// This returns the `contract name` part
///
/// # Example
///
/// ```
/// use foundry_common::*;
/// assert_eq!(
///     "SafeTransferLibTest",
///     get_contract_name("SafeTransferLibTest.json:SafeTransferLibTest")
/// );
/// ```
pub fn get_contract_name(id: &str) -> &str {
    // Everything after the last `:`; the whole id when there is no `:`.
    match id.rsplit_once(':') {
        Some((_, name)) => name,
        None => id,
    }
}
/// This returns the `file name` part, See [`get_contract_name`]
///
/// # Example
///
/// ```
/// use foundry_common::*;
/// assert_eq!(
///     "SafeTransferLibTest.json",
///     get_file_name("SafeTransferLibTest.json:SafeTransferLibTest")
/// );
/// ```
pub fn get_file_name(id: &str) -> &str {
    // Take everything before the first `:`; identifiers without a separator
    // are returned unchanged.
    match id.split_once(':') {
        Some((file, _)) => file,
        None => id,
    }
}
/// Helper function to convert CompactContractBytecode ~> ContractBytecodeSome.
///
/// Errors if the ABI, creation bytecode, or deployed bytecode is missing from
/// the compact representation.
pub fn compact_to_contract(contract: CompactContractBytecode) -> Result<ContractBytecodeSome> {
    let abi = contract.abi.ok_or_else(|| eyre::eyre!("No contract abi"))?;
    let bytecode = contract.bytecode.ok_or_else(|| eyre::eyre!("No contract bytecode"))?;
    let deployed_bytecode =
        contract.deployed_bytecode.ok_or_else(|| eyre::eyre!("No contract deployed bytecode"))?;
    Ok(ContractBytecodeSome {
        abi,
        bytecode: bytecode.into(),
        deployed_bytecode: deployed_bytecode.into(),
    })
}
/// Returns the canonicalized target path for the given identifier.
///
/// For a plain path, the path is joined onto the project root and canonicalized.
/// For a contract identifier, an explicit path (if any) is matched against the
/// project's sources; otherwise the contract is looked up by name.
pub fn find_target_path(project: &Project, identifier: &PathOrContractInfo) -> Result<PathBuf> {
    match identifier {
        PathOrContractInfo::Path(path) => Ok(canonicalized(project.root().join(path))),
        PathOrContractInfo::ContractInfo(info) => {
            let Some(path) = info.path.as_ref() else {
                // If ContractInfo.path hasn't been provided we try to find the contract using
                // the name. This will fail if projects have multiple contracts with the same
                // name. In that case, path must be specified.
                return Ok(project.find_contract_path(&info.name)?);
            };
            let path = canonicalized(project.root().join(path));
            let sources = project.sources()?;
            // The path must correspond to an actual source file of the project.
            let contract_path = sources
                .iter()
                .find_map(|(src_path, _)| (**src_path == path).then(|| src_path.clone()))
                .ok_or_else(|| {
                    eyre::eyre!(
                        "Could not find source file for contract `{}` at {}",
                        info.name,
                        path.strip_prefix(project.root()).unwrap().display()
                    )
                })?;
            Ok(contract_path)
        }
    }
}
/// Returns the target artifact given the path and name.
///
/// When `target_name` is provided, the artifact is removed from `output` by
/// (path, name). Otherwise all artifacts compiled from `target_path` are
/// considered; a single match is returned directly, and multiple matches are
/// disambiguated by compiler profile (preferring `default`).
pub fn find_matching_contract_artifact(
    output: &mut ProjectCompileOutput,
    target_path: &Path,
    target_name: Option<&str>,
) -> eyre::Result<ConfigurableContractArtifact> {
    if let Some(name) = target_name {
        output
            .remove(target_path, name)
            .ok_or_eyre(format!("Could not find artifact `{name}` in the compiled artifacts"))
    } else {
        // No name given: collect every artifact whose source file is the target path.
        let possible_targets = output
            .artifact_ids()
            .filter(|(id, _artifact)| id.source == target_path)
            .collect::<Vec<_>>();
        if possible_targets.is_empty() {
            eyre::bail!(
                "Could not find artifact linked to source `{target_path:?}` in the compiled artifacts"
            );
        }
        let (target_id, target_artifact) = possible_targets[0].clone();
        if possible_targets.len() == 1 {
            return Ok(target_artifact.clone());
        }
        // If all artifact_ids in `possible_targets` have the same name (without ".", indicates
        // additional compiler profiles), it means that there are multiple contracts in the
        // same file.
        if !target_id.name.contains(".")
            && possible_targets.iter().any(|(id, _)| id.name != target_id.name)
        {
            eyre::bail!(
                "Multiple contracts found in the same file, please specify the target <path>:<contract> or <contract>"
            );
        }
        // Otherwise, we're dealing with additional compiler profiles wherein `id.source` is the
        // same but `id.path` is different.
        let artifact = possible_targets
            .iter()
            .find_map(|(id, artifact)| if id.profile == "default" { Some(*artifact) } else { None })
            .unwrap_or(target_artifact);
        Ok(artifact.clone())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn bytecode_diffing() {
        // Identical inputs score 0.0; fully different same-length inputs score 1.0.
        assert_eq!(bytecode_diff_score(b"a", b"a"), 0.0);
        assert_eq!(bytecode_diff_score(b"a", b"b"), 1.0);
        let a_100 = &b"a".repeat(100)[..];
        // Large length differences and fully differing bytes both max out the score.
        assert_eq!(bytecode_diff_score(a_100, &b"b".repeat(100)), 1.0);
        assert_eq!(bytecode_diff_score(a_100, &b"b".repeat(99)), 1.0);
        assert_eq!(bytecode_diff_score(a_100, &b"b".repeat(101)), 1.0);
        assert_eq!(bytecode_diff_score(a_100, &b"b".repeat(120)), 1.0);
        assert_eq!(bytecode_diff_score(a_100, &b"b".repeat(1000)), 1.0);
        // A single-byte length difference contributes at most 1/100 to the score.
        let a_99 = &b"a".repeat(99)[..];
        assert!(bytecode_diff_score(a_100, a_99) <= 0.01);
    }
    #[test]
    fn find_by_deployed_code_exact_with_empty_deployed() {
        // An empty artifact set never matches, even for empty deployed code.
        let contracts = ContractsByArtifact::new(vec![]);
        assert!(contracts.find_by_deployed_code_exact(&[]).is_none());
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/utils.rs | crates/common/src/utils.rs | //! Uncategorised utilities.
use alloy_primitives::{B256, Bytes, U256, hex, keccak256};
use foundry_compilers::{
Project,
artifacts::{BytecodeObject, SolcLanguage},
error::SolcError,
flatten::{Flattener, FlattenerError},
};
use regex::Regex;
use std::{path::Path, sync::LazyLock};
// Matches the `__$<34 chars>$__` library-link placeholders that appear in
// unlinked bytecode (see `strip_bytecode_placeholders`).
static BYTECODE_PLACEHOLDER_RE: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"__\$.{34}\$__").expect("invalid regex"));
/// Block on a future using the current tokio runtime on the current thread.
///
/// NOTE(review): `Handle::current()` requires being called from within a tokio
/// runtime context, otherwise it panics — callers must ensure that.
pub fn block_on<F: std::future::Future>(future: F) -> F::Output {
    block_on_handle(&tokio::runtime::Handle::current(), future)
}
/// Block on a future using the current tokio runtime on the current thread with the given handle.
pub fn block_on_handle<F: std::future::Future>(
    handle: &tokio::runtime::Handle,
    future: F,
) -> F::Output {
    // `block_in_place` marks this worker thread as blocking so the runtime can
    // keep driving other tasks while we synchronously wait on `future`.
    tokio::task::block_in_place(|| handle.block_on(future))
}
/// Computes the storage slot as specified by `ERC-7201`, using the `erc7201` formula ID.
///
/// This is defined as:
///
/// ```text
/// erc7201(id: string) = keccak256(keccak256(id) - 1) & ~0xff
/// ```
///
/// # Examples
///
/// ```
/// use alloy_primitives::b256;
/// use foundry_common::erc7201;
///
/// assert_eq!(
///     erc7201("example.main"),
///     b256!("0x183a6125c38840424c4a85fa12bab2ab606c4b6d0e7cc73c0c06ba5300eab500"),
/// );
/// ```
pub fn erc7201(id: &str) -> B256 {
    // Inner hash of the id, minus one, interpreted as a big-endian integer.
    let inner = U256::from_be_bytes(keccak256(id).0) - U256::from(1);
    // Outer hash with the low byte cleared (`& ~0xff`).
    keccak256(inner.to_be_bytes::<32>()) & B256::from(!U256::from(0xff))
}
/// Utility function to find the start of the metadata in the bytecode.
/// This assumes that the metadata is at the end of the bytecode.
///
/// Returns `None` when the trailing bytes do not form a plausible CBOR
/// metadata section.
pub fn find_metadata_start(bytecode: &[u8]) -> Option<usize> {
    // Get the last two bytes of the bytecode to find the length of CBOR metadata.
    let (rest, metadata_len_bytes) = bytecode.split_last_chunk()?;
    let metadata_len = u16::from_be_bytes(*metadata_len_bytes) as usize;
    // A claimed length longer than the remaining bytecode can't be metadata.
    if metadata_len > rest.len() {
        return None;
    }
    // Only accept the section if it actually decodes as CBOR.
    ciborium::from_reader::<ciborium::Value, _>(&rest[rest.len() - metadata_len..])
        .is_ok()
        .then(|| rest.len() - metadata_len)
}
/// Utility function to ignore metadata hash of the given bytecode.
/// This assumes that the metadata is at the end of the bytecode.
///
/// If no metadata section is detected, the input is returned unchanged.
pub fn ignore_metadata_hash(bytecode: &[u8]) -> &[u8] {
    match find_metadata_start(bytecode) {
        Some(start) => &bytecode[..start],
        None => bytecode,
    }
}
/// Strips all __$xxx$__ placeholders from the bytecode if it's an unlinked bytecode.
/// by replacing them with 20 zero bytes.
/// This is useful for matching bytecodes to a contract source, and for the source map,
/// in which the actual address of the placeholder isn't important.
///
/// Returns `None` when the placeholder-substituted string is not valid hex.
pub fn strip_bytecode_placeholders(bytecode: &BytecodeObject) -> Option<Bytes> {
    match bytecode {
        // Already linked: nothing to strip.
        BytecodeObject::Bytecode(bytes) => Some(bytes.clone()),
        BytecodeObject::Unlinked(s) => {
            // Replace all __$xxx$__ placeholders with 20 zero bytes (40 hex chars).
            let substituted = BYTECODE_PLACEHOLDER_RE.replace_all(s, "00".repeat(20));
            hex::decode(substituted.as_bytes()).ok().map(Into::into)
        }
    }
}
/// Flattens the given target of the project. Falls back to the old flattening implementation
/// if the target cannot be compiled successfully. This would be the case if the target has invalid
/// syntax. (e.g. Solang)
pub fn flatten(project: Project, target_path: &Path) -> eyre::Result<String> {
    // Save paths for fallback before Flattener::new takes ownership
    let paths = project.paths.clone();
    let flattened = match Flattener::new(project, target_path) {
        Ok(flattener) => Ok(flattener.flatten()),
        // Compilation failed: fall back to the path-based flattening implementation.
        Err(FlattenerError::Compilation(_)) => {
            paths.with_language::<SolcLanguage>().flatten(target_path)
        }
        // Any other error is fatal.
        Err(FlattenerError::Other(err)) => Err(err),
    }
    .map_err(|err: SolcError| eyre::eyre!("Failed to flatten: {err}"))?;
    Ok(flattened)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/mapping_slots.rs | crates/common/src/mapping_slots.rs | use alloy_primitives::{
B256, U256, keccak256,
map::{AddressHashMap, B256HashMap},
};
use revm::{
bytecode::opcode,
interpreter::{Interpreter, interpreter_types::Jumps},
};
/// Recorded mapping slots.
///
/// Populated by [`step`] while executing bytecode: 64-byte keccak inputs are
/// remembered in `seen_sha3`, and written slots matching one of those hashes
/// are classified as mapping slots via [`MappingSlots::insert`].
#[derive(Clone, Debug, Default)]
pub struct MappingSlots {
    /// Holds mapping parent (slots => slots)
    pub parent_slots: B256HashMap<B256>,
    /// Holds mapping key (slots => key)
    pub keys: B256HashMap<B256>,
    /// Holds mapping child (slots => slots[])
    pub children: B256HashMap<Vec<B256>>,
    /// Holds the last sha3 result `sha3_result => (data_low, data_high)`, this would only record
    /// when sha3 is called with `size == 0x40`, and the lower 256 bits would be stored in
    /// `data_low`, higher 256 bits in `data_high`.
    /// This is needed for mapping_key detect if the slot is for some mapping and record that.
    pub seen_sha3: B256HashMap<(B256, B256)>,
}
impl MappingSlots {
    /// Tries to insert a mapping slot. Returns true if it was inserted.
    pub fn insert(&mut self, slot: B256) -> bool {
        match self.seen_sha3.get(&slot).copied() {
            Some((key, parent)) => {
                // Already recorded for this slot: avoid duplicating the child entry.
                if self.keys.insert(slot, key).is_some() {
                    return false;
                }
                self.parent_slots.insert(slot, parent);
                self.children.entry(parent).or_default().push(slot);
                // Recurse: the parent slot may itself be derived from another
                // mapping hash (nested mappings).
                self.insert(parent);
                true
            }
            // The slot was never produced by a recorded 64-byte keccak, so it
            // is not a mapping slot we know about.
            None => false,
        }
    }
}
/// Function to be used in Inspector::step to record mapping slots and keys
///
/// On `KECCAK256` over exactly 64 bytes (the `keccak256(key . parent_slot)`
/// shape used for mapping slots), the hash and its preimage halves are
/// remembered; on `SSTORE`, the written slot is checked against those hashes.
#[cold]
pub fn step(mapping_slots: &mut AddressHashMap<MappingSlots>, interpreter: &Interpreter) {
    match interpreter.bytecode.opcode() {
        opcode::KECCAK256 => {
            // Only 64-byte (0x40) hashes are candidates for mapping-slot derivation.
            if interpreter.stack.peek(1) == Ok(U256::from(0x40)) {
                let address = interpreter.input.target_address;
                let offset = interpreter.stack.peek(0).expect("stack size > 1").saturating_to();
                let data = interpreter.memory.slice_len(offset, 0x40);
                // Split the 64-byte preimage into its two 32-byte words.
                let low = B256::from_slice(&data[..0x20]);
                let high = B256::from_slice(&data[0x20..]);
                let result = keccak256(&*data);
                mapping_slots.entry(address).or_default().seen_sha3.insert(result, (low, high));
            }
        }
        opcode::SSTORE => {
            // The slot being written is on top of the stack; try to classify it.
            if let Some(mapping_slots) = mapping_slots.get_mut(&interpreter.input.target_address)
                && let Ok(slot) = interpreter.stack.peek(0)
            {
                mapping_slots.insert(slot.into());
            }
        }
        _ => {}
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/constants.rs | crates/common/src/constants.rs | //! Commonly used constants.
use alloy_eips::Typed2718;
use alloy_network::AnyTxEnvelope;
use alloy_primitives::{Address, B256, Signature, address};
use std::time::Duration;
/// The dev chain-id, inherited from hardhat
pub const DEV_CHAIN_ID: u64 = 31337;
/// The first four bytes of the call data for a function call specifies the function to be called.
pub const SELECTOR_LEN: usize = 4;
/// Maximum size in bytes (0x6000) that a contract can have.
pub const CONTRACT_MAX_SIZE: usize = 24576;
/// Default request timeout for http requests
///
/// Note: this is only used so that connections, that are discarded on the server side won't stay
/// open forever. We assume some nodes may have some backoff baked into them and will delay some
/// responses. This timeout should be a reasonable amount of time to wait for a request.
pub const REQUEST_TIMEOUT: Duration = Duration::from_secs(45);
/// Alchemy free tier cups: <https://docs.alchemy.com/reference/pricing-plans>
pub const ALCHEMY_FREE_TIER_CUPS: u64 = 330;
/// Logged when an error is indicative that the user is trying to fork from a non-archive node.
pub const NON_ARCHIVE_NODE_WARNING: &str = "\
It looks like you're trying to fork from an older block with a non-archive node which is not \
supported. Please try to change your RPC url to an archive node if the issue persists.";
/// Arbitrum L1 sender address of the first transaction in every block.
/// `0x00000000000000000000000000000000000a4b05`
pub const ARBITRUM_SENDER: Address = address!("0x00000000000000000000000000000000000a4b05");
/// The system address, the sender of the first transaction in every block:
/// `0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001`
///
/// See also <https://github.com/ethereum-optimism/optimism/blob/65ec61dde94ffa93342728d324fecf474d228e1f/specs/deposits.md#l1-attributes-deposited-transaction>
pub const OPTIMISM_SYSTEM_ADDRESS: Address = address!("0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001");
/// The system address, the sender of the first transaction in every block:
pub const MONAD_SYSTEM_ADDRESS: Address = address!("0x6f49a8F621353f12378d0046E7d7e4b9B249DC9e");
/// Transaction identifier of System transaction types
// 126 = 0x7e; NOTE(review): appears to match the OP-stack deposit transaction type — confirm.
pub const SYSTEM_TRANSACTION_TYPE: u8 = 126;
/// Default user agent set as the header for requests that don't specify one.
pub const DEFAULT_USER_AGENT: &str = concat!("foundry/", env!("CARGO_PKG_VERSION"));
/// Prefix for auto-generated type bindings using `forge bind-json`.
pub const TYPE_BINDING_PREFIX: &str = "string constant schema_";
/// Returns whether the sender is a known L2 system sender that is the first tx in every block.
///
/// Transactions from these senders usually don't have any fee information OR set absurdly high fees that exceed the gas limit (See: <https://github.com/foundry-rs/foundry/pull/10608>)
///
/// See: [ARBITRUM_SENDER], [OPTIMISM_SYSTEM_ADDRESS], [MONAD_SYSTEM_ADDRESS] and [Address::ZERO]
pub fn is_known_system_sender(sender: Address) -> bool {
    [ARBITRUM_SENDER, OPTIMISM_SYSTEM_ADDRESS, MONAD_SYSTEM_ADDRESS, Address::ZERO]
        .contains(&sender)
}
/// Returns `true` if the given envelope is an Ethereum transaction carrying an
/// impersonated signature (see [`is_impersonated_sig`]); non-Ethereum envelopes
/// are never considered impersonated.
pub fn is_impersonated_tx(tx: &AnyTxEnvelope) -> bool {
    match tx {
        AnyTxEnvelope::Ethereum(inner) => is_impersonated_sig(inner.signature(), inner.ty()),
        _ => false,
    }
}
/// Returns `true` if `sig` matches the sentinel "impersonated" signature
/// (r = s = 1, even parity), or shares its `r` value, for any non-system
/// transaction type.
pub fn is_impersonated_sig(sig: &Signature, ty: u8) -> bool {
    let impersonated_sig =
        Signature::from_scalars_and_parity(B256::with_last_byte(1), B256::with_last_byte(1), false);
    // System transactions are never treated as impersonated.
    ty != SYSTEM_TRANSACTION_TYPE && (sig == &impersonated_sig || sig.r() == impersonated_sig.r())
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_constant_sender() {
        // Sanity-check the hard-coded L2 system sender constants.
        let arb = address!("0x00000000000000000000000000000000000a4b05");
        assert_eq!(arb, ARBITRUM_SENDER);
        let base = address!("0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001");
        assert_eq!(base, OPTIMISM_SYSTEM_ADDRESS);
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/serde_helpers.rs | crates/common/src/serde_helpers.rs | //! Misc Serde helpers for foundry crates.
use alloy_primitives::U256;
use serde::{Deserialize, Deserializer, de};
use std::str::FromStr;
/// Helper type to parse both `u64` and `U256`
// `untagged`: serde tries the variants in declaration order, so `U256` is
// attempted before falling back to a plain `u64`.
#[derive(Copy, Clone, Deserialize)]
#[serde(untagged)]
pub enum Numeric {
    /// A [U256] value.
    U256(U256),
    /// A `u64` value.
    Num(u64),
}
impl From<Numeric> for U256 {
    /// Widens either variant into a [`U256`].
    fn from(n: Numeric) -> Self {
        match n {
            Numeric::U256(value) => value,
            Numeric::Num(value) => Self::from(value),
        }
    }
}
impl FromStr for Numeric {
    type Err = String;
    /// Parses a decimal `u128`, a `0x`-prefixed hex string, or an arbitrary
    /// decimal `U256`, in that order of preference.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Fast path: anything that fits in a decimal u128.
        if let Ok(val) = s.parse::<u128>() {
            return Ok(Self::U256(U256::from(val)));
        }
        if s.starts_with("0x") {
            U256::from_str_radix(s, 16).map(Self::U256).map_err(|err| err.to_string())
        } else {
            U256::from_str(s).map(Self::U256).map_err(|err| err.to_string())
        }
    }
}
/// Deserializes the input into an `Option<U256>`, using [`from_int_or_hex`] to deserialize the
/// inner value.
pub fn from_int_or_hex_opt<'de, D>(deserializer: D) -> Result<Option<U256>, D::Error>
where
    D: Deserializer<'de>,
{
    // Convert the inner value (if present) and hoist the Result out of the Option.
    Option::<NumberOrHexU256>::deserialize(deserializer)?
        .map(|val| val.try_into_u256())
        .transpose()
}
/// An enum that represents either a [serde_json::Number] integer, or a hex [U256].
// `untagged`: the JSON number form is tried first, then the hex string form.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub enum NumberOrHexU256 {
    /// An integer
    Int(serde_json::Number),
    /// A hex U256
    Hex(U256),
}
impl NumberOrHexU256 {
    /// Tries to convert this into a [U256], surfacing integer-parse failures
    /// as a custom deserialization error.
    pub fn try_into_u256<E: de::Error>(self) -> Result<U256, E> {
        match self {
            Self::Hex(val) => Ok(val),
            // Round-trip the JSON number through its decimal string form.
            Self::Int(num) => U256::from_str(&num.to_string()).map_err(E::custom),
        }
    }
}
/// Deserializes the input into a U256, accepting both 0x-prefixed hex and decimal strings with
/// arbitrary precision, defined by serde_json's [`Number`](serde_json::Number).
pub fn from_int_or_hex<'de, D>(deserializer: D) -> Result<U256, D::Error>
where
    D: Deserializer<'de>,
{
    let value = NumberOrHexU256::deserialize(deserializer)?;
    value.try_into_u256()
}
/// Helper type to deserialize sequence of numbers
// `untagged`: a one-element array is tried first, then a bare `U256`/`u64`.
#[derive(Deserialize)]
#[serde(untagged)]
pub enum NumericSeq {
    /// Single parameter sequence (e.g `[1]`).
    Seq([Numeric; 1]),
    /// `U256`.
    U256(U256),
    /// Native `u64`.
    Num(u64),
}
/// Deserializes a number from hex or int
pub fn deserialize_number<'de, D>(deserializer: D) -> Result<U256, D::Error>
where
    D: Deserializer<'de>,
{
    let num = Numeric::deserialize(deserializer)?;
    Ok(num.into())
}
/// Deserializes a number from hex or int, but optionally
pub fn deserialize_number_opt<'de, D>(deserializer: D) -> Result<Option<U256>, D::Error>
where
    D: Deserializer<'de>,
{
    // Both `Numeric` variants widen to `U256` via the `From` impl.
    Ok(Option::<Numeric>::deserialize(deserializer)?.map(U256::from))
}
/// Deserializes single integer params: `1, [1], ["0x01"]`
pub fn deserialize_number_seq<'de, D>(deserializer: D) -> Result<U256, D::Error>
where
    D: Deserializer<'de>,
{
    Ok(match NumericSeq::deserialize(deserializer)? {
        // Unwrap the single element of a one-item sequence.
        NumericSeq::Seq([num]) => num.into(),
        NumericSeq::U256(n) => n,
        NumericSeq::Num(n) => U256::from(n),
    })
}
/// Serde helpers that round-trip a [`std::time::Duration`] through
/// `jiff::SignedDuration`'s string representation.
pub mod duration {
    use serde::{Deserialize, Deserializer};
    use std::time::Duration;
    /// Serializes `duration` as a `jiff::SignedDuration` string (`{:#}` format).
    pub fn serialize<S>(duration: &Duration, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        let d = jiff::SignedDuration::try_from(*duration).map_err(serde::ser::Error::custom)?;
        serializer.serialize_str(&format!("{d:#}"))
    }
    /// Deserializes a `jiff::SignedDuration` string back into a [`Duration`].
    ///
    /// Errors if the string doesn't parse or the value is negative (the
    /// `SignedDuration -> Duration` conversion is fallible).
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Duration, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        let d = s.parse::<jiff::SignedDuration>().map_err(serde::de::Error::custom)?;
        d.try_into().map_err(serde::de::Error::custom)
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/traits.rs | crates/common/src/traits.rs | //! Commonly used traits.
use alloy_json_abi::Function;
use alloy_primitives::Bytes;
use alloy_sol_types::SolError;
use std::{fmt, path::Path};
/// Test filter.
pub trait TestFilter: Send + Sync {
    /// Returns whether the test should be included.
    fn matches_test(&self, test_signature: &str) -> bool;
    /// Returns whether the contract should be included.
    fn matches_contract(&self, contract_name: &str) -> bool;
    /// Returns whether a contract with the given path should be included.
    fn matches_path(&self, path: &Path) -> bool;
}
impl<'a> dyn TestFilter + 'a {
    /// Returns `true` if the function is a test function that matches the given filter.
    pub fn matches_test_function(&self, func: &Function) -> bool {
        // The function must both look like a test and pass the signature filter.
        func.is_any_test() && self.matches_test(&func.signature())
    }
}
/// A test filter that filters out nothing.
// The private unit field keeps construction going through `Default`.
#[derive(Clone, Debug, Default)]
pub struct EmptyTestFilter(());
impl TestFilter for EmptyTestFilter {
    fn matches_test(&self, _test_signature: &str) -> bool {
        true
    }
    fn matches_contract(&self, _contract_name: &str) -> bool {
        true
    }
    fn matches_path(&self, _path: &Path) -> bool {
        true
    }
}
/// Extension trait for `Function`.
///
/// All predicates delegate to [`TestFunctionKind::classify`] via
/// [`test_function_kind`](Self::test_function_kind), driven by the two
/// `#[doc(hidden)]` accessors implementors provide.
pub trait TestFunctionExt {
    /// Returns the kind of test function.
    fn test_function_kind(&self) -> TestFunctionKind {
        TestFunctionKind::classify(self.tfe_as_str(), self.tfe_has_inputs())
    }
    /// Returns `true` if this function is a `setUp` function.
    fn is_setup(&self) -> bool {
        self.test_function_kind().is_setup()
    }
    /// Returns `true` if this function is a unit, fuzz, or invariant test.
    fn is_any_test(&self) -> bool {
        self.test_function_kind().is_any_test()
    }
    /// Returns `true` if this function is a test that should fail.
    fn is_any_test_fail(&self) -> bool {
        self.test_function_kind().is_any_test_fail()
    }
    /// Returns `true` if this function is a unit test.
    fn is_unit_test(&self) -> bool {
        matches!(self.test_function_kind(), TestFunctionKind::UnitTest { .. })
    }
    /// Returns `true` if this function is a `beforeTestSetup` function.
    fn is_before_test_setup(&self) -> bool {
        self.tfe_as_str().eq_ignore_ascii_case("beforetestsetup")
    }
    /// Returns `true` if this function is a fuzz test.
    fn is_fuzz_test(&self) -> bool {
        self.test_function_kind().is_fuzz_test()
    }
    /// Returns `true` if this function is an invariant test.
    fn is_invariant_test(&self) -> bool {
        self.test_function_kind().is_invariant_test()
    }
    /// Returns `true` if this function is an `afterInvariant` function.
    fn is_after_invariant(&self) -> bool {
        self.test_function_kind().is_after_invariant()
    }
    /// Returns `true` if this function is a `fixture` function.
    fn is_fixture(&self) -> bool {
        self.test_function_kind().is_fixture()
    }
    /// Returns `true` if this function is test reserved function.
    fn is_reserved(&self) -> bool {
        self.is_any_test()
            || self.is_setup()
            || self.is_before_test_setup()
            || self.is_after_invariant()
            || self.is_fixture()
    }
    /// The function name used for classification.
    #[doc(hidden)]
    fn tfe_as_str(&self) -> &str;
    /// Whether the function takes any inputs (distinguishes fuzz from unit tests).
    #[doc(hidden)]
    fn tfe_has_inputs(&self) -> bool;
}
impl TestFunctionExt for Function {
    fn tfe_as_str(&self) -> &str {
        self.name.as_str()
    }
    fn tfe_has_inputs(&self) -> bool {
        !self.inputs.is_empty()
    }
}
impl TestFunctionExt for String {
    fn tfe_as_str(&self) -> &str {
        self
    }
    // A bare name carries no signature information, so assume no inputs.
    fn tfe_has_inputs(&self) -> bool {
        false
    }
}
impl TestFunctionExt for str {
    fn tfe_as_str(&self) -> &str {
        self
    }
    // A bare name carries no signature information, so assume no inputs.
    fn tfe_has_inputs(&self) -> bool {
        false
    }
}
/// Test function kind.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum TestFunctionKind {
    /// `setUp`.
    Setup,
    /// `test*`. `should_fail` is `true` for `testFail*`.
    UnitTest { should_fail: bool },
    /// `test*`, with arguments. `should_fail` is `true` for `testFail*`.
    FuzzTest { should_fail: bool },
    /// `invariant*` or `statefulFuzz*`.
    InvariantTest,
    /// `table*`, with arguments.
    TableTest,
    /// `afterInvariant`.
    AfterInvariant,
    /// `fixture*`.
    Fixture,
    /// Unknown kind.
    Unknown,
}
impl TestFunctionKind {
    /// Classify a function by its name and whether it takes inputs.
    ///
    /// `test*` names are checked first, so a name like `testFoo` can never be
    /// classified as a table/invariant test; `has_inputs` distinguishes fuzz
    /// tests from unit tests.
    pub fn classify(name: &str, has_inputs: bool) -> Self {
        match () {
            _ if name.starts_with("test") => {
                let should_fail = name.starts_with("testFail");
                if has_inputs {
                    Self::FuzzTest { should_fail }
                } else {
                    Self::UnitTest { should_fail }
                }
            }
            _ if name.starts_with("invariant") || name.starts_with("statefulFuzz") => {
                Self::InvariantTest
            }
            _ if name.starts_with("table") => Self::TableTest,
            _ if name.eq_ignore_ascii_case("setup") => Self::Setup,
            _ if name.eq_ignore_ascii_case("afterinvariant") => Self::AfterInvariant,
            _ if name.starts_with("fixture") => Self::Fixture,
            _ => Self::Unknown,
        }
    }
    /// Returns the name of the function kind.
    pub const fn name(&self) -> &'static str {
        match self {
            Self::Setup => "setUp",
            Self::UnitTest { should_fail: false } => "test",
            Self::UnitTest { should_fail: true } => "testFail",
            Self::FuzzTest { should_fail: false } => "fuzz",
            Self::FuzzTest { should_fail: true } => "fuzz fail",
            Self::InvariantTest => "invariant",
            Self::TableTest => "table",
            Self::AfterInvariant => "afterInvariant",
            Self::Fixture => "fixture",
            Self::Unknown => "unknown",
        }
    }
    /// Returns `true` if this function is a `setUp` function.
    #[inline]
    pub const fn is_setup(&self) -> bool {
        matches!(self, Self::Setup)
    }
    /// Returns `true` if this function is a unit, fuzz, table, or invariant test.
    #[inline]
    pub const fn is_any_test(&self) -> bool {
        matches!(
            self,
            Self::UnitTest { .. } | Self::FuzzTest { .. } | Self::TableTest | Self::InvariantTest
        )
    }
    /// Returns `true` if this function is a test that should fail.
    #[inline]
    pub const fn is_any_test_fail(&self) -> bool {
        matches!(self, Self::UnitTest { should_fail: true } | Self::FuzzTest { should_fail: true })
    }
    /// Returns `true` if this function is a unit test.
    // `const` for consistency with the other predicates on this type.
    #[inline]
    pub const fn is_unit_test(&self) -> bool {
        matches!(self, Self::UnitTest { .. })
    }
    /// Returns `true` if this function is a fuzz test.
    #[inline]
    pub const fn is_fuzz_test(&self) -> bool {
        matches!(self, Self::FuzzTest { .. })
    }
    /// Returns `true` if this function is an invariant test.
    #[inline]
    pub const fn is_invariant_test(&self) -> bool {
        matches!(self, Self::InvariantTest)
    }
    /// Returns `true` if this function is a table test.
    #[inline]
    pub const fn is_table_test(&self) -> bool {
        matches!(self, Self::TableTest)
    }
    /// Returns `true` if this function is an `afterInvariant` function.
    #[inline]
    pub const fn is_after_invariant(&self) -> bool {
        matches!(self, Self::AfterInvariant)
    }
    /// Returns `true` if this function is a `fixture` function.
    #[inline]
    pub const fn is_fixture(&self) -> bool {
        matches!(self, Self::Fixture)
    }
    /// Returns `true` if this function kind is known.
    #[inline]
    pub const fn is_known(&self) -> bool {
        !matches!(self, Self::Unknown)
    }
    /// Returns `true` if this function kind is unknown.
    #[inline]
    pub const fn is_unknown(&self) -> bool {
        matches!(self, Self::Unknown)
    }
}
impl fmt::Display for TestFunctionKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the str impl so formatter flags (width, padding) apply.
        fmt::Display::fmt(self.name(), f)
    }
}
/// An extension trait for `std::error::Error` for ABI encoding.
pub trait ErrorExt: std::error::Error {
    /// ABI-encodes the error using `Revert(string)`.
    fn abi_encode_revert(&self) -> Bytes;
}
impl<T: std::error::Error> ErrorExt for T {
    fn abi_encode_revert(&self) -> Bytes {
        // Wraps the error's `Display` output in a Solidity `Revert(string)` payload.
        alloy_sol_types::Revert::from(self.to_string()).abi_encode().into()
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/retry.rs | crates/common/src/retry.rs | //! Retry utilities.
use eyre::{Error, Report, Result};
use std::time::Duration;
/// Error type for Retry.
///
/// Controls how [`Retry::run_async_until_break`] reacts to a failure.
#[derive(Debug, thiserror::Error)]
pub enum RetryError<E = Report> {
    /// Continues operation without decrementing retries.
    Continue(E),
    /// Keeps retrying operation.
    Retry(E),
    /// Stops retrying operation immediately.
    Break(E),
}
/// A type that keeps track of attempts.
#[derive(Clone, Debug)]
pub struct Retry {
    // Remaining retry budget; decremented on each retried failure.
    retries: u32,
    // Pause inserted between attempts; `Duration::ZERO` disables sleeping.
    delay: Duration,
}
impl Retry {
    /// Creates a new `Retry` instance.
    pub fn new(retries: u32, delay: Duration) -> Self {
        Self { retries, delay }
    }
    /// Creates a new `Retry` instance with no delay between retries.
    pub fn new_no_delay(retries: u32) -> Self {
        Self::new(retries, Duration::ZERO)
    }
    /// Runs the given closure in a loop, retrying if it fails up to the specified number of times.
    pub fn run<F: FnMut() -> Result<T>, T>(mut self, mut callback: F) -> Result<T> {
        loop {
            match callback() {
                // Failure with budget left: log, optionally sleep, try again.
                Err(e) if self.retries > 0 => {
                    self.handle_err(e);
                    if !self.delay.is_zero() {
                        std::thread::sleep(self.delay);
                    }
                }
                // Success, or a failure with no retries remaining.
                res => return res,
            }
        }
    }
    /// Runs the given async closure in a loop, retrying if it fails up to the specified number of
    /// times.
    pub async fn run_async<F, Fut, T>(mut self, mut callback: F) -> Result<T>
    where
        F: FnMut() -> Fut,
        Fut: Future<Output = Result<T>>,
    {
        loop {
            match callback().await {
                // Failure with budget left: log, optionally sleep, try again.
                Err(e) if self.retries > 0 => {
                    self.handle_err(e);
                    if !self.delay.is_zero() {
                        tokio::time::sleep(self.delay).await;
                    }
                }
                // Success, or a failure with no retries remaining.
                res => return res,
            };
        }
    }
    /// Runs the given async closure in a loop, retrying if it fails up to the specified number of
    /// times or immediately returning an error if the closure returned [`RetryError::Break`].
    pub async fn run_async_until_break<F, Fut, T>(mut self, mut callback: F) -> Result<T>
    where
        F: FnMut() -> Fut,
        Fut: Future<Output = Result<T, RetryError>>,
    {
        loop {
            match callback().await {
                // `Continue` does NOT consume a retry; it only logs and sleeps.
                Err(RetryError::Continue(e)) => {
                    self.log(e, false);
                    if !self.delay.is_zero() {
                        tokio::time::sleep(self.delay).await;
                    }
                }
                // `Retry` consumes one unit of the retry budget.
                Err(RetryError::Retry(e)) if self.retries > 0 => {
                    self.handle_err(e);
                    if !self.delay.is_zero() {
                        tokio::time::sleep(self.delay).await;
                    }
                }
                // `Break`, or `Retry` with an exhausted budget: give up.
                Err(RetryError::Retry(e) | RetryError::Break(e)) => return Err(e),
                Ok(t) => return Ok(t),
            };
        }
    }
    // Consumes one retry and logs the error as a warning.
    fn handle_err(&mut self, err: Error) {
        debug_assert!(self.retries > 0);
        self.retries -= 1;
        self.log(err, true);
    }
    // Logs the error together with the remaining retry count; `warn` selects
    // the shell warning channel over tracing info.
    fn log(&self, err: Error, warn: bool) {
        let msg = format!(
            "{err}{delay} ({retries} tries remaining)",
            delay = if self.delay.is_zero() {
                String::new()
            } else {
                format!("; waiting {} seconds before trying again", self.delay.as_secs())
            },
            retries = self.retries,
        );
        if warn {
            let _ = sh_warn!("{msg}");
        } else {
            tracing::info!("{msg}");
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/term.rs | crates/common/src/term.rs | //! terminal utils
use foundry_compilers::{
artifacts::remappings::Remapping,
report::{self, BasicStdoutReporter, Reporter},
};
use itertools::Itertools;
use semver::Version;
use std::{
io,
io::{IsTerminal, prelude::*},
path::{Path, PathBuf},
sync::{
LazyLock,
mpsc::{self, TryRecvError},
},
thread,
time::Duration,
};
use yansi::Paint;
use crate::shell;
/// Some spinners
// https://github.com/gernest/wow/blob/master/spin/spinners.go
pub static SPINNERS: &[&[&str]] = &[
&["⠃", "⠊", "⠒", "⠢", "⠆", "⠰", "⠔", "⠒", "⠑", "⠘"],
&[" ", "⠁", "⠉", "⠙", "⠚", "⠖", "⠦", "⠤", "⠠"],
&["┤", "┘", "┴", "└", "├", "┌", "┬", "┐"],
&["▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸"],
&[" ", "▘", "▀", "▜", "█", "▟", "▄", "▖"],
];
// Computed once, lazily: whether stdout is a tty doesn't change at runtime.
static TERM_SETTINGS: LazyLock<TermSettings> = LazyLock::new(TermSettings::from_env);
/// Helper type to determine the current tty
pub struct TermSettings {
    // `true` when stdout is a terminal, i.e. progress animations can be drawn.
    indicate_progress: bool,
}
impl TermSettings {
    /// Returns a new [`TermSettings`], configured from the current environment.
    pub fn from_env() -> Self {
        Self { indicate_progress: std::io::stdout().is_terminal() }
    }
}
/// A simple terminal progress spinner.
///
/// Rendering is skipped entirely when stdout is not a terminal
/// (see [`TermSettings`]).
pub struct Spinner {
    // Animation frames, cycled on every `tick`.
    indicator: &'static [&'static str],
    // When `true`, `tick` is a no-op (stdout is not a tty).
    no_progress: bool,
    // Message displayed next to the spinner frame.
    message: String,
    // Current frame index; wraps around.
    idx: usize,
}
impl Spinner {
    /// Creates a new spinner with the default indicator frames.
    pub fn new(msg: impl Into<String>) -> Self {
        Self::with_indicator(SPINNERS[0], msg)
    }
    /// Creates a new spinner with the given indicator frames and message.
    pub fn with_indicator(indicator: &'static [&'static str], msg: impl Into<String>) -> Self {
        Self {
            indicator,
            no_progress: !TERM_SETTINGS.indicate_progress,
            message: msg.into(),
            idx: 0,
        }
    }
    /// Advances the spinner by one frame, redrawing the current line in place.
    pub fn tick(&mut self) {
        if self.no_progress {
            return;
        }
        let indicator = self.indicator[self.idx % self.indicator.len()].green();
        let indicator = Paint::new(format!("[{indicator}]")).bold();
        // `\r\x1B[2K\r` returns the cursor and clears the line so each frame
        // overwrites the previous one.
        let _ = sh_print!("\r\x1B[2K\r{indicator} {}", self.message);
        io::stdout().flush().unwrap();
        self.idx = self.idx.wrapping_add(1);
    }
    /// Replaces the message shown next to the spinner.
    pub fn message(&mut self, msg: impl Into<String>) {
        self.message = msg.into();
    }
}
/// A spinner used as [`report::Reporter`]
///
/// This reporter will prefix messages with a spinning cursor
#[derive(Debug)]
#[must_use = "Terminates the spinner on drop"]
pub struct SpinnerReporter {
    /// The sender to the spinner thread.
    sender: mpsc::Sender<SpinnerMsg>,
    /// The project root path for trimming file paths in verbose output.
    project_root: Option<PathBuf>,
}
impl SpinnerReporter {
    /// Spawns the [`Spinner`] on a new thread
    ///
    /// The spinner's message will be updated via the `reporter` events
    ///
    /// On drop the channel will disconnect and the thread will terminate
    pub fn spawn(project_root: Option<PathBuf>) -> Self {
        let (sender, rx) = mpsc::channel::<SpinnerMsg>();
        std::thread::Builder::new()
            .name("spinner".into())
            .spawn(move || {
                let mut spinner = Spinner::new("Compiling...");
                loop {
                    spinner.tick();
                    match rx.try_recv() {
                        Ok(SpinnerMsg::Msg(msg)) => {
                            spinner.message(msg);
                            // new line so past messages are not overwritten
                            let _ = sh_println!();
                        }
                        Ok(SpinnerMsg::Shutdown(ack)) => {
                            // end with a newline
                            let _ = sh_println!();
                            // Acknowledge so `Drop` can return once output is final.
                            let _ = ack.send(());
                            break;
                        }
                        // All senders dropped without an explicit shutdown: stop.
                        Err(TryRecvError::Disconnected) => break,
                        // Nothing to do yet: keep animating at ~10 frames/second.
                        Err(TryRecvError::Empty) => thread::sleep(Duration::from_millis(100)),
                    }
                }
            })
            .expect("failed to spawn thread");
        Self { sender, project_root }
    }
    // Forwards a message to the spinner thread; ignored if the thread is gone.
    fn send_msg(&self, msg: impl Into<String>) {
        let _ = self.sender.send(SpinnerMsg::Msg(msg.into()));
    }
}
enum SpinnerMsg {
Msg(String),
Shutdown(mpsc::Sender<()>),
}
impl Drop for SpinnerReporter {
fn drop(&mut self) {
let (tx, rx) = mpsc::channel();
if self.sender.send(SpinnerMsg::Shutdown(tx)).is_ok() {
let _ = rx.recv();
}
}
}
impl Reporter for SpinnerReporter {
fn on_compiler_spawn(&self, compiler_name: &str, version: &Version, dirty_files: &[PathBuf]) {
// Verbose message with dirty files displays first to avoid being overlapped
// by the spinner in .tick() which prints repeatedly over the same line.
if shell::verbosity() >= 5 {
self.send_msg(format!(
"Files to compile:\n{}",
dirty_files
.iter()
.map(|path| {
let trimmed_path = if let Some(project_root) = &self.project_root {
path.strip_prefix(project_root).unwrap_or(path)
} else {
path
};
format!("- {}", trimmed_path.display())
})
.sorted()
.format("\n")
));
}
self.send_msg(format!(
"Compiling {} files with {} {}.{}.{}",
dirty_files.len(),
compiler_name,
version.major,
version.minor,
version.patch
));
}
fn on_compiler_success(&self, compiler_name: &str, version: &Version, duration: &Duration) {
self.send_msg(format!(
"{} {}.{}.{} finished in {duration:.2?}",
compiler_name, version.major, version.minor, version.patch
));
}
fn on_solc_installation_start(&self, version: &Version) {
self.send_msg(format!("Installing Solc version {version}"));
}
fn on_solc_installation_success(&self, version: &Version) {
self.send_msg(format!("Successfully installed Solc {version}"));
}
fn on_solc_installation_error(&self, version: &Version, error: &str) {
self.send_msg(format!("Failed to install Solc {version}: {error}").red().to_string());
}
fn on_unresolved_imports(&self, imports: &[(&Path, &Path)], remappings: &[Remapping]) {
self.send_msg(report::format_unresolved_imports(imports, remappings));
}
}
/// If the output medium is terminal, this calls `f` within the [`SpinnerReporter`] that displays a
/// spinning cursor to display solc progress.
///
/// If no terminal is available this falls back to common `println!` in [`BasicStdoutReporter`].
pub fn with_spinner_reporter<T>(project_root: Option<PathBuf>, f: impl FnOnce() -> T) -> T {
let reporter = if TERM_SETTINGS.indicate_progress {
report::Report::new(SpinnerReporter::spawn(project_root))
} else {
report::Report::new(BasicStdoutReporter::default())
};
report::with_scoped(&reporter, f)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
#[ignore]
fn can_spin() {
let mut s = Spinner::new("Compiling".to_string());
let ticks = 50;
for _ in 0..ticks {
std::thread::sleep(std::time::Duration::from_millis(100));
s.tick();
}
}
#[test]
fn can_format_properly() {
let r = SpinnerReporter::spawn(None);
let remappings: Vec<Remapping> = vec![
"library/=library/src/".parse().unwrap(),
"weird-erc20/=lib/weird-erc20/src/".parse().unwrap(),
"ds-test/=lib/ds-test/src/".parse().unwrap(),
"openzeppelin-contracts/=lib/openzeppelin-contracts/contracts/".parse().unwrap(),
];
let unresolved = vec![(Path::new("./src/Import.sol"), Path::new("src/File.col"))];
r.on_unresolved_imports(&unresolved, &remappings);
// formats:
// [⠒] Unable to resolve imports:
// "./src/Import.sol" in "src/File.col"
// with remappings:
// library/=library/src/
// weird-erc20/=lib/weird-erc20/src/
// ds-test/=lib/ds-test/src/
// openzeppelin-contracts/=lib/openzeppelin-contracts/contracts/
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/iter.rs | crates/common/src/iter.rs | use std::iter::Peekable;
pub struct Delimited<I: Iterator> {
is_first: bool,
iter: Peekable<I>,
}
pub trait IterDelimited: Iterator + Sized {
fn delimited(self) -> Delimited<Self> {
Delimited { is_first: true, iter: self.peekable() }
}
}
impl<I: Iterator> IterDelimited for I {}
pub struct IteratorPosition {
pub is_first: bool,
pub is_last: bool,
}
impl<I: Iterator> Iterator for Delimited<I> {
type Item = (IteratorPosition, I::Item);
fn next(&mut self) -> Option<Self::Item> {
let item = self.iter.next()?;
let position =
IteratorPosition { is_first: self.is_first, is_last: self.iter.peek().is_none() };
self.is_first = false;
Some((position, item))
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/errors/fs.rs | crates/common/src/errors/fs.rs | use std::{
io,
path::{Path, PathBuf},
};
/// Various error variants for `fs` operations that serve as an addition to the io::Error which
/// does not provide any information about the path.
#[derive(Debug, thiserror::Error)]
#[expect(missing_docs)]
pub enum FsPathError {
/// Provides additional path context for [`std::fs::write`].
#[error("failed to write to {path:?}: {source}")]
Write { source: io::Error, path: PathBuf },
/// Provides additional path context for [`std::fs::read`].
#[error("failed to read from {path:?}: {source}")]
Read { source: io::Error, path: PathBuf },
/// Provides additional path context for [`std::fs::copy`].
#[error("failed to copy from {from:?} to {to:?}: {source}")]
Copy { source: io::Error, from: PathBuf, to: PathBuf },
/// Provides additional path context for [`std::fs::read_link`].
#[error("failed to read from {path:?}: {source}")]
ReadLink { source: io::Error, path: PathBuf },
/// Provides additional path context for [`std::fs::File::create`].
#[error("failed to create file {path:?}: {source}")]
CreateFile { source: io::Error, path: PathBuf },
/// Provides additional path context for [`std::fs::remove_file`].
#[error("failed to remove file {path:?}: {source}")]
RemoveFile { source: io::Error, path: PathBuf },
/// Provides additional path context for [`std::fs::create_dir`].
#[error("failed to create dir {path:?}: {source}")]
CreateDir { source: io::Error, path: PathBuf },
/// Provides additional path context for [`std::fs::remove_dir`].
#[error("failed to remove dir {path:?}: {source}")]
RemoveDir { source: io::Error, path: PathBuf },
/// Provides additional path context for [`std::fs::File::open`].
#[error("failed to open file {path:?}: {source}")]
Open { source: io::Error, path: PathBuf },
#[error("failed to lock file {path:?}: {source}")]
Lock { source: io::Error, path: PathBuf },
#[error("failed to unlock file {path:?}: {source}")]
Unlock { source: io::Error, path: PathBuf },
/// Provides additional path context for the file whose contents should be parsed as JSON.
#[error("failed to parse json file: {path:?}: {source}")]
ReadJson { source: serde_json::Error, path: PathBuf },
/// Provides additional path context for the new JSON file.
#[error("failed to write to json file: {path:?}: {source}")]
WriteJson { source: serde_json::Error, path: PathBuf },
}
impl FsPathError {
/// Returns the complementary error variant for [`std::fs::write`].
pub fn write(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::Write { source, path: path.into() }
}
/// Returns the complementary error variant for [`std::fs::read`].
pub fn read(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::Read { source, path: path.into() }
}
/// Returns the complementary error variant for [`std::fs::copy`].
pub fn copy(source: io::Error, from: impl Into<PathBuf>, to: impl Into<PathBuf>) -> Self {
Self::Copy { source, from: from.into(), to: to.into() }
}
/// Returns the complementary error variant for [`std::fs::read_link`].
pub fn read_link(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::ReadLink { source, path: path.into() }
}
/// Returns the complementary error variant for [`std::fs::File::create`].
pub fn create_file(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::CreateFile { source, path: path.into() }
}
/// Returns the complementary error variant for [`std::fs::remove_file`].
pub fn remove_file(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::RemoveFile { source, path: path.into() }
}
/// Returns the complementary error variant for [`std::fs::create_dir`].
pub fn create_dir(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::CreateDir { source, path: path.into() }
}
/// Returns the complementary error variant for [`std::fs::remove_dir`].
pub fn remove_dir(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::RemoveDir { source, path: path.into() }
}
/// Returns the complementary error variant for [`std::fs::File::open`].
pub fn open(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::Open { source, path: path.into() }
}
/// Returns the complementary error variant when locking a file.
pub fn lock(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::Lock { source, path: path.into() }
}
/// Returns the complementary error variant when unlocking a file.
pub fn unlock(source: io::Error, path: impl Into<PathBuf>) -> Self {
Self::Unlock { source, path: path.into() }
}
}
impl AsRef<Path> for FsPathError {
fn as_ref(&self) -> &Path {
match self {
Self::Write { path, .. }
| Self::Read { path, .. }
| Self::ReadLink { path, .. }
| Self::Copy { from: path, .. }
| Self::CreateDir { path, .. }
| Self::RemoveDir { path, .. }
| Self::CreateFile { path, .. }
| Self::RemoveFile { path, .. }
| Self::Open { path, .. }
| Self::Lock { path, .. }
| Self::Unlock { path, .. }
| Self::ReadJson { path, .. }
| Self::WriteJson { path, .. } => path,
}
}
}
impl From<FsPathError> for io::Error {
fn from(value: FsPathError) -> Self {
match value {
FsPathError::Write { source, .. }
| FsPathError::Read { source, .. }
| FsPathError::ReadLink { source, .. }
| FsPathError::Copy { source, .. }
| FsPathError::CreateDir { source, .. }
| FsPathError::RemoveDir { source, .. }
| FsPathError::CreateFile { source, .. }
| FsPathError::RemoveFile { source, .. }
| FsPathError::Open { source, .. }
| FsPathError::Lock { source, .. }
| FsPathError::Unlock { source, .. } => source,
FsPathError::ReadJson { source, .. } | FsPathError::WriteJson { source, .. } => {
source.into()
}
}
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/errors/mod.rs | crates/common/src/errors/mod.rs | //! Commonly used errors
mod fs;
pub use fs::FsPathError;
mod private {
use eyre::Chain;
use std::error::Error;
pub trait ErrorChain {
fn chain(&self) -> Chain<'_>;
}
impl ErrorChain for dyn Error + 'static {
fn chain(&self) -> Chain<'_> {
Chain::new(self)
}
}
impl ErrorChain for eyre::Report {
fn chain(&self) -> Chain<'_> {
self.chain()
}
}
}
/// Displays a chain of errors in a single line.
pub fn display_chain<E: private::ErrorChain + ?Sized>(error: &E) -> String {
dedup_chain(error).join("; ")
}
/// Deduplicates a chain of errors.
pub fn dedup_chain<E: private::ErrorChain + ?Sized>(error: &E) -> Vec<String> {
let mut causes = all_sources(error);
// Deduplicate the common pattern `msg1: msg2; msg2` -> `msg1: msg2`.
causes.dedup_by(|b, a| a.contains(b.as_str()));
causes
}
fn all_sources<E: private::ErrorChain + ?Sized>(err: &E) -> Vec<String> {
err.chain().map(|cause| cause.to_string().trim().to_string()).collect()
}
/// Converts solar errors to an eyre error.
pub fn convert_solar_errors(dcx: &solar::interface::diagnostics::DiagCtxt) -> eyre::Result<()> {
match dcx.emitted_errors() {
Some(Ok(())) => Ok(()),
Some(Err(e)) if !e.is_empty() => eyre::bail!("solar run failed:\n\n{e}"),
_ if dcx.has_errors().is_err() => eyre::bail!("solar run failed"),
_ => Ok(()),
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn dedups_contained() {
#[derive(thiserror::Error, Debug)]
#[error("my error: {0}")]
struct A(#[from] B);
#[derive(thiserror::Error, Debug)]
#[error("{0}")]
struct B(String);
let ee = eyre::Report::from(A(B("hello".into())));
assert_eq!(ee.chain().count(), 2, "{ee:?}");
let full = all_sources(&ee).join("; ");
assert_eq!(full, "my error: hello; hello");
let chained = display_chain(&ee);
assert_eq!(chained, "my error: hello");
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/preprocessor/deps.rs | crates/common/src/preprocessor/deps.rs | use super::{
data::{ContractData, PreprocessorData},
span_to_range,
};
use foundry_compilers::Updates;
use itertools::Itertools;
use solar::sema::{
Gcx, Hir,
hir::{CallArgs, ContractId, Expr, ExprKind, NamedArg, Stmt, StmtKind, TypeKind, Visit},
interface::{SourceMap, data_structures::Never, source_map::FileName},
};
use std::{
collections::{BTreeMap, BTreeSet, HashSet},
ops::{ControlFlow, Range},
path::{Path, PathBuf},
};
/// Holds data about referenced source contracts and bytecode dependencies.
pub(crate) struct PreprocessorDependencies {
// Mapping contract id to preprocess -> contract bytecode dependencies.
pub preprocessed_contracts: BTreeMap<ContractId, Vec<BytecodeDependency>>,
// Referenced contract ids.
pub referenced_contracts: HashSet<ContractId>,
}
impl PreprocessorDependencies {
pub fn new(
gcx: Gcx<'_>,
paths: &[PathBuf],
src_dir: &Path,
root_dir: &Path,
mocks: &mut HashSet<PathBuf>,
) -> Self {
let mut preprocessed_contracts = BTreeMap::new();
let mut referenced_contracts = HashSet::new();
let mut current_mocks = HashSet::new();
// Helper closure for iterating candidate contracts to preprocess (tests and scripts).
let candidate_contracts = || {
gcx.hir.contract_ids().filter_map(|id| {
let contract = gcx.hir.contract(id);
let source = gcx.hir.source(contract.source);
let FileName::Real(path) = &source.file.name else {
return None;
};
if !paths.contains(path) {
trace!("{} is not test or script", path.display());
return None;
}
Some((id, contract, source, path))
})
};
// Collect current mocks.
for (_, contract, _, path) in candidate_contracts() {
if contract.linearized_bases.iter().any(|base_id| {
let base = gcx.hir.contract(*base_id);
matches!(
&gcx.hir.source(base.source).file.name,
FileName::Real(base_path) if base_path.starts_with(src_dir)
)
}) {
let mock_path = root_dir.join(path);
trace!("found mock contract {}", mock_path.display());
current_mocks.insert(mock_path);
}
}
// Collect dependencies for non-mock test/script contracts.
for (contract_id, contract, source, path) in candidate_contracts() {
let full_path = root_dir.join(path);
if current_mocks.contains(&full_path) {
trace!("{} is a mock, skipping", path.display());
continue;
}
// Make sure current contract is not in list of mocks (could happen when a contract
// which used to be a mock is refactored to a non-mock implementation).
mocks.remove(&full_path);
let mut deps_collector =
BytecodeDependencyCollector::new(gcx, source.file.src.as_str(), src_dir);
// Analyze current contract.
let _ = deps_collector.walk_contract(contract);
// Ignore empty test contracts declared in source files with other contracts.
if !deps_collector.dependencies.is_empty() {
preprocessed_contracts.insert(contract_id, deps_collector.dependencies);
}
// Record collected referenced contract ids.
referenced_contracts.extend(deps_collector.referenced_contracts);
}
// Add current mocks.
mocks.extend(current_mocks);
Self { preprocessed_contracts, referenced_contracts }
}
}
/// Represents a bytecode dependency kind.
#[derive(Debug)]
enum BytecodeDependencyKind {
/// `type(Contract).creationCode`
CreationCode,
/// `new Contract`.
New {
/// Contract name.
name: String,
/// Constructor args length.
args_length: usize,
/// Constructor call args offset.
call_args_offset: usize,
/// `msg.value` (if any) used when creating contract.
value: Option<String>,
/// `salt` (if any) used when creating contract.
salt: Option<String>,
/// Whether it's a try contract creation statement, with custom return.
try_stmt: Option<bool>,
},
}
/// Represents a single bytecode dependency.
#[derive(Debug)]
pub(crate) struct BytecodeDependency {
/// Dependency kind.
kind: BytecodeDependencyKind,
/// Source map location of this dependency.
loc: Range<usize>,
/// HIR id of referenced contract.
referenced_contract: ContractId,
}
/// Walks over contract HIR and collects [`BytecodeDependency`]s and referenced contracts.
struct BytecodeDependencyCollector<'gcx, 'src> {
/// Source map, used for determining contract item locations.
gcx: Gcx<'gcx>,
/// Source content of current contract.
src: &'src str,
/// Project source dir, used to determine if referenced contract is a source contract.
src_dir: &'src Path,
/// Dependencies collected for current contract.
dependencies: Vec<BytecodeDependency>,
/// Unique HIR ids of contracts referenced from current contract.
referenced_contracts: HashSet<ContractId>,
}
impl<'gcx, 'src> BytecodeDependencyCollector<'gcx, 'src> {
fn new(gcx: Gcx<'gcx>, src: &'src str, src_dir: &'src Path) -> Self {
Self { gcx, src, src_dir, dependencies: vec![], referenced_contracts: HashSet::default() }
}
/// Collects reference identified as bytecode dependency of analyzed contract.
/// Discards any reference that is not in project src directory (e.g. external
/// libraries or mock contracts that extend source contracts).
fn collect_dependency(&mut self, dependency: BytecodeDependency) {
let contract = self.gcx.hir.contract(dependency.referenced_contract);
let source = self.gcx.hir.source(contract.source);
let FileName::Real(path) = &source.file.name else {
return;
};
if !path.starts_with(self.src_dir) {
let path = path.display();
trace!("ignore dependency {path}");
return;
}
self.referenced_contracts.insert(dependency.referenced_contract);
self.dependencies.push(dependency);
}
}
impl<'gcx> Visit<'gcx> for BytecodeDependencyCollector<'gcx, '_> {
type BreakValue = Never;
fn hir(&self) -> &'gcx Hir<'gcx> {
&self.gcx.hir
}
fn visit_expr(&mut self, expr: &'gcx Expr<'gcx>) -> ControlFlow<Self::BreakValue> {
match &expr.kind {
ExprKind::Call(call_expr, call_args, named_args) => {
if let Some(dependency) = handle_call_expr(
self.src,
self.gcx.sess.source_map(),
expr,
call_expr,
call_args,
named_args,
) {
self.collect_dependency(dependency);
}
}
ExprKind::Member(member_expr, ident) => {
if let ExprKind::TypeCall(ty) = &member_expr.kind
&& let TypeKind::Custom(contract_id) = &ty.kind
&& ident.name.as_str() == "creationCode"
&& let Some(contract_id) = contract_id.as_contract()
{
self.collect_dependency(BytecodeDependency {
kind: BytecodeDependencyKind::CreationCode,
loc: span_to_range(self.gcx.sess.source_map(), expr.span),
referenced_contract: contract_id,
});
}
}
_ => {}
}
self.walk_expr(expr)
}
fn visit_stmt(&mut self, stmt: &'gcx Stmt<'gcx>) -> ControlFlow<Self::BreakValue> {
if let StmtKind::Try(stmt_try) = stmt.kind
&& let ExprKind::Call(call_expr, call_args, named_args) = &stmt_try.expr.kind
&& let Some(mut dependency) = handle_call_expr(
self.src,
self.gcx.sess.source_map(),
&stmt_try.expr,
call_expr,
call_args,
named_args,
)
{
let has_custom_return = if let Some(clause) = stmt_try.clauses.first()
&& clause.args.len() == 1
&& let Some(ret_var) = clause.args.first()
&& let TypeKind::Custom(_) = self.hir().variable(*ret_var).ty.kind
{
true
} else {
false
};
if let BytecodeDependencyKind::New { try_stmt, .. } = &mut dependency.kind {
*try_stmt = Some(has_custom_return);
}
self.collect_dependency(dependency);
for clause in stmt_try.clauses {
for &var in clause.args {
self.visit_nested_var(var)?;
}
for stmt in clause.block.stmts {
self.visit_stmt(stmt)?;
}
}
return ControlFlow::Continue(());
}
self.walk_stmt(stmt)
}
}
/// Helper function to analyze and extract bytecode dependency from a given call expression.
fn handle_call_expr(
src: &str,
source_map: &SourceMap,
parent_expr: &Expr<'_>,
call_expr: &Expr<'_>,
call_args: &CallArgs<'_>,
named_args: &Option<&[NamedArg<'_>]>,
) -> Option<BytecodeDependency> {
if let ExprKind::New(ty_new) = &call_expr.kind
&& let TypeKind::Custom(item_id) = ty_new.kind
&& let Some(contract_id) = item_id.as_contract()
{
let name_loc = span_to_range(source_map, ty_new.span);
let name = &src[name_loc];
// Calculate offset to remove named args, e.g. for an expression like
// `new Counter {value: 333} ( address(this))`
// the offset will be used to replace `{value: 333} ( ` with `(`
let call_args_offset = if named_args.is_some() && !call_args.is_empty() {
(call_args.span.lo() - ty_new.span.hi()).to_usize()
} else {
0
};
let args_len = parent_expr.span.hi() - ty_new.span.hi();
return Some(BytecodeDependency {
kind: BytecodeDependencyKind::New {
name: name.to_string(),
args_length: args_len.to_usize(),
call_args_offset,
value: named_arg(src, named_args, "value", source_map),
salt: named_arg(src, named_args, "salt", source_map),
try_stmt: None,
},
loc: span_to_range(source_map, call_expr.span),
referenced_contract: contract_id,
});
}
None
}
/// Helper function to extract value of a given named arg.
fn named_arg(
src: &str,
named_args: &Option<&[NamedArg<'_>]>,
arg: &str,
source_map: &SourceMap,
) -> Option<String> {
named_args.unwrap_or_default().iter().find(|named_arg| named_arg.name.as_str() == arg).map(
|named_arg| {
let named_arg_loc = span_to_range(source_map, named_arg.value.span);
src[named_arg_loc].to_string()
},
)
}
/// Goes over all test/script files and replaces bytecode dependencies with cheatcode
/// invocations.
///
/// Special handling of try/catch statements with custom returns, where the try statement becomes
/// ```solidity
/// try this.addressToCounter() returns (Counter c)
/// ```
/// and helper to cast address is appended
/// ```solidity
/// function addressToCounter(address addr) returns (Counter) {
/// return Counter(addr);
/// }
/// ```
pub(crate) fn remove_bytecode_dependencies(
gcx: Gcx<'_>,
deps: &PreprocessorDependencies,
data: &PreprocessorData,
) -> Updates {
let mut updates = Updates::default();
for (contract_id, deps) in &deps.preprocessed_contracts {
let contract = gcx.hir.contract(*contract_id);
let source = gcx.hir.source(contract.source);
let FileName::Real(path) = &source.file.name else {
continue;
};
let updates = updates.entry(path.clone()).or_default();
let mut used_helpers = BTreeSet::new();
let vm_interface_name = format!("VmContractHelper{}", contract_id.get());
// `address(uint160(uint256(keccak256("hevm cheat code"))))`
let vm = format!("{vm_interface_name}(0x7109709ECfa91a80626fF3989D68f67F5b1DD12D)");
let mut try_catch_helpers: HashSet<&str> = HashSet::default();
for dep in deps {
let Some(ContractData { artifact, constructor_data, .. }) =
data.get(&dep.referenced_contract)
else {
continue;
};
match &dep.kind {
BytecodeDependencyKind::CreationCode => {
// for creation code we need to just call getCode
updates.insert((
dep.loc.start,
dep.loc.end,
format!("{vm}.getCode(\"{artifact}\")"),
));
}
BytecodeDependencyKind::New {
name,
args_length,
call_args_offset,
value,
salt,
try_stmt,
} => {
let (mut update, closing_seq) = if let Some(has_ret) = try_stmt {
if *has_ret {
// try this.addressToCounter1() returns (Counter c)
try_catch_helpers.insert(name);
(format!("this.addressTo{name}{id}(", id = contract_id.get()), "}))")
} else {
(String::new(), "})")
}
} else {
(format!("{name}(payable("), "})))")
};
update.push_str(&format!("{vm}.deployCode({{"));
update.push_str(&format!("_artifact: \"{artifact}\""));
if let Some(value) = value {
update.push_str(", ");
update.push_str(&format!("_value: {value}"));
}
if let Some(salt) = salt {
update.push_str(", ");
update.push_str(&format!("_salt: {salt}"));
}
if constructor_data.is_some() {
// Insert our helper
used_helpers.insert(dep.referenced_contract);
update.push_str(", ");
update.push_str(&format!(
"_args: encodeArgs{id}(DeployHelper{id}.FoundryPpConstructorArgs",
id = dep.referenced_contract.get()
));
updates.insert((dep.loc.start, dep.loc.end + call_args_offset, update));
updates.insert((
dep.loc.end + args_length,
dep.loc.end + args_length,
format!("){closing_seq}"),
));
} else {
update.push_str(closing_seq);
updates.insert((dep.loc.start, dep.loc.end + args_length, update));
}
}
};
}
// Add try catch statements after last function of the test contract.
if !try_catch_helpers.is_empty()
&& let Some(last_fn_id) = contract.functions().last()
{
let last_fn_range =
span_to_range(gcx.sess.source_map(), gcx.hir.function(last_fn_id).span);
let to_address_fns = try_catch_helpers
.iter()
.map(|ty| {
format!(
r#"
function addressTo{ty}{id}(address addr) public pure returns ({ty}) {{
return {ty}(addr);
}}
"#,
id = contract_id.get()
)
})
.collect::<String>();
updates.insert((last_fn_range.end, last_fn_range.end, to_address_fns));
}
let helper_imports = used_helpers.into_iter().map(|id| {
let id = id.get();
format!(
"import {{DeployHelper{id}, encodeArgs{id}}} from \"foundry-pp/DeployHelper{id}.sol\";",
)
}).join("\n");
updates.insert((
source.file.src.len(),
source.file.src.len(),
format!(
r#"
{helper_imports}
interface {vm_interface_name} {{
function deployCode(string memory _artifact) external returns (address);
function deployCode(string memory _artifact, bytes32 _salt) external returns (address);
function deployCode(string memory _artifact, bytes memory _args) external returns (address);
function deployCode(string memory _artifact, bytes memory _args, bytes32 _salt) external returns (address);
function deployCode(string memory _artifact, uint256 _value) external returns (address);
function deployCode(string memory _artifact, uint256 _value, bytes32 _salt) external returns (address);
function deployCode(string memory _artifact, bytes memory _args, uint256 _value) external returns (address);
function deployCode(string memory _artifact, bytes memory _args, uint256 _value, bytes32 _salt) external returns (address);
function getCode(string memory _artifact) external returns (bytes memory);
}}"#
),
));
}
updates
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/preprocessor/mod.rs | crates/common/src/preprocessor/mod.rs | use crate::errors::convert_solar_errors;
use foundry_compilers::{
Compiler, ProjectPathsConfig, SourceParser, apply_updates,
artifacts::SolcLanguage,
error::Result,
multi::{MultiCompiler, MultiCompilerInput, MultiCompilerLanguage},
project::Preprocessor,
solc::{SolcCompiler, SolcVersionedInput},
};
use solar::parse::{ast::Span, interface::SourceMap};
use std::{
collections::HashSet,
ops::{ControlFlow, Range},
path::PathBuf,
};
mod data;
use data::{collect_preprocessor_data, create_deploy_helpers};
mod deps;
use deps::{PreprocessorDependencies, remove_bytecode_dependencies};
/// Returns the range of the given span in the source map.
#[track_caller]
fn span_to_range(source_map: &SourceMap, span: Span) -> Range<usize> {
source_map.span_to_range(span).unwrap()
}
/// Preprocessor that replaces static bytecode linking in tests and scripts (`new Contract`) with
/// dynamic linkage through (`Vm.create*`).
///
/// This allows for more efficient caching when iterating on tests.
///
/// See <https://github.com/foundry-rs/foundry/pull/10010>.
#[derive(Debug)]
pub struct DynamicTestLinkingPreprocessor;
impl Preprocessor<SolcCompiler> for DynamicTestLinkingPreprocessor {
#[instrument(name = "DynamicTestLinkingPreprocessor::preprocess", skip_all)]
fn preprocess(
&self,
_solc: &SolcCompiler,
input: &mut SolcVersionedInput,
paths: &ProjectPathsConfig<SolcLanguage>,
mocks: &mut HashSet<PathBuf>,
) -> Result<()> {
// Skip if we are not preprocessing any tests or scripts. Avoids unnecessary AST parsing.
if !input.input.sources.iter().any(|(path, _)| paths.is_test_or_script(path)) {
trace!("no tests or sources to preprocess");
return Ok(());
}
let mut compiler =
foundry_compilers::resolver::parse::SolParser::new(paths.with_language_ref())
.into_compiler();
let _ = compiler.enter_mut(|compiler| -> solar::interface::Result {
let mut pcx = compiler.parse();
// Add the sources into the context.
// Include all sources in the source map so as to not re-load them from disk, but only
// parse and preprocess tests and scripts.
let mut preprocessed_paths = vec![];
let sources = &mut input.input.sources;
for (path, source) in sources.iter() {
if let Ok(src_file) = compiler
.sess()
.source_map()
.new_source_file(path.clone(), source.content.as_str())
&& paths.is_test_or_script(path)
{
pcx.add_file(src_file);
preprocessed_paths.push(path.clone());
}
}
// Parse and preprocess.
pcx.parse();
let ControlFlow::Continue(()) = compiler.lower_asts()? else { return Ok(()) };
let gcx = compiler.gcx();
// Collect tests and scripts dependencies and identify mock contracts.
let deps = PreprocessorDependencies::new(
gcx,
&preprocessed_paths,
&paths.paths_relative().sources,
&paths.root,
mocks,
);
// Collect data of source contracts referenced in tests and scripts.
let data = collect_preprocessor_data(gcx, &deps.referenced_contracts);
// Extend existing sources with preprocessor deploy helper sources.
sources.extend(create_deploy_helpers(&data));
// Generate and apply preprocessor source updates.
apply_updates(sources, remove_bytecode_dependencies(gcx, &deps, &data));
Ok(())
});
// Warn if any diagnostics emitted during content parsing.
if let Err(err) = convert_solar_errors(compiler.dcx()) {
warn!(%err, "failed preprocessing");
}
Ok(())
}
}
impl Preprocessor<MultiCompiler> for DynamicTestLinkingPreprocessor {
fn preprocess(
&self,
compiler: &MultiCompiler,
input: &mut <MultiCompiler as Compiler>::Input,
paths: &ProjectPathsConfig<MultiCompilerLanguage>,
mocks: &mut HashSet<PathBuf>,
) -> Result<()> {
// Preprocess only Solc compilers.
let MultiCompilerInput::Solc(input) = input else { return Ok(()) };
let Some(solc) = &compiler.solc else { return Ok(()) };
let paths = paths.clone().with_language::<SolcLanguage>();
self.preprocess(solc, input, &paths, mocks)
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/preprocessor/data.rs | crates/common/src/preprocessor/data.rs | use super::span_to_range;
use foundry_compilers::artifacts::{Source, Sources};
use path_slash::PathExt;
use solar::sema::{
Gcx,
hir::{Contract, ContractId},
interface::source_map::FileName,
};
use std::{
collections::{BTreeMap, HashSet},
path::{Path, PathBuf},
};
/// Keeps data about project contracts definitions referenced from tests and scripts.
/// Contract id -> Contract data definition mapping.
pub type PreprocessorData = BTreeMap<ContractId, ContractData>;
/// Collects preprocessor data from referenced contracts.
pub(crate) fn collect_preprocessor_data(
gcx: Gcx<'_>,
referenced_contracts: &HashSet<ContractId>,
) -> PreprocessorData {
let mut data = PreprocessorData::default();
for contract_id in referenced_contracts {
let contract = gcx.hir.contract(*contract_id);
let source = gcx.hir.source(contract.source);
let FileName::Real(path) = &source.file.name else {
continue;
};
let contract_data = ContractData::new(gcx, *contract_id, contract, path, source);
data.insert(*contract_id, contract_data);
}
data
}
/// Creates helper libraries for contracts with a non-empty constructor.
///
/// See [`ContractData::build_helper`] for more details.
pub(crate) fn create_deploy_helpers(data: &BTreeMap<ContractId, ContractData>) -> Sources {
    let mut helpers = Sources::new();
    for (contract_id, contract) in data {
        // Contracts without constructor arguments produce no helper source.
        let Some(code) = contract.build_helper() else { continue };
        let path = format!("foundry-pp/DeployHelper{}.sol", contract_id.get());
        helpers.insert(path.into(), Source::new(code));
    }
    helpers
}
/// Keeps data about a contract constructor.
#[derive(Debug)]
pub struct ContractConstructorData {
/// ABI encoded args.
pub abi_encode_args: String,
/// Constructor struct fields.
pub struct_fields: String,
}
/// Keeps data about a single contract definition.
#[derive(Debug)]
pub(crate) struct ContractData {
/// HIR Id of the contract.
contract_id: ContractId,
/// Path of the source file.
path: PathBuf,
/// Name of the contract
name: String,
/// Constructor parameters, if any.
pub constructor_data: Option<ContractConstructorData>,
/// Artifact string to pass into cheatcodes.
pub artifact: String,
}
impl ContractData {
    /// Builds a [`ContractData`] from a HIR contract definition.
    ///
    /// Extracts the constructor parameter source text (for contracts with a
    /// non-empty constructor) so that helper structs can be generated later.
    fn new(
        gcx: Gcx<'_>,
        contract_id: ContractId,
        contract: &Contract<'_>,
        path: &Path,
        source: &solar::sema::hir::Source<'_>,
    ) -> Self {
        // Artifact id in the `path:ContractName` form passed to cheatcodes.
        let artifact = format!("{}:{}", path.to_slash_lossy(), contract.name);
        // Process data for contracts with constructor and parameters.
        let constructor_data = contract
            .ctor
            .map(|ctor_id| gcx.hir.function(ctor_id))
            .filter(|ctor| !ctor.parameters.is_empty())
            .map(|ctor| {
                let mut abi_encode_args = vec![];
                let mut struct_fields = vec![];
                let mut arg_index = 0;
                for param_id in ctor.parameters {
                    let src = source.file.src.as_str();
                    // Span of the parameter declaration: its exact type/name text
                    // is copied into the generated helper-struct field.
                    let loc =
                        span_to_range(gcx.sess.source_map(), gcx.hir.variable(*param_id).span);
                    // Struct members can't carry data-location keywords, so strip them.
                    let mut new_src = src[loc].replace(" memory ", " ").replace(" calldata ", " ");
                    if let Some(ident) = gcx.hir.variable(*param_id).name {
                        abi_encode_args.push(format!("args.{}", ident.name));
                    } else {
                        // Generate an unique name if constructor arg doesn't have one.
                        arg_index += 1;
                        abi_encode_args.push(format!("args.foundry_pp_ctor_arg{arg_index}"));
                        new_src.push_str(&format!(" foundry_pp_ctor_arg{arg_index}"));
                    }
                    struct_fields.push(new_src);
                }
                ContractConstructorData {
                    abi_encode_args: abi_encode_args.join(", "),
                    struct_fields: struct_fields.join("; "),
                }
            });
        Self {
            contract_id,
            path: path.to_path_buf(),
            name: contract.name.to_string(),
            constructor_data,
            artifact,
        }
    }
/// If contract has a non-empty constructor, generates a helper source file for it containing a
/// helper to encode constructor arguments.
///
/// This is needed because current preprocessing wraps the arguments, leaving them unchanged.
/// This allows us to handle nested new expressions correctly. However, this requires us to have
/// a way to wrap both named and unnamed arguments. i.e you can't do abi.encode({arg: val}).
///
/// This function produces a helper struct + a helper function to encode the arguments. The
/// struct is defined in scope of an abstract contract inheriting the contract containing the
/// constructor. This is done as a hack to allow us to inherit the same scope of definitions.
///
/// The resulted helper looks like this:
/// ```solidity
/// import "lib/openzeppelin-contracts/contracts/token/ERC20.sol";
///
/// abstract contract DeployHelper335 is ERC20 {
/// struct FoundryPpConstructorArgs {
/// string name;
/// string symbol;
/// }
/// }
///
/// function encodeArgs335(DeployHelper335.FoundryPpConstructorArgs memory args) pure returns (bytes memory) {
/// return abi.encode(args.name, args.symbol);
/// }
/// ```
///
/// Example usage:
/// ```solidity
/// new ERC20(name, symbol)
/// ```
/// becomes
/// ```solidity
/// vm.deployCode("artifact path", encodeArgs335(DeployHelper335.FoundryPpConstructorArgs(name, symbol)))
/// ```
/// With named arguments:
/// ```solidity
/// new ERC20({name: name, symbol: symbol})
/// ```
/// becomes
/// ```solidity
/// vm.deployCode("artifact path", encodeArgs335(DeployHelper335.FoundryPpConstructorArgs({name: name, symbol: symbol})))
/// ```
pub fn build_helper(&self) -> Option<String> {
let Self { contract_id, path, name, constructor_data, artifact: _ } = self;
let Some(constructor_details) = constructor_data else { return None };
let contract_id = contract_id.get();
let struct_fields = &constructor_details.struct_fields;
let abi_encode_args = &constructor_details.abi_encode_args;
let helper = format!(
r#"
// SPDX-License-Identifier: MIT
pragma solidity >=0.4.0;
import "{path}";
abstract contract DeployHelper{contract_id} is {name} {{
struct FoundryPpConstructorArgs {{
{struct_fields};
}}
}}
function encodeArgs{contract_id}(DeployHelper{contract_id}.FoundryPpConstructorArgs memory args) pure returns (bytes memory) {{
return abi.encode({abi_encode_args});
}}
"#,
path = path.to_slash_lossy(),
);
Some(helper)
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/provider/runtime_transport.rs | crates/common/src/provider/runtime_transport.rs | //! Runtime transport that connects on first request, which can take either of an HTTP,
//! WebSocket, or IPC transport. Retries are handled by a client layer (e.g.,
//! `RetryBackoffLayer`) when used.
use crate::{DEFAULT_USER_AGENT, REQUEST_TIMEOUT};
use alloy_json_rpc::{RequestPacket, ResponsePacket};
use alloy_pubsub::{PubSubConnect, PubSubFrontend};
use alloy_rpc_types::engine::{Claims, JwtSecret};
use alloy_transport::{
Authorization, BoxTransport, TransportError, TransportErrorKind, TransportFut,
};
use alloy_transport_http::Http;
use alloy_transport_ipc::IpcConnect;
use alloy_transport_ws::WsConnect;
use reqwest::header::{HeaderName, HeaderValue};
use std::{fmt, path::PathBuf, str::FromStr, sync::Arc};
use thiserror::Error;
use tokio::sync::RwLock;
use tower::Service;
use url::Url;
/// An enum representing the different transports that can be used to connect to a runtime.
/// Only meant to be used internally by [RuntimeTransport].
#[derive(Clone, Debug)]
pub enum InnerTransport {
/// HTTP transport
Http(Http<reqwest::Client>),
/// WebSocket transport
Ws(PubSubFrontend),
/// IPC transport
Ipc(PubSubFrontend),
}
/// Error type for the runtime transport.
#[derive(Error, Debug)]
pub enum RuntimeTransportError {
/// Internal transport error
#[error("Internal transport error: {0} with {1}")]
TransportError(TransportError, String),
/// Invalid URL scheme
#[error("URL scheme is not supported: {0}")]
BadScheme(String),
/// Invalid HTTP header
#[error("Invalid HTTP header: {0}")]
BadHeader(String),
/// Invalid file path
#[error("Invalid IPC file path: {0}")]
BadPath(String),
/// Invalid construction of Http provider
#[error(transparent)]
HttpConstructionError(#[from] reqwest::Error),
/// Invalid JWT
#[error("Invalid JWT: {0}")]
InvalidJwt(String),
}
/// Runtime transport that only connects on first request.
///
/// A runtime transport is a custom [`alloy_transport::Transport`] that only connects when the
/// *first* request is made. When the first request is made, it will connect to the runtime using
/// either an HTTP WebSocket, or IPC transport depending on the URL used.
/// Retries for rate-limiting and timeout-related errors are handled by an external
/// client layer (e.g., `RetryBackoffLayer`) when configured.
#[derive(Clone, Debug)]
pub struct RuntimeTransport {
/// The inner actual transport used.
inner: Arc<RwLock<Option<InnerTransport>>>,
/// The URL to connect to.
url: Url,
/// The headers to use for requests.
headers: Vec<String>,
/// The JWT to use for requests.
jwt: Option<String>,
/// The timeout for requests.
timeout: std::time::Duration,
/// Whether to accept invalid certificates.
accept_invalid_certs: bool,
}
/// A builder for [RuntimeTransport].
#[derive(Debug)]
pub struct RuntimeTransportBuilder {
url: Url,
headers: Vec<String>,
jwt: Option<String>,
timeout: std::time::Duration,
accept_invalid_certs: bool,
}
impl RuntimeTransportBuilder {
    /// Create a new builder with the given URL and default settings
    /// (no headers, no JWT, [`REQUEST_TIMEOUT`], valid certs only).
    pub fn new(url: Url) -> Self {
        Self {
            url,
            headers: vec![],
            jwt: None,
            timeout: REQUEST_TIMEOUT,
            accept_invalid_certs: false,
        }
    }
    /// Set the HTTP headers for the transport. Each entry is a `key: value` string.
    pub fn with_headers(mut self, headers: Vec<String>) -> Self {
        self.headers = headers;
        self
    }
    /// Set the JWT for the transport.
    pub fn with_jwt(mut self, jwt: Option<String>) -> Self {
        self.jwt = jwt;
        self
    }
    /// Set the timeout for the transport.
    pub fn with_timeout(mut self, timeout: std::time::Duration) -> Self {
        self.timeout = timeout;
        self
    }
    /// Set whether to accept invalid certificates.
    pub fn accept_invalid_certs(mut self, accept_invalid_certs: bool) -> Self {
        self.accept_invalid_certs = accept_invalid_certs;
        self
    }
    /// Builds the [RuntimeTransport] and returns it in a disconnected state.
    /// The runtime transport will then connect when the first request happens.
    pub fn build(self) -> RuntimeTransport {
        RuntimeTransport {
            inner: Arc::new(RwLock::new(None)),
            url: self.url,
            headers: self.headers,
            jwt: self.jwt,
            timeout: self.timeout,
            accept_invalid_certs: self.accept_invalid_certs,
        }
    }
}
impl fmt::Display for RuntimeTransport {
    /// Formats as `RuntimeTransport <url>`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Equivalent to `write!(f, "RuntimeTransport {}", self.url)`.
        f.write_fmt(format_args!("RuntimeTransport {}", self.url))
    }
}
impl RuntimeTransport {
/// Connects the underlying transport, depending on the URL scheme.
pub async fn connect(&self) -> Result<InnerTransport, RuntimeTransportError> {
match self.url.scheme() {
"http" | "https" => self.connect_http(),
"ws" | "wss" => self.connect_ws().await,
"file" => self.connect_ipc().await,
_ => Err(RuntimeTransportError::BadScheme(self.url.scheme().to_string())),
}
}
/// Creates a new reqwest client from this transport.
pub fn reqwest_client(&self) -> Result<reqwest::Client, RuntimeTransportError> {
let mut client_builder = reqwest::Client::builder()
.timeout(self.timeout)
.tls_built_in_root_certs(self.url.scheme() == "https")
.danger_accept_invalid_certs(self.accept_invalid_certs);
let mut headers = reqwest::header::HeaderMap::new();
// If there's a JWT, add it to the headers if we can decode it.
if let Some(jwt) = self.jwt.clone() {
let auth =
build_auth(jwt).map_err(|e| RuntimeTransportError::InvalidJwt(e.to_string()))?;
let mut auth_value: HeaderValue =
HeaderValue::from_str(&auth.to_string()).expect("Header should be valid string");
auth_value.set_sensitive(true);
headers.insert(reqwest::header::AUTHORIZATION, auth_value);
};
// Add any custom headers.
for header in &self.headers {
let make_err = || RuntimeTransportError::BadHeader(header.to_string());
let (key, val) = header.split_once(':').ok_or_else(make_err)?;
headers.insert(
HeaderName::from_str(key.trim()).map_err(|_| make_err())?,
HeaderValue::from_str(val.trim()).map_err(|_| make_err())?,
);
}
if !headers.contains_key(reqwest::header::USER_AGENT) {
headers.insert(
reqwest::header::USER_AGENT,
HeaderValue::from_str(DEFAULT_USER_AGENT)
.expect("User-Agent should be valid string"),
);
}
client_builder = client_builder.default_headers(headers);
Ok(client_builder.build()?)
}
/// Connects to an HTTP [alloy_transport_http::Http] transport.
fn connect_http(&self) -> Result<InnerTransport, RuntimeTransportError> {
let client = self.reqwest_client()?;
Ok(InnerTransport::Http(Http::with_client(client, self.url.clone())))
}
/// Connects to a WS transport.
async fn connect_ws(&self) -> Result<InnerTransport, RuntimeTransportError> {
let auth = self.jwt.as_ref().and_then(|jwt| build_auth(jwt.clone()).ok());
let mut ws = WsConnect::new(self.url.to_string());
if let Some(auth) = auth {
ws = ws.with_auth(auth);
};
let service = ws
.into_service()
.await
.map_err(|e| RuntimeTransportError::TransportError(e, self.url.to_string()))?;
Ok(InnerTransport::Ws(service))
}
/// Connects to an IPC transport.
async fn connect_ipc(&self) -> Result<InnerTransport, RuntimeTransportError> {
let path = url_to_file_path(&self.url)
.map_err(|_| RuntimeTransportError::BadPath(self.url.to_string()))?;
let ipc_connector = IpcConnect::new(path.clone());
let ipc = ipc_connector.into_service().await.map_err(|e| {
RuntimeTransportError::TransportError(e, path.clone().display().to_string())
})?;
Ok(InnerTransport::Ipc(ipc))
}
/// Sends a request using the underlying transport.
/// If this is the first request, it will connect to the appropriate transport depending on the
/// URL scheme. Retries are performed by an external client layer (e.g., `RetryBackoffLayer`),
/// if such a layer is configured by the caller.
/// For sending the actual request, this action is delegated down to the
/// underlying transport through Tower's [tower::Service::call]. See tower's [tower::Service]
/// trait for more information.
pub fn request(&self, req: RequestPacket) -> TransportFut<'static> {
let this = self.clone();
Box::pin(async move {
let mut inner = this.inner.read().await;
if inner.is_none() {
drop(inner);
{
let mut inner_mut = this.inner.write().await;
if inner_mut.is_none() {
*inner_mut =
Some(this.connect().await.map_err(TransportErrorKind::custom)?);
}
}
inner = this.inner.read().await;
}
// SAFETY: We just checked that the inner transport exists.
match inner.clone().expect("must've been initialized") {
InnerTransport::Http(mut http) => http.call(req),
InnerTransport::Ws(mut ws) => ws.call(req),
InnerTransport::Ipc(mut ipc) => ipc.call(req),
}
.await
})
}
/// Convert this transport into a boxed trait object.
pub fn boxed(self) -> BoxTransport
where
Self: Sized + Clone + Send + Sync + 'static,
{
BoxTransport::new(self)
}
}
impl tower::Service<RequestPacket> for RuntimeTransport {
type Response = ResponsePacket;
type Error = TransportError;
type Future = TransportFut<'static>;
#[inline]
fn poll_ready(
&mut self,
_cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Result<(), Self::Error>> {
std::task::Poll::Ready(Ok(()))
}
#[inline]
fn call(&mut self, req: RequestPacket) -> Self::Future {
self.request(req)
}
}
impl tower::Service<RequestPacket> for &RuntimeTransport {
type Response = ResponsePacket;
type Error = TransportError;
type Future = TransportFut<'static>;
#[inline]
fn poll_ready(
&mut self,
_cx: &mut std::task::Context<'_>,
) -> std::task::Poll<Result<(), Self::Error>> {
std::task::Poll::Ready(Ok(()))
}
#[inline]
fn call(&mut self, req: RequestPacket) -> Self::Future {
self.request(req)
}
}
/// Builds a bearer-token [`Authorization`] from a hex-encoded JWT secret.
///
/// The token is signed over default [`Claims`] (per the original: `iat` set to
/// the current timestamp).
fn build_auth(jwt: String) -> eyre::Result<Authorization> {
    // Decode jwt from hex, then generate claims and encode the bearer token.
    let secret = JwtSecret::from_hex(jwt)?;
    let token = secret.encode(&Claims::default())?;
    Ok(Authorization::Bearer(token))
}
#[cfg(windows)]
fn url_to_file_path(url: &Url) -> Result<PathBuf, ()> {
    // `file:///pipe/<name>` URLs are mapped to Windows named-pipe paths
    // (`\\.\pipe\<name>`); anything else falls back to the standard
    // file-URL -> filesystem-path conversion.
    const PREFIX: &str = "file:///pipe/";
    let url_str = url.as_str();
    if let Some(pipe_name) = url_str.strip_prefix(PREFIX) {
        let pipe_path = format!(r"\\.\pipe\{pipe_name}");
        return Ok(PathBuf::from(pipe_path));
    }
    url.to_file_path()
}
#[cfg(not(windows))]
fn url_to_file_path(url: &Url) -> Result<PathBuf, ()> {
    // On non-Windows platforms a `file://` URL maps directly to a filesystem path.
    url.to_file_path()
}
#[cfg(test)]
mod tests {
use super::*;
use reqwest::header::HeaderMap;
#[tokio::test]
async fn test_user_agent_header() {
let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap();
let url = Url::parse(&format!("http://{}", listener.local_addr().unwrap())).unwrap();
let http_handler = axum::routing::get(|actual_headers: HeaderMap| {
let user_agent = HeaderName::from_str("User-Agent").unwrap();
assert_eq!(actual_headers[user_agent], HeaderValue::from_str("test-agent").unwrap());
async { "" }
});
let server_task = tokio::spawn(async move {
axum::serve(listener, http_handler.into_make_service()).await.unwrap()
});
let transport = RuntimeTransportBuilder::new(url.clone())
.with_headers(vec!["User-Agent: test-agent".to_string()])
.build();
let inner = transport.connect_http().unwrap();
match inner {
InnerTransport::Http(http) => {
let _ = http.client().get(url).send().await.unwrap();
// assert inside http_handler
}
_ => unreachable!(),
}
server_task.abort();
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/provider/mod.rs | crates/common/src/provider/mod.rs | //! Provider-related instantiation and usage utilities.
pub mod runtime_transport;
use crate::{
ALCHEMY_FREE_TIER_CUPS, REQUEST_TIMEOUT, provider::runtime_transport::RuntimeTransportBuilder,
};
use alloy_chains::NamedChain;
use alloy_provider::{
Identity, ProviderBuilder as AlloyProviderBuilder, RootProvider,
fillers::{ChainIdFiller, FillProvider, GasFiller, JoinFill, NonceFiller, WalletFiller},
network::{AnyNetwork, EthereumWallet},
};
use alloy_rpc_client::ClientBuilder;
use alloy_transport::{layers::RetryBackoffLayer, utils::guess_local_url};
use eyre::{Result, WrapErr};
use reqwest::Url;
use std::{
net::SocketAddr,
path::{Path, PathBuf},
str::FromStr,
time::Duration,
};
use url::ParseError;
/// The assumed block time for unknown chains.
/// We assume that these are chains have a faster block time.
const DEFAULT_UNKNOWN_CHAIN_BLOCK_TIME: Duration = Duration::from_secs(3);
/// The factor to scale the block time by to get the poll interval.
const POLL_INTERVAL_BLOCK_TIME_SCALE_FACTOR: f32 = 0.6;
/// Helper type alias for a retry provider
pub type RetryProvider<N = AnyNetwork> = RootProvider<N>;
/// Helper type alias for a retry provider with a signer
pub type RetryProviderWithSigner<N = AnyNetwork> = FillProvider<
JoinFill<
JoinFill<
Identity,
JoinFill<
GasFiller,
JoinFill<
alloy_provider::fillers::BlobGasFiller,
JoinFill<NonceFiller, ChainIdFiller>,
>,
>,
>,
WalletFiller<EthereumWallet>,
>,
RootProvider<N>,
N,
>;
/// Constructs a provider with a 100 millisecond interval poll if it's a localhost URL (most likely
/// an anvil or other dev node) and with the default, or 7 second otherwise.
///
/// See [`try_get_http_provider`] for more details.
///
/// # Panics
///
/// Panics if the URL is invalid.
///
/// # Examples
///
/// ```
/// use foundry_common::provider::get_http_provider;
///
/// let retry_provider = get_http_provider("http://localhost:8545");
/// ```
#[inline]
#[track_caller]
pub fn get_http_provider(builder: impl AsRef<str>) -> RetryProvider {
try_get_http_provider(builder).unwrap()
}
/// Constructs a provider with a 100 millisecond interval poll if it's a localhost URL (most likely
/// an anvil or other dev node) and with the default, or 7 second otherwise.
#[inline]
pub fn try_get_http_provider(builder: impl AsRef<str>) -> Result<RetryProvider> {
ProviderBuilder::new(builder.as_ref()).build()
}
/// Helper type to construct a `RetryProvider`
#[derive(Debug)]
pub struct ProviderBuilder {
// Note: this is a result, so we can easily chain builder calls
url: Result<Url>,
chain: NamedChain,
max_retry: u32,
initial_backoff: u64,
timeout: Duration,
/// available CUPS
compute_units_per_second: u64,
/// JWT Secret
jwt: Option<String>,
headers: Vec<String>,
is_local: bool,
/// Whether to accept invalid certificates.
accept_invalid_certs: bool,
}
impl ProviderBuilder {
/// Creates a new builder instance
pub fn new(url_str: &str) -> Self {
// a copy is needed for the next lines to work
let mut url_str = url_str;
// invalid url: non-prefixed URL scheme is not allowed, so we prepend the default http
// prefix
let storage;
if url_str.starts_with("localhost:") {
storage = format!("http://{url_str}");
url_str = storage.as_str();
}
let url = Url::parse(url_str)
.or_else(|err| match err {
ParseError::RelativeUrlWithoutBase => {
if SocketAddr::from_str(url_str).is_ok() {
Url::parse(&format!("http://{url_str}"))
} else {
let path = Path::new(url_str);
if let Ok(path) = resolve_path(path) {
Url::parse(&format!("file://{}", path.display()))
} else {
Err(err)
}
}
}
_ => Err(err),
})
.wrap_err_with(|| format!("invalid provider URL: {url_str:?}"));
// Use the final URL string to guess if it's a local URL.
let is_local = url.as_ref().is_ok_and(|url| guess_local_url(url.as_str()));
Self {
url,
chain: NamedChain::Mainnet,
max_retry: 8,
initial_backoff: 800,
timeout: REQUEST_TIMEOUT,
// alchemy max cpus <https://docs.alchemy.com/reference/compute-units#what-are-cups-compute-units-per-second>
compute_units_per_second: ALCHEMY_FREE_TIER_CUPS,
jwt: None,
headers: vec![],
is_local,
accept_invalid_certs: false,
}
}
/// Enables a request timeout.
///
/// The timeout is applied from when the request starts connecting until the
/// response body has finished.
///
/// Default is no timeout.
pub fn timeout(mut self, timeout: Duration) -> Self {
self.timeout = timeout;
self
}
/// Sets the chain of the node the provider will connect to
pub fn chain(mut self, chain: NamedChain) -> Self {
self.chain = chain;
self
}
/// How often to retry a failed request
pub fn max_retry(mut self, max_retry: u32) -> Self {
self.max_retry = max_retry;
self
}
/// How often to retry a failed request. If `None`, defaults to the already-set value.
pub fn maybe_max_retry(mut self, max_retry: Option<u32>) -> Self {
self.max_retry = max_retry.unwrap_or(self.max_retry);
self
}
/// The starting backoff delay to use after the first failed request. If `None`, defaults to
/// the already-set value.
pub fn maybe_initial_backoff(mut self, initial_backoff: Option<u64>) -> Self {
self.initial_backoff = initial_backoff.unwrap_or(self.initial_backoff);
self
}
/// The starting backoff delay to use after the first failed request
pub fn initial_backoff(mut self, initial_backoff: u64) -> Self {
self.initial_backoff = initial_backoff;
self
}
/// Sets the number of assumed available compute units per second
///
/// See also, <https://docs.alchemy.com/reference/compute-units#what-are-cups-compute-units-per-second>
pub fn compute_units_per_second(mut self, compute_units_per_second: u64) -> Self {
self.compute_units_per_second = compute_units_per_second;
self
}
/// Sets the number of assumed available compute units per second
///
/// See also, <https://docs.alchemy.com/reference/compute-units#what-are-cups-compute-units-per-second>
pub fn compute_units_per_second_opt(mut self, compute_units_per_second: Option<u64>) -> Self {
if let Some(cups) = compute_units_per_second {
self.compute_units_per_second = cups;
}
self
}
/// Sets the provider to be local.
///
/// This is useful for local dev nodes.
pub fn local(mut self, is_local: bool) -> Self {
self.is_local = is_local;
self
}
/// Sets aggressive `max_retry` and `initial_backoff` values
///
/// This is only recommend for local dev nodes
pub fn aggressive(self) -> Self {
self.max_retry(100).initial_backoff(100).local(true)
}
/// Sets the JWT secret
pub fn jwt(mut self, jwt: impl Into<String>) -> Self {
self.jwt = Some(jwt.into());
self
}
/// Sets http headers
pub fn headers(mut self, headers: Vec<String>) -> Self {
self.headers = headers;
self
}
/// Sets http headers. If `None`, defaults to the already-set value.
pub fn maybe_headers(mut self, headers: Option<Vec<String>>) -> Self {
self.headers = headers.unwrap_or(self.headers);
self
}
/// Sets whether to accept invalid certificates.
pub fn accept_invalid_certs(mut self, accept_invalid_certs: bool) -> Self {
self.accept_invalid_certs = accept_invalid_certs;
self
}
/// Constructs the `RetryProvider` taking all configs into account.
pub fn build(self) -> Result<RetryProvider> {
let Self {
url,
chain,
max_retry,
initial_backoff,
timeout,
compute_units_per_second,
jwt,
headers,
is_local,
accept_invalid_certs,
} = self;
let url = url?;
let retry_layer =
RetryBackoffLayer::new(max_retry, initial_backoff, compute_units_per_second);
let transport = RuntimeTransportBuilder::new(url)
.with_timeout(timeout)
.with_headers(headers)
.with_jwt(jwt)
.accept_invalid_certs(accept_invalid_certs)
.build();
let client = ClientBuilder::default().layer(retry_layer).transport(transport, is_local);
if !is_local {
client.set_poll_interval(
chain
.average_blocktime_hint()
// we cap the poll interval because if not provided, chain would default to
// mainnet
.map(|hint| hint.min(DEFAULT_UNKNOWN_CHAIN_BLOCK_TIME))
.unwrap_or(DEFAULT_UNKNOWN_CHAIN_BLOCK_TIME)
.mul_f32(POLL_INTERVAL_BLOCK_TIME_SCALE_FACTOR),
);
}
let provider = AlloyProviderBuilder::<_, _, AnyNetwork>::default()
.connect_provider(RootProvider::new(client));
Ok(provider)
}
/// Constructs the `RetryProvider` with a wallet.
pub fn build_with_wallet(self, wallet: EthereumWallet) -> Result<RetryProviderWithSigner> {
let Self {
url,
chain,
max_retry,
initial_backoff,
timeout,
compute_units_per_second,
jwt,
headers,
is_local,
accept_invalid_certs,
} = self;
let url = url?;
let retry_layer =
RetryBackoffLayer::new(max_retry, initial_backoff, compute_units_per_second);
let transport = RuntimeTransportBuilder::new(url)
.with_timeout(timeout)
.with_headers(headers)
.with_jwt(jwt)
.accept_invalid_certs(accept_invalid_certs)
.build();
let client = ClientBuilder::default().layer(retry_layer).transport(transport, is_local);
if !is_local {
client.set_poll_interval(
chain
.average_blocktime_hint()
// we cap the poll interval because if not provided, chain would default to
// mainnet
.map(|hint| hint.min(DEFAULT_UNKNOWN_CHAIN_BLOCK_TIME))
.unwrap_or(DEFAULT_UNKNOWN_CHAIN_BLOCK_TIME)
.mul_f32(POLL_INTERVAL_BLOCK_TIME_SCALE_FACTOR),
);
}
let provider = AlloyProviderBuilder::<_, _, AnyNetwork>::default()
.with_recommended_fillers()
.wallet(wallet)
.connect_provider(RootProvider::new(client));
Ok(provider)
}
}
#[cfg(not(windows))]
fn resolve_path(path: &Path) -> Result<PathBuf, ()> {
    // Absolute paths pass through untouched; relative ones are anchored to the
    // current working directory. A cwd lookup failure is collapsed to `()`.
    if path.is_absolute() {
        return Ok(path.to_path_buf());
    }
    std::env::current_dir().map(|cwd| cwd.join(path)).map_err(drop)
}
#[cfg(windows)]
fn resolve_path(path: &Path) -> Result<PathBuf, ()> {
    // On Windows only named-pipe paths (`\\.\pipe\...`) are accepted as IPC
    // endpoints; everything else is rejected so it isn't misread as a URL.
    if let Some(s) = path.to_str()
        && s.starts_with(r"\\.\pipe\")
    {
        return Ok(path.to_path_buf());
    }
    Err(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn can_auto_correct_missing_prefix() {
let builder = ProviderBuilder::new("localhost:8545");
assert!(builder.url.is_ok());
let url = builder.url.unwrap();
assert_eq!(url, Url::parse("http://localhost:8545").unwrap());
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/comments/mod.rs | crates/common/src/comments/mod.rs | use crate::iter::IterDelimited;
use solar::parse::{
ast::{CommentKind, Span},
interface::{BytePos, CharPos, SourceMap, source_map::SourceFile},
lexer::token::RawTokenKind as TokenKind,
};
use std::fmt;
mod comment;
pub use comment::{Comment, CommentStyle};
pub mod inline_config;
pub const DISABLE_START: &str = "forgefmt: disable-start";
pub const DISABLE_END: &str = "forgefmt: disable-end";
pub struct Comments {
comments: std::collections::VecDeque<Comment>,
}
impl fmt::Debug for Comments {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("Comments")?;
f.debug_list().entries(self.iter()).finish()
}
}
impl Comments {
pub fn new(
sf: &SourceFile,
sm: &SourceMap,
normalize_cmnts: bool,
group_cmnts: bool,
tab_width: Option<usize>,
) -> Self {
let gatherer = CommentGatherer::new(sf, sm, normalize_cmnts, tab_width).gather();
Self {
comments: if group_cmnts { gatherer.group().into() } else { gatherer.comments.into() },
}
}
pub fn peek(&self) -> Option<&Comment> {
self.comments.front()
}
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self) -> Option<Comment> {
self.comments.pop_front()
}
pub fn iter(&self) -> impl Iterator<Item = &Comment> {
self.comments.iter()
}
/// Adds a new comment at the beginning of the list.
///
/// Should only be used when comments are gathered scattered, and must be manually sorted.
///
/// **WARNING:** This struct works under the assumption that comments are always sorted by
/// ascending span position. It is the caller's responsibility to ensure that this premise
/// always holds true.
pub fn push_front(&mut self, cmnt: Comment) {
self.comments.push_front(cmnt)
}
/// Finds the first trailing comment on the same line as `span_pos`, allowing for `Mixed`
/// style comments to appear before it.
///
/// Returns the comment and its index in the buffer.
pub fn peek_trailing(
&self,
sm: &SourceMap,
span_pos: BytePos,
next_pos: Option<BytePos>,
) -> Option<(&Comment, usize)> {
let span_line = sm.lookup_char_pos(span_pos).line;
for (i, cmnt) in self.iter().enumerate() {
// If we have moved to the next line, we can stop.
let comment_line = sm.lookup_char_pos(cmnt.pos()).line;
if comment_line != span_line {
break;
}
// The comment must start after the given span position.
if cmnt.pos() < span_pos {
continue;
}
// The comment must be before the next element.
if cmnt.pos() >= next_pos.unwrap_or_else(|| cmnt.pos() + BytePos(1)) {
break;
}
// Stop when we find a trailing or a non-mixed comment
match cmnt.style {
CommentStyle::Mixed => continue,
CommentStyle::Trailing => return Some((cmnt, i)),
_ => break,
}
}
None
}
}
struct CommentGatherer<'ast> {
sf: &'ast SourceFile,
sm: &'ast SourceMap,
text: &'ast str,
start_bpos: BytePos,
pos: usize,
comments: Vec<Comment>,
code_to_the_left: bool,
disabled_block_depth: usize,
tab_width: Option<usize>,
}
impl<'ast> CommentGatherer<'ast> {
fn new(
sf: &'ast SourceFile,
sm: &'ast SourceMap,
normalize_cmnts: bool,
tab_width: Option<usize>,
) -> Self {
Self {
sf,
sm,
text: sf.src.as_str(),
start_bpos: sf.start_pos,
pos: 0,
comments: Vec::new(),
code_to_the_left: false,
disabled_block_depth: if normalize_cmnts { 0 } else { 1 },
tab_width,
}
}
/// Consumes the gatherer and returns the collected comments.
fn gather(mut self) -> Self {
for token in solar::parse::Cursor::new(&self.text[self.pos..]) {
self.process_token(token);
}
self
}
/// Post-processes a list of comments to group consecutive comments.
///
/// Necessary for properly indenting multi-line trailing comments, which would
/// otherwise be parsed as a `Trailing` followed by several `Isolated`.
fn group(self) -> Vec<Comment> {
let mut processed = Vec::new();
let mut cursor = self.comments.into_iter().peekable();
while let Some(mut current) = cursor.next() {
if current.kind == CommentKind::Line
&& (current.style.is_trailing() || current.style.is_isolated())
{
let mut ref_line = self.sm.lookup_char_pos(current.span.hi()).line;
while let Some(next_comment) = cursor.peek() {
if !next_comment.style.is_isolated()
|| next_comment.kind != CommentKind::Line
|| ref_line + 1 != self.sm.lookup_char_pos(next_comment.span.lo()).line
{
break;
}
let next_to_merge = cursor.next().unwrap();
current.lines.extend(next_to_merge.lines);
current.span = current.span.to(next_to_merge.span);
ref_line += 1;
}
}
processed.push(current);
}
processed
}
/// Creates a `Span` relative to the source file's start position.
fn make_span(&self, range: std::ops::Range<usize>) -> Span {
Span::new(self.start_bpos + range.start as u32, self.start_bpos + range.end as u32)
}
/// Processes a single token from the source.
///
/// Classifies whitespace runs, block comments, and line comments into
/// `self.comments`, keeps `code_to_the_left` / `disabled_block_depth` up to
/// date, and advances `self.pos` past the token.
fn process_token(&mut self, token: solar::parse::lexer::token::RawToken) {
    let token_range = self.pos..self.pos + token.len as usize;
    let span = self.make_span(token_range.clone());
    let token_text = &self.text[token_range];
    // Keep track of disabled blocks
    // NOTE(review): DISABLE_START/DISABLE_END are defined elsewhere in this file —
    // presumably the inline `disable-start`/`disable-end` markers.
    if token_text.trim_start().contains(DISABLE_START) {
        self.disabled_block_depth += 1;
    } else if token_text.trim_start().contains(DISABLE_END) {
        self.disabled_block_depth -= 1;
    }
    match token.kind {
        TokenKind::Whitespace => {
            // A whitespace run with 2+ newlines represents deliberate blank lines;
            // each extra newline is recorded as a zero-width `BlankLine` comment.
            if let Some(mut idx) = token_text.find('\n') {
                self.code_to_the_left = false;
                while let Some(next_newline) = token_text[idx + 1..].find('\n') {
                    idx += 1 + next_newline;
                    let pos = self.pos + idx;
                    self.comments.push(Comment {
                        is_doc: false,
                        kind: CommentKind::Line,
                        style: CommentStyle::BlankLine,
                        lines: vec![],
                        span: self.make_span(pos..pos),
                    });
                    // If not disabled, early-exit as we want only a single blank line.
                    if self.disabled_block_depth == 0 {
                        break;
                    }
                }
            }
        }
        TokenKind::BlockComment { is_doc, .. } => {
            // Code on the same line after the comment makes it `Mixed`; code before
            // it makes it `Trailing`; otherwise it stands alone (`Isolated`).
            let code_to_the_right = !matches!(
                self.text[self.pos + token.len as usize..].chars().next(),
                Some('\r' | '\n')
            );
            let style = match (self.code_to_the_left, code_to_the_right) {
                (_, true) => CommentStyle::Mixed,
                (false, false) => CommentStyle::Isolated,
                (true, false) => CommentStyle::Trailing,
            };
            let kind = CommentKind::Block;
            // Count the number of chars since the start of the line by rescanning.
            let pos_in_file = self.start_bpos + BytePos(self.pos as u32);
            let line_begin_in_file = line_begin_pos(self.sf, pos_in_file);
            let line_begin_pos = (line_begin_in_file - self.start_bpos).to_usize();
            let mut col = CharPos(self.text[line_begin_pos..self.pos].chars().count());
            // To preserve alignment in multi-line non-doc comments, normalize the block based
            // on its least-indented line.
            if !is_doc && token_text.contains('\n') {
                col = token_text.lines().skip(1).fold(col, |min, line| {
                    if line.is_empty() {
                        return min;
                    }
                    std::cmp::min(
                        CharPos(line.chars().count() - line.trim_start().chars().count()),
                        min,
                    )
                })
            };
            let lines = self.split_block_comment_into_lines(token_text, is_doc, col);
            self.comments.push(Comment { is_doc, kind, style, lines, span })
        }
        TokenKind::LineComment { is_doc } => {
            // Inside a disabled block the comment text is preserved verbatim,
            // including any trailing whitespace.
            let line =
                if self.disabled_block_depth != 0 { token_text } else { token_text.trim_end() };
            self.comments.push(Comment {
                is_doc,
                kind: CommentKind::Line,
                style: if self.code_to_the_left {
                    CommentStyle::Trailing
                } else {
                    CommentStyle::Isolated
                },
                lines: vec![line.into()],
                span,
            });
        }
        _ => {
            // Any non-comment, non-whitespace token means subsequent comments on
            // this line have code to their left.
            self.code_to_the_left = true;
        }
    }
    self.pos += token.len as usize;
}
/// Splits a block comment into lines, ensuring that each line is properly formatted.
///
/// For doc comments, the `/**` opener and `*/` closer are isolated on their own
/// lines and interior lines receive the ` *` decorator. `col` is the column the
/// comment starts at, used to strip the shared indentation from continuation
/// lines. Returns the comment verbatim as a single entry when formatting is
/// disabled.
fn split_block_comment_into_lines(
    &self,
    text: &str,
    is_doc: bool,
    col: CharPos,
) -> Vec<String> {
    // if formatting is disabled, return as is
    if self.disabled_block_depth != 0 {
        return vec![text.into()];
    }
    let mut res: Vec<String> = vec![];
    let mut lines = text.lines();
    // The first line is special: it carries the `/**` (or `/*`) opener.
    if let Some(line) = lines.next() {
        let line = line.trim_end();
        // Ensure first line of a doc comment only has the `/**` decorator
        if is_doc && let Some((_, second)) = line.split_once("/**") {
            res.push("/**".to_string());
            if !second.trim().is_empty() {
                let line = normalize_block_comment_ws(second, col).trim_end();
                // Ensure last line of a doc comment only has the `*/` decorator
                // (handles single-line `/** content */` comments).
                if let Some((first, _)) = line.split_once("*/") {
                    if !first.trim().is_empty() {
                        res.push(format_doc_block_comment(first.trim_end(), self.tab_width));
                    }
                    res.push(" */".to_string());
                } else {
                    res.push(format_doc_block_comment(line.trim_end(), self.tab_width));
                }
            }
        } else {
            res.push(line.to_string());
        }
    }
    // Remaining lines: strip the shared indent; doc comments also get the
    // ` *` decorator and a dedicated ` */` closing line.
    for (pos, line) in lines.delimited() {
        let line = normalize_block_comment_ws(line, col).trim_end().to_string();
        if !is_doc {
            res.push(line);
            continue;
        }
        if !pos.is_last {
            res.push(format_doc_block_comment(&line, self.tab_width));
        } else {
            // Ensure last line of a doc comment only has the `*/` decorator
            if let Some((first, _)) = line.split_once("*/")
                && !first.trim().is_empty()
            {
                res.push(format_doc_block_comment(first.trim_end(), self.tab_width));
            }
            res.push(" */".to_string());
        }
    }
    res
}
}
/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
/// Otherwise returns `Some(k)` where `k` is the first byte offset after that
/// leading whitespace (`Some(0)` for an empty prefix).
fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
    // Walk at most `col` chars; bail with `None` on the first non-whitespace one,
    // otherwise keep the offset just past the last whitespace char seen.
    s.char_indices()
        .take(col.to_usize())
        .try_fold(0, |_, (i, ch)| ch.is_whitespace().then_some(i + ch.len_utf8()))
}
/// Returns `Some(k)` where `k` is the byte offset of the first non-whitespace char. Returns `k = 0`
/// if `s` starts with a non-whitespace char. If `s` only contains whitespaces, returns `None`.
fn first_non_whitespace(s: &str) -> Option<usize> {
    // `str::find` yields exactly the byte offset of the first matching char.
    //
    // The previous hand-rolled loop returned `i + 1 - len`, where `len` was the
    // UTF-8 width of the *last* whitespace char seen. That is only correct for
    // single-byte whitespace; for multi-byte whitespace (e.g. U+00A0 NBSP) it
    // produced an offset inside a char boundary, making `normalize_block_comment_ws`
    // panic when slicing `&s[col..]`.
    s.find(|c: char| !c.is_whitespace())
}
/// Returns a slice of `s` with its leading-whitespace prefix removed, based on `col`.
/// If the first `col` chars are all whitespace, the slice starts right after them;
/// otherwise the slice starts at the first non-whitespace char.
fn normalize_block_comment_ws(s: &str, col: CharPos) -> &str {
    match all_whitespace(s, col) {
        // The whole prefix up to `col` is whitespace: drop it.
        Some(ws_end) if ws_end < s.len() => &s[ws_end..],
        Some(_) => "",
        // Mixed prefix: fall back to stripping up to the first non-whitespace char,
        // or return `s` unchanged when it is whitespace-only.
        None => match first_non_whitespace(s) {
            Some(start) => &s[start..],
            None => s,
        },
    }
}
/// Formats one interior line of a doc block comment so it carries the ` *` decorator.
///
/// When `tab_width` is set, the line's leading whitespace after the `*` is
/// re-normalized into tabs via [`line_with_tabs`].
fn format_doc_block_comment(line: &str, tab_width: Option<usize>) -> String {
    if line.is_empty() {
        return " *".to_string();
    }
    match line.split_once('*') {
        // Line is just a bare `*` (possibly with leading whitespace).
        Some((_, rest)) if rest.is_empty() => " *".to_string(),
        // Line already has a `*` decorator: keep everything after it.
        Some((_, rest)) => match tab_width {
            Some(width) => {
                let mut normalized = String::from(" *");
                line_with_tabs(&mut normalized, rest, width, Some(Consolidation::MinOneTab));
                normalized
            }
            None => format!(" *{rest}"),
        },
        // No `*` decorator yet: prepend one.
        None => match tab_width {
            Some(width) => {
                let mut normalized = String::from(" *\t");
                line_with_tabs(&mut normalized, line, width, Some(Consolidation::WithoutSpaces));
                normalized
            }
            None => format!(" * {line}"),
        },
    }
}
/// Strategies for consolidating leftover spaces when converting indentation to tabs.
pub enum Consolidation {
    /// Promote any leftover spaces to a whole tab, guaranteeing at least one tab
    /// whenever there is leading whitespace.
    MinOneTab,
    /// Round leftover spaces up to one extra tab so the indent contains no spaces.
    WithoutSpaces,
}

/// Normalizes the leading whitespace of a string slice according to a given tab width.
///
/// It aggregates and converts leading whitespace (spaces and tabs) into a representation that
/// maximizes the amount of tabs, appending the result plus the rest of the line to `output`.
pub fn line_with_tabs(
    output: &mut String,
    line: &str,
    tab_width: usize,
    strategy: Option<Consolidation>,
) {
    // Split off the leading run of spaces and tabs.
    let ws_len = line.find(|c: char| c != ' ' && c != '\t').unwrap_or(line.len());
    let (indent, body) = line.split_at(ws_len);

    // Display width of the indent: a space is one column, a tab is `tab_width`.
    let width: usize = indent.chars().map(|c| if c == ' ' { 1 } else { tab_width }).sum();
    let mut num_tabs = width / tab_width;
    let mut num_spaces = width % tab_width;

    // Consolidate leftover spaces according to the requested strategy.
    match strategy {
        Some(Consolidation::MinOneTab) => {
            if num_spaces > 0 {
                num_tabs = if num_tabs == 0 { 1 } else { num_tabs + 1 };
                num_spaces = 0;
            }
        }
        Some(Consolidation::WithoutSpaces) => {
            if num_spaces > 0 {
                num_tabs += 1;
                num_spaces = 0;
            }
        }
        None => {}
    }

    // Emit the normalized indentation followed by the rest of the line.
    output.push_str(&"\t".repeat(num_tabs));
    output.push_str(&" ".repeat(num_spaces));
    output.push_str(body);
}
/// Estimates the display width of a string, counting each tab as `tab_width`
/// columns and every other char as one column.
pub fn estimate_line_width(line: &str, tab_width: usize) -> usize {
    line.chars().map(|c| if c == '\t' { tab_width } else { 1 }).sum()
}
/// Returns the `BytePos` of the beginning of the current line.
///
/// Works in file-relative coordinates to locate the line, then maps the line's
/// start back to an absolute source-map position.
fn line_begin_pos(sf: &SourceFile, pos: BytePos) -> BytePos {
    let pos = sf.relative_position(pos);
    // `unwrap`: assumes `pos` lies within `sf` — TODO(review) confirm callers uphold this.
    let line_index = sf.lookup_line(pos).unwrap();
    let line_start_pos = sf.lines()[line_index];
    sf.absolute_position(line_start_pos)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/comments/inline_config.rs | crates/common/src/comments/inline_config.rs | use solar::{
interface::{BytePos, RelativeBytePos, SourceMap, Span},
parse::ast::{self, Visit},
};
use std::{
collections::{HashMap, hash_map::Entry},
hash::Hash,
ops::ControlFlow,
};
/// A disabled formatting range.
#[derive(Debug, Clone, Copy)]
struct DisabledRange<T = BytePos> {
    /// Start position, inclusive.
    lo: T,
    /// End position, inclusive.
    hi: T,
}

impl DisabledRange<BytePos> {
    /// Returns `true` if `span` is entirely contained in this range
    /// (both endpoints inside; partial overlap does not count).
    fn includes(&self, span: Span) -> bool {
        span.lo() >= self.lo && span.hi() <= self.hi
    }
}
/// An inline config item
///
/// The generic parameter `I` carries the ids the item applies to: `()` when ids
/// are not used, `Vec<String>` for per-id (e.g. per-lint) configuration.
#[derive(Clone, Debug)]
pub enum InlineConfigItem<I> {
    /// Disables the next code (AST) item regardless of newlines
    DisableNextItem(I),
    /// Disables formatting on the current line
    DisableLine(I),
    /// Disables formatting between the next newline and the newline after
    DisableNextLine(I),
    /// Disables formatting for any code that follows this and before the next "disable-end"
    DisableStart(I),
    /// Disables formatting for any code that precedes this and after the previous "disable-start"
    DisableEnd(I),
}
impl InlineConfigItem<Vec<String>> {
    /// Parses an inline config item from a string, validating the parenthesized
    /// id list against `available_ids`.
    ///
    /// A missing id list or `(all)` is treated as the special `"all"` id.
    pub fn parse(s: &str, available_ids: &[&str]) -> Result<Self, InvalidInlineConfigItem> {
        let (disable, relevant) = s.split_once('(').unwrap_or((s, ""));
        let ids = if relevant.is_empty() || relevant == "all)" {
            vec!["all".to_string()]
        } else if let Some((id_str, _)) = relevant.split_once(')') {
            id_str.split(',').map(|id| id.trim().to_string()).collect()
        } else {
            // Opening parenthesis without a matching close.
            return Err(InvalidInlineConfigItem::Syntax(s.into()));
        };
        // Reject any id that is neither "all" nor one of the available ids.
        let invalid_ids: Vec<String> = ids
            .iter()
            .filter(|id| id.as_str() != "all" && !available_ids.contains(&id.as_str()))
            .cloned()
            .collect();
        if !invalid_ids.is_empty() {
            return Err(InvalidInlineConfigItem::Ids(invalid_ids));
        }
        match disable {
            "disable-next-item" => Ok(Self::DisableNextItem(ids)),
            "disable-line" => Ok(Self::DisableLine(ids)),
            "disable-next-line" => Ok(Self::DisableNextLine(ids)),
            "disable-start" => Ok(Self::DisableStart(ids)),
            "disable-end" => Ok(Self::DisableEnd(ids)),
            other => Err(InvalidInlineConfigItem::Syntax(other.into())),
        }
    }
}
impl std::str::FromStr for InlineConfigItem<()> {
    type Err = InvalidInlineConfigItem;

    /// Parses a bare (id-less) inline config item. Unlike
    /// [`InlineConfigItem::parse`], no parenthesized id list is accepted.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "disable-next-item" => Self::DisableNextItem(()),
            "disable-line" => Self::DisableLine(()),
            "disable-next-line" => Self::DisableNextLine(()),
            "disable-start" => Self::DisableStart(()),
            "disable-end" => Self::DisableEnd(()),
            s => return Err(InvalidInlineConfigItem::Syntax(s.into())),
        })
    }
}
/// Errors produced while parsing an inline config item.
#[derive(Debug)]
pub enum InvalidInlineConfigItem {
    /// The item keyword or its parenthesized id list is malformed.
    Syntax(String),
    /// One or more referenced ids are not among the available ids.
    Ids(Vec<String>),
}

impl std::fmt::Display for InvalidInlineConfigItem {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Syntax(s) => write!(f, "invalid inline config item: {s}"),
            Self::Ids(ids) => {
                write!(f, "unknown id: '{}'", ids.join("', '"))
            }
        }
    }
}
/// A trait for `InlineConfigItem` types that can be iterated over to produce keys for storage.
pub trait ItemIdIterator {
    /// The key type under which disabled ranges are stored.
    type Item: Eq + Hash + Clone;
    /// Yields every storage key this value represents.
    fn into_iter(self) -> impl IntoIterator<Item = Self::Item>;
}

// Id-less configuration: everything is keyed under the single unit key.
impl ItemIdIterator for () {
    type Item = ();
    fn into_iter(self) -> impl IntoIterator<Item = Self::Item> {
        std::iter::once(())
    }
}

// Multi-id configuration: each string id is its own key.
impl ItemIdIterator for Vec<String> {
    type Item = String;
    fn into_iter(self) -> impl IntoIterator<Item = Self::Item> {
        self
    }
}
/// Per-id collection of source ranges in which formatting/linting is disabled.
#[derive(Debug, Default)]
pub struct InlineConfig<I: ItemIdIterator> {
    /// Disabled ranges, keyed by id (`()` when ids are not used).
    disabled_ranges: HashMap<I::Item, Vec<DisabledRange>>,
}
impl<I: ItemIdIterator> InlineConfig<I> {
    /// Build a new inline config with an iterator of inline config items and their locations in a
    /// source file.
    ///
    /// # Panics
    ///
    /// Panics if `items` is not sorted in ascending order of [`Span`]s.
    pub fn from_ast<'ast>(
        items: impl IntoIterator<Item = (Span, InlineConfigItem<I>)>,
        ast: &'ast ast::SourceUnit<'ast>,
        source_map: &SourceMap,
    ) -> Self {
        Self::build(items, source_map, |offset| NextItemFinder::new(offset).find(ast))
    }

    /// Shared construction logic; `find_next_item` resolves the AST item that
    /// begins after a given position (used by `disable-next-item`).
    fn build(
        items: impl IntoIterator<Item = (Span, InlineConfigItem<I>)>,
        source_map: &SourceMap,
        mut find_next_item: impl FnMut(BytePos) -> Option<Span>,
    ) -> Self {
        let mut cfg = Self::new();
        let mut disabled_blocks = HashMap::new();
        let mut prev_sp = Span::DUMMY;
        for (sp, item) in items {
            // The sorted-input contract is only enforced in debug builds.
            if cfg!(debug_assertions) {
                assert!(sp >= prev_sp, "InlineConfig::new: unsorted items: {sp:?} < {prev_sp:?}");
                prev_sp = sp;
            }
            cfg.disable_item(sp, item, source_map, &mut disabled_blocks, &mut find_next_item);
        }
        // Blocks still open at this point were never closed by a `disable-end`;
        // their `hi` was pre-set to the end of the file when they were opened.
        for (id, (_, lo, hi)) in disabled_blocks {
            cfg.disable(id, DisabledRange { lo, hi });
        }
        cfg
    }

    fn new() -> Self {
        Self { disabled_ranges: HashMap::new() }
    }

    /// Records `range` as disabled for every id yielded by `ids`.
    fn disable_many(&mut self, ids: I, range: DisabledRange) {
        for id in ids.into_iter() {
            self.disable(id, range);
        }
    }

    /// Records `range` as disabled for a single id.
    fn disable(&mut self, id: I::Item, range: DisabledRange) {
        self.disabled_ranges.entry(id).or_default().push(range);
    }

    /// Translates one parsed config item into disabled ranges.
    ///
    /// `disabled_blocks` tracks the currently open `disable-start` blocks per id
    /// as `(nesting depth, start position, fallback end position)`.
    fn disable_item(
        &mut self,
        span: Span,
        item: InlineConfigItem<I>,
        source_map: &SourceMap,
        disabled_blocks: &mut HashMap<I::Item, (usize, BytePos, BytePos)>,
        find_next_item: &mut dyn FnMut(BytePos) -> Option<Span>,
    ) {
        let result = source_map.span_to_source(span).unwrap();
        let file = result.file;
        let comment_range = result.data;
        let src = file.src.as_str();
        match item {
            InlineConfigItem::DisableNextItem(ids) => {
                // No-op when there is no item after the comment.
                if let Some(next_item) = find_next_item(span.hi()) {
                    self.disable_many(
                        ids,
                        DisabledRange { lo: next_item.lo(), hi: next_item.hi() },
                    );
                }
            }
            InlineConfigItem::DisableLine(ids) => {
                // Range spans from the newline preceding the comment (or file start)
                // to the newline ending the comment's line (or file end).
                let start = src[..comment_range.start].rfind('\n').map_or(0, |i| i);
                let end = src[comment_range.end..]
                    .find('\n')
                    .map_or(src.len(), |i| comment_range.end + i);
                self.disable_many(
                    ids,
                    DisabledRange {
                        lo: file.absolute_position(RelativeBytePos::from_usize(start)),
                        hi: file.absolute_position(RelativeBytePos::from_usize(end)),
                    },
                );
            }
            InlineConfigItem::DisableNextLine(ids) => {
                if let Some(offset) = src[comment_range.end..].find('\n') {
                    let next_line = comment_range.end + offset + 1;
                    // Nothing to disable when the comment sits on the last line.
                    if next_line < src.len() {
                        let end = src[next_line..].find('\n').map_or(src.len(), |i| next_line + i);
                        self.disable_many(
                            ids,
                            DisabledRange {
                                lo: file.absolute_position(RelativeBytePos::from_usize(
                                    comment_range.start,
                                )),
                                hi: file.absolute_position(RelativeBytePos::from_usize(end)),
                            },
                        );
                    }
                }
            }
            InlineConfigItem::DisableStart(ids) => {
                for id in ids.into_iter() {
                    // A nested `disable-start` for the same id only bumps the depth.
                    disabled_blocks.entry(id).and_modify(|(depth, _, _)| *depth += 1).or_insert((
                        1,
                        span.lo(),
                        // Use file end as fallback for unclosed blocks
                        file.absolute_position(RelativeBytePos::from_usize(src.len())),
                    ));
                }
            }
            InlineConfigItem::DisableEnd(ids) => {
                for id in ids.into_iter() {
                    // `disable-end` without a matching `disable-start` is ignored.
                    if let Entry::Occupied(mut entry) = disabled_blocks.entry(id) {
                        let (depth, lo, _) = entry.get_mut();
                        *depth = depth.saturating_sub(1);
                        // Only the outermost `disable-end` closes the block.
                        if *depth == 0 {
                            let lo = *lo;
                            let (id, _) = entry.remove_entry();
                            self.disable(id, DisabledRange { lo, hi: span.hi() });
                        }
                    }
                }
            }
        }
    }
}
impl InlineConfig<()> {
    /// Checks whether `span` falls entirely inside any disabled range.
    /// Only applicable to the id-less configuration.
    pub fn is_disabled(&self, span: Span) -> bool {
        self.disabled_ranges
            .get(&())
            .is_some_and(|ranges| ranges.iter().any(|range| range.includes(span)))
    }
}
impl<I: ItemIdIterator> InlineConfig<I>
where
    I::Item: std::borrow::Borrow<str>,
{
    /// Checks if a span is disabled for a specific id, or for the special "all"
    /// id which disables every rule.
    pub fn is_id_disabled(&self, span: Span, id: &str) -> bool {
        if self.is_id_disabled_inner(span, id) {
            return true;
        }
        // Avoid a redundant second lookup when the caller already asked for "all".
        id != "all" && self.is_id_disabled_inner(span, "all")
    }

    /// Checks a single id's ranges for full containment of `span`.
    fn is_id_disabled_inner(&self, span: Span, id: &str) -> bool {
        self.disabled_ranges
            .get(id)
            .is_some_and(|ranges| ranges.iter().any(|range| range.includes(span)))
    }
}
/// Shared visitor step: skip nodes entirely before `offset`, break with the span
/// of the first node starting after it, and otherwise recurse into the node.
macro_rules! find_next_item {
    ($self:expr, $x:expr, $span:expr, $walk:ident) => {{
        let span = $span;
        // If the item is *entirely* before the offset, skip traversing it.
        if span.hi() < $self.offset {
            return ControlFlow::Continue(());
        }
        // Check if this item starts after the offset.
        if span.lo() > $self.offset {
            return ControlFlow::Break(span);
        }
        // Otherwise, continue traversing inside this item.
        $self.$walk($x)
    }};
}
/// An AST visitor that finds the first `Item` that starts after a given offset.
#[derive(Debug)]
struct NextItemFinder {
    /// The offset to search after.
    offset: BytePos,
}

impl NextItemFinder {
    fn new(offset: BytePos) -> Self {
        Self { offset }
    }

    /// Finds the next AST item or statement with a span that begins after the `offset`.
    /// Returns `None` when no such node exists.
    fn find<'ast>(&mut self, ast: &'ast ast::SourceUnit<'ast>) -> Option<Span> {
        match self.visit_source_unit(ast) {
            ControlFlow::Break(span) => Some(span),
            ControlFlow::Continue(()) => None,
        }
    }
}
/// Breaks with the span of the first item, statement, or Yul statement that
/// starts after `self.offset`; continues the traversal otherwise.
impl<'ast> ast::Visit<'ast> for NextItemFinder {
    type BreakValue = Span;

    fn visit_item(&mut self, item: &'ast ast::Item<'ast>) -> ControlFlow<Self::BreakValue> {
        find_next_item!(self, item, item.span, walk_item)
    }

    fn visit_stmt(&mut self, stmt: &'ast ast::Stmt<'ast>) -> ControlFlow<Self::BreakValue> {
        find_next_item!(self, stmt, stmt.span, walk_stmt)
    }

    fn visit_yul_stmt(
        &mut self,
        stmt: &'ast ast::yul::Stmt<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        find_next_item!(self, stmt, stmt.span, walk_yul_stmt)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Test-only helpers for building `DisabledRange`s from plain `usize` offsets.
    impl DisabledRange<usize> {
        fn to_byte_pos(self) -> DisabledRange<BytePos> {
            DisabledRange::<BytePos> {
                lo: BytePos::from_usize(self.lo),
                hi: BytePos::from_usize(self.hi),
            }
        }

        fn includes(&self, range: std::ops::Range<usize>) -> bool {
            self.to_byte_pos().includes(Span::new(
                BytePos::from_usize(range.start),
                BytePos::from_usize(range.end),
            ))
        }
    }

    #[test]
    fn test_disabled_range_includes() {
        let strict = DisabledRange { lo: 10, hi: 20 };
        assert!(strict.includes(10..20));
        assert!(strict.includes(12..18));
        assert!(!strict.includes(5..15)); // Partial overlap fails
    }

    #[test]
    fn test_inline_config_item_from_str() {
        assert!(matches!(
            "disable-next-item".parse::<InlineConfigItem<()>>().unwrap(),
            InlineConfigItem::DisableNextItem(())
        ));
        assert!(matches!(
            "disable-line".parse::<InlineConfigItem<()>>().unwrap(),
            InlineConfigItem::DisableLine(())
        ));
        assert!(matches!(
            "disable-start".parse::<InlineConfigItem<()>>().unwrap(),
            InlineConfigItem::DisableStart(())
        ));
        assert!(matches!(
            "disable-end".parse::<InlineConfigItem<()>>().unwrap(),
            InlineConfigItem::DisableEnd(())
        ));
        assert!("invalid".parse::<InlineConfigItem<()>>().is_err());
    }

    #[test]
    fn test_inline_config_item_parse_with_lints() {
        let lint_ids = vec!["lint1", "lint2"];
        // No lints = "all"
        match InlineConfigItem::parse("disable-line", &lint_ids).unwrap() {
            InlineConfigItem::DisableLine(lints) => assert_eq!(lints, vec!["all"]),
            _ => panic!("Wrong type"),
        }
        // Valid single lint
        match InlineConfigItem::parse("disable-start(lint1)", &lint_ids).unwrap() {
            InlineConfigItem::DisableStart(lints) => assert_eq!(lints, vec!["lint1"]),
            _ => panic!("Wrong type"),
        }
        // Multiple lints with spaces
        match InlineConfigItem::parse("disable-end(lint1, lint2)", &lint_ids).unwrap() {
            InlineConfigItem::DisableEnd(lints) => assert_eq!(lints, vec!["lint1", "lint2"]),
            _ => panic!("Wrong type"),
        }
        // Invalid lint ID
        assert!(matches!(
            InlineConfigItem::parse("disable-line(unknown)", &lint_ids),
            Err(InvalidInlineConfigItem::Ids(_))
        ));
        // Malformed syntax
        assert!(matches!(
            InlineConfigItem::parse("disable-line(lint1", &lint_ids),
            Err(InvalidInlineConfigItem::Syntax(_))
        ));
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/comments/comment.rs | crates/common/src/comments/comment.rs | //! Modified from [`rustc_ast::util::comments`](https://github.com/rust-lang/rust/blob/07d3fd1d9b9c1f07475b96a9d168564bf528db68/compiler/rustc_ast/src/util/comments.rs).
use solar::parse::{
ast::{CommentKind, Span},
interface::BytePos,
};
/// Where a comment sits relative to the surrounding code.
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum CommentStyle {
    /// No code on either side of each line of the comment
    Isolated,
    /// Code exists to the left of the comment
    Trailing,
    /// Code both before and after the comment on the same line (`code /* foo */ code`)
    Mixed,
    /// Just a manual blank line "\n\n", for layout
    BlankLine,
}

impl CommentStyle {
    /// Whether this is a [`CommentStyle::Mixed`] comment.
    pub fn is_mixed(&self) -> bool {
        *self == Self::Mixed
    }

    /// Whether this is a [`CommentStyle::Trailing`] comment.
    pub fn is_trailing(&self) -> bool {
        *self == Self::Trailing
    }

    /// Whether this is a [`CommentStyle::Isolated`] comment.
    pub fn is_isolated(&self) -> bool {
        *self == Self::Isolated
    }

    /// Whether this is a [`CommentStyle::BlankLine`] marker.
    pub fn is_blank(&self) -> bool {
        *self == Self::BlankLine
    }
}
/// A source comment (or blank-line marker) collected during formatting.
#[derive(Clone, Debug)]
pub struct Comment {
    /// The comment's text, split into lines (empty for blank-line markers).
    pub lines: Vec<String>,
    /// The comment's location in the original source.
    pub span: Span,
    /// Placement relative to surrounding code (isolated/trailing/mixed/blank).
    pub style: CommentStyle,
    /// Whether this is a doc comment.
    pub is_doc: bool,
    /// Whether this is a line or block comment.
    pub kind: CommentKind,
}
impl Comment {
    /// The position at which the comment begins.
    pub fn pos(&self) -> BytePos {
        self.span.lo()
    }

    /// The opening decorator for this comment, or `None` when it has no lines.
    pub fn prefix(&self) -> Option<&'static str> {
        if self.lines.is_empty() {
            return None;
        }
        let prefix = match (self.kind, self.is_doc) {
            (CommentKind::Line, false) => "//",
            (CommentKind::Line, true) => "///",
            (CommentKind::Block, false) => "/*",
            (CommentKind::Block, true) => "/**",
        };
        Some(prefix)
    }

    /// The closing decorator for this comment; only block comments have one.
    pub fn suffix(&self) -> Option<&'static str> {
        if self.lines.is_empty() {
            return None;
        }
        matches!(self.kind, CommentKind::Block).then_some("*/")
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/io/stdin.rs | crates/common/src/io/stdin.rs | //! Utility functions for reading from [`stdin`](std::io::stdin).
use eyre::Result;
use std::{
error::Error as StdError,
io::{self, BufRead, Read},
str::FromStr,
};
/// Unwraps the given `Option<T>` or [reads stdin into a String](read) and parses it as `T`.
/// Unwraps the given `Option<T>`, or [reads stdin into a String](read) and
/// parses it as `T` when the value is missing.
pub fn unwrap<T>(value: Option<T>, read_line: bool) -> Result<T>
where
    T: FromStr,
    T::Err: StdError + Send + Sync + 'static,
{
    // Fall back to parsing stdin only when no value was provided.
    value.map_or_else(|| parse(read_line), Ok)
}
/// Shortcut for `(unwrap(a), unwrap(b))`.
///
/// If either value is missing, each missing one is read from its own stdin line.
pub fn unwrap2<A, B>(a: Option<A>, b: Option<B>) -> Result<(A, B)>
where
    A: FromStr,
    B: FromStr,
    A::Err: StdError + Send + Sync + 'static,
    B::Err: StdError + Send + Sync + 'static,
{
    match (a, b) {
        (Some(a), Some(b)) => Ok((a, b)),
        (a, b) => Ok((unwrap(a, true)?, unwrap(b, true)?)),
    }
}
/// Returns `value` unchanged when non-empty; otherwise [reads stdin into a
/// String](read) and parses it as whitespace-separated `T`s.
pub fn unwrap_vec<T>(value: Vec<T>) -> Result<Vec<T>>
where
    T: FromStr,
    T::Err: StdError + Send + Sync + 'static,
{
    if !value.is_empty() {
        return Ok(value);
    }
    // Parsing short-circuits on the first invalid element.
    read(false)?
        .split_whitespace()
        .map(FromStr::from_str)
        .collect::<Result<Vec<T>, _>>()
        .map_err(Into::into)
}
/// Short-hand for `unwrap(value, true)`: a missing value is read from a single
/// stdin line.
pub fn unwrap_line<T>(value: Option<T>) -> Result<T>
where
    T: FromStr,
    T::Err: StdError + Send + Sync + 'static,
{
    unwrap(value, true)
}
/// [Reads stdin into a String](read) and parses it as `T`.
///
/// If `read_line` is true, stop at the first newline (the `0xA` byte).
pub fn parse<T>(read_line: bool) -> Result<T>
where
    T: FromStr,
    T::Err: StdError + Send + Sync + 'static,
{
    read(read_line).and_then(|s| s.parse().map_err(Into::into))
}
/// Short-hand for `parse(true)`: reads a single stdin line and parses it as `T`.
pub fn parse_line<T>() -> Result<T>
where
    T: FromStr,
    T::Err: StdError + Send + Sync + 'static,
{
    parse(true)
}
/// Reads bytes from [`stdin`][io::stdin] into a String.
///
/// If `read_line` is true, stop at the first newline (the `0xA` byte).
pub fn read(read_line: bool) -> Result<String> {
    let bytes = read_bytes(read_line)?;
    if read_line {
        // SAFETY: in line mode, `read_bytes` builds the buffer via
        // `BufRead::read_line`, which appends into a `String`, so the bytes are
        // guaranteed to be valid UTF-8.
        Ok(unsafe { String::from_utf8_unchecked(bytes) })
    } else {
        // Raw mode reads arbitrary bytes, so UTF-8 must be checked here.
        String::from_utf8(bytes).map_err(Into::into)
    }
}
/// Reads bytes from [`stdin`][io::stdin].
///
/// If `read_line` is true, read up to the first newline, which is excluded
/// (along with a preceding `\r` for CRLF input).
pub fn read_bytes(read_line: bool) -> Result<Vec<u8>> {
    let mut stdin = io::stdin().lock();
    if !read_line {
        // Raw mode: consume stdin until EOF.
        let mut buf = Vec::new();
        stdin.read_to_end(&mut buf)?;
        return Ok(buf);
    }
    let mut line = String::new();
    stdin.read_line(&mut line)?;
    // Strip the trailing newline, handling both `\n` and `\r\n` endings.
    if line.ends_with('\n') {
        line.pop();
        if line.ends_with('\r') {
            line.pop();
        }
    }
    Ok(line.into_bytes())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/io/shell.rs | crates/common/src/io/shell.rs | //! Utility functions for writing to [`stdout`](std::io::stdout) and [`stderr`](std::io::stderr).
//!
//! Originally from [cargo](https://github.com/rust-lang/cargo/blob/35814255a1dbaeca9219fae81d37a8190050092c/src/cargo/core/shell.rs).
use super::style::*;
use anstream::AutoStream;
use anstyle::Style;
use clap::ValueEnum;
use eyre::Result;
use serde::{Deserialize, Serialize};
use std::{
fmt,
io::{IsTerminal, prelude::*},
ops::DerefMut,
sync::{
Mutex, OnceLock, PoisonError,
atomic::{AtomicBool, Ordering},
},
};
/// Returns the current color choice of the global shell.
pub fn color_choice() -> ColorChoice {
    Shell::get().color_choice()
}

/// Returns the currently set verbosity level of the global shell.
pub fn verbosity() -> Verbosity {
    Shell::get().verbosity()
}

/// Sets the verbosity level on the global shell.
pub fn set_verbosity(verbosity: Verbosity) {
    Shell::get().set_verbosity(verbosity);
}

/// Returns whether the output mode is [`OutputMode::Quiet`].
pub fn is_quiet() -> bool {
    Shell::get().output_mode().is_quiet()
}

/// Returns whether the output format is [`OutputFormat::Json`].
pub fn is_json() -> bool {
    Shell::get().is_json()
}

/// Returns whether the output format is [`OutputFormat::Markdown`].
pub fn is_markdown() -> bool {
    Shell::get().is_markdown()
}

/// The global shell instance, lazily initialized on first access via [`Shell::get`].
static GLOBAL_SHELL: OnceLock<Mutex<Shell>> = OnceLock::new();
/// The requested output mode.
#[derive(Debug, Default, Clone, Copy, PartialEq)]
pub enum OutputMode {
    /// Default output
    #[default]
    Normal,
    /// No output
    Quiet,
}

impl OutputMode {
    /// Returns true if the output mode is `Normal`.
    pub fn is_normal(self) -> bool {
        matches!(self, Self::Normal)
    }

    /// Returns true if the output mode is `Quiet`.
    pub fn is_quiet(self) -> bool {
        matches!(self, Self::Quiet)
    }
}
/// The requested output format.
#[derive(Debug, Default, Clone, Copy, PartialEq)]
pub enum OutputFormat {
    /// Plain text output.
    #[default]
    Text,
    /// JSON output.
    Json,
    /// Plain text with markdown tables.
    Markdown,
}

impl OutputFormat {
    /// Returns true if the output format is `Text`.
    pub fn is_text(self) -> bool {
        matches!(self, Self::Text)
    }

    /// Returns true if the output format is `Json`.
    pub fn is_json(self) -> bool {
        matches!(self, Self::Json)
    }

    /// Returns true if the output format is `Markdown`.
    pub fn is_markdown(self) -> bool {
        matches!(self, Self::Markdown)
    }
}
/// The verbosity level.
///
/// Defaults to `0` via `u8::default()`; higher values request more verbose output.
pub type Verbosity = u8;
/// An abstraction around console output that remembers preferences for output
/// verbosity and color.
///
/// Normally accessed through the process-wide instance via [`Shell::get`].
pub struct Shell {
    /// Wrapper around stdout/stderr. This helps with supporting sending
    /// output to a memory buffer which is useful for tests.
    output: ShellOut,
    /// The format to use for message output.
    output_format: OutputFormat,
    /// The verbosity mode to use for message output.
    output_mode: OutputMode,
    /// The verbosity level to use for message output.
    verbosity: Verbosity,
    /// Flag that indicates the current line needs to be cleared before
    /// printing. Used when a progress bar is currently displayed.
    needs_clear: AtomicBool,
}
impl fmt::Debug for Shell {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("Shell");
        s.field("output_format", &self.output_format);
        s.field("output_mode", &self.output_mode);
        s.field("verbosity", &self.verbosity);
        // The color choice only exists for the `Stream` variant; the raw streams
        // and `needs_clear` are not included in the debug output.
        if let ShellOut::Stream { color_choice, .. } = self.output {
            s.field("color_choice", &color_choice);
        }
        s.finish()
    }
}
/// A `Write`able object, either with or without color support.
enum ShellOut {
    /// Color-enabled stdio, with information on whether color should be used.
    Stream {
        stdout: AutoStream<std::io::Stdout>,
        stderr: AutoStream<std::io::Stderr>,
        /// Whether stderr was attached to a terminal at construction time.
        stderr_tty: bool,
        /// The user-requested color choice.
        color_choice: ColorChoice,
    },
    /// A write object that ignores all output.
    Empty(std::io::Empty),
}
/// Whether messages should use color output.
///
/// Applies to both the stdout and stderr streams of a [`Shell`].
#[derive(Debug, Default, PartialEq, Clone, Copy, Serialize, Deserialize, ValueEnum)]
pub enum ColorChoice {
    /// Intelligently guess whether to use color output (default).
    #[default]
    Auto,
    /// Force color output.
    Always,
    /// Force disable color output.
    Never,
}
impl Default for Shell {
    /// Equivalent to [`Shell::new`]: text format, normal mode, auto color,
    /// default verbosity.
    fn default() -> Self {
        Self::new()
    }
}
impl Shell {
/// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose
/// output.
pub fn new() -> Self {
Self::new_with(
OutputFormat::Text,
OutputMode::Normal,
ColorChoice::Auto,
Verbosity::default(),
)
}
/// Creates a new shell with the given color choice and verbosity.
pub fn new_with(
format: OutputFormat,
mode: OutputMode,
color: ColorChoice,
verbosity: Verbosity,
) -> Self {
Self {
output: ShellOut::Stream {
stdout: AutoStream::new(std::io::stdout(), color.to_anstream_color_choice()),
stderr: AutoStream::new(std::io::stderr(), color.to_anstream_color_choice()),
color_choice: color,
stderr_tty: std::io::stderr().is_terminal(),
},
output_format: format,
output_mode: mode,
verbosity,
needs_clear: AtomicBool::new(false),
}
}
/// Creates a shell that ignores all output.
pub fn empty() -> Self {
Self {
output: ShellOut::Empty(std::io::empty()),
output_format: OutputFormat::Text,
output_mode: OutputMode::Quiet,
verbosity: 0,
needs_clear: AtomicBool::new(false),
}
}
/// Acquire a lock to the global shell.
///
/// Initializes it with the default values if it has not been set yet.
pub fn get() -> impl DerefMut<Target = Self> + 'static {
GLOBAL_SHELL.get_or_init(Default::default).lock().unwrap_or_else(PoisonError::into_inner)
}
/// Set the global shell.
///
/// # Panics
///
/// Panics if the global shell has already been set.
#[track_caller]
pub fn set(self) {
GLOBAL_SHELL
.set(Mutex::new(self))
.unwrap_or_else(|_| panic!("attempted to set global shell twice"))
}
/// Sets whether the next print should clear the current line and returns the previous value.
pub fn set_needs_clear(&self, needs_clear: bool) -> bool {
self.needs_clear.swap(needs_clear, Ordering::Relaxed)
}
/// Returns `true` if the output format is JSON.
pub fn is_json(&self) -> bool {
self.output_format.is_json()
}
/// Returns `true` if the output format is Markdown.
pub fn is_markdown(&self) -> bool {
self.output_format.is_markdown()
}
/// Returns `true` if the verbosity level is `Quiet`.
pub fn is_quiet(&self) -> bool {
self.output_mode.is_quiet()
}
/// Returns `true` if the `needs_clear` flag is set.
pub fn needs_clear(&self) -> bool {
self.needs_clear.load(Ordering::Relaxed)
}
/// Returns `true` if the `needs_clear` flag is unset.
pub fn is_cleared(&self) -> bool {
!self.needs_clear()
}
/// Gets the output format of the shell.
pub fn output_format(&self) -> OutputFormat {
self.output_format
}
/// Gets the output mode of the shell.
pub fn output_mode(&self) -> OutputMode {
self.output_mode
}
/// Gets the verbosity of the shell when [`OutputMode::Normal`] is set.
pub fn verbosity(&self) -> Verbosity {
self.verbosity
}
/// Sets the verbosity level.
pub fn set_verbosity(&mut self, verbosity: Verbosity) {
self.verbosity = verbosity;
}
/// Gets the current color choice.
///
/// If we are not using a color stream, this will always return `Never`, even if the color
/// choice has been set to something else.
pub fn color_choice(&self) -> ColorChoice {
match self.output {
ShellOut::Stream { color_choice, .. } => color_choice,
ShellOut::Empty(_) => ColorChoice::Never,
}
}
/// Returns `true` if stderr is a tty.
pub fn is_err_tty(&self) -> bool {
match self.output {
ShellOut::Stream { stderr_tty, .. } => stderr_tty,
ShellOut::Empty(_) => false,
}
}
/// Whether `stderr` supports color.
pub fn err_supports_color(&self) -> bool {
match &self.output {
ShellOut::Stream { stderr, .. } => supports_color(stderr.current_choice()),
ShellOut::Empty(_) => false,
}
}
/// Whether `stdout` supports color.
pub fn out_supports_color(&self) -> bool {
match &self.output {
ShellOut::Stream { stdout, .. } => supports_color(stdout.current_choice()),
ShellOut::Empty(_) => false,
}
}
/// Gets a reference to the underlying stdout writer.
pub fn out(&mut self) -> &mut dyn Write {
self.maybe_err_erase_line();
self.output.stdout()
}
/// Gets a reference to the underlying stderr writer.
pub fn err(&mut self) -> &mut dyn Write {
self.maybe_err_erase_line();
self.output.stderr()
}
    /// Erase from cursor to end of line if needed.
    ///
    /// Only emits the escape sequence when stderr can render ANSI codes and
    /// the `needs_clear` flag was set; `set_needs_clear(false)` appears to
    /// return the previous flag value — confirm against its definition.
    /// Write errors are deliberately ignored (best-effort cosmetic cleanup).
    pub fn maybe_err_erase_line(&mut self) {
        if self.err_supports_color() && self.set_needs_clear(false) {
            // This is the "EL - Erase in Line" sequence. It clears from the cursor
            // to the end of line.
            // https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences
            let _ = self.output.stderr().write_all(b"\x1B[K");
        }
    }
    /// Prints a red 'error' message. Use the [`sh_err!`] macro instead.
    /// This will render a message in [ERROR] style with a bold `Error: ` prefix.
    ///
    /// **Note**: will log regardless of the verbosity level.
    pub fn error(&mut self, message: impl fmt::Display) -> Result<()> {
        self.maybe_err_erase_line();
        // Deliberately not routed through `print`: errors bypass the quiet check.
        self.output.message_stderr(&"Error", &ERROR, Some(&message), false)
    }
/// Prints an amber 'warning' message. Use the [`sh_warn!`] macro instead.
/// This will render a message in [WARN] style with a bold `Warning: `prefix.
///
/// **Note**: if `verbosity` is set to `Quiet`, this is a no-op.
pub fn warn(&mut self, message: impl fmt::Display) -> Result<()> {
match self.output_mode {
OutputMode::Quiet => Ok(()),
_ => self.print(&"Warning", &WARN, Some(&message), false),
}
}
/// Write a styled fragment.
///
/// Caller is responsible for deciding whether [`Shell::verbosity`] is affects output.
pub fn write_stdout(&mut self, fragment: impl fmt::Display, color: &Style) -> Result<()> {
self.output.write_stdout(fragment, color)
}
/// Write a styled fragment with the default color. Use the [`sh_print!`] macro instead.
///
/// **Note**: if `verbosity` is set to `Quiet`, this is a no-op.
pub fn print_out(&mut self, fragment: impl fmt::Display) -> Result<()> {
match self.output_mode {
OutputMode::Quiet => Ok(()),
_ => self.write_stdout(fragment, &Style::new()),
}
}
/// Write a styled fragment
///
/// Caller is responsible for deciding whether [`Shell::verbosity`] is affects output.
pub fn write_stderr(&mut self, fragment: impl fmt::Display, color: &Style) -> Result<()> {
self.output.write_stderr(fragment, color)
}
/// Write a styled fragment with the default color. Use the [`sh_eprint!`] macro instead.
///
/// **Note**: if `verbosity` is set to `Quiet`, this is a no-op.
pub fn print_err(&mut self, fragment: impl fmt::Display) -> Result<()> {
match self.output_mode {
OutputMode::Quiet => Ok(()),
_ => self.write_stderr(fragment, &Style::new()),
}
}
/// Prints a message, where the status will have `color` color, and can be justified. The
/// messages follows without color.
fn print(
&mut self,
status: &dyn fmt::Display,
style: &Style,
message: Option<&dyn fmt::Display>,
justified: bool,
) -> Result<()> {
match self.output_mode {
OutputMode::Quiet => Ok(()),
_ => {
self.maybe_err_erase_line();
self.output.message_stderr(status, style, message, justified)
}
}
}
}
impl ShellOut {
    /// Writes a status message to stderr. The status is rendered first, in bold
    /// plus the given style; when `justified`, it is right-aligned to 12 chars.
    fn message_stderr(
        &mut self,
        status: &dyn fmt::Display,
        style: &Style,
        message: Option<&dyn fmt::Display>,
        justified: bool,
    ) -> Result<()> {
        let rendered = Self::format_message(status, message, style, justified)?;
        self.stderr().write_all(&rendered)?;
        Ok(())
    }
    /// Writes a single styled fragment to stdout.
    fn write_stdout(&mut self, fragment: impl fmt::Display, style: &Style) -> Result<()> {
        let mut out = Vec::new();
        write!(out, "{style}{fragment}{style:#}")?;
        self.stdout().write_all(&out)?;
        Ok(())
    }
    /// Writes a single styled fragment to stderr.
    fn write_stderr(&mut self, fragment: impl fmt::Display, style: &Style) -> Result<()> {
        let mut out = Vec::new();
        write!(out, "{style}{fragment}{style:#}")?;
        self.stderr().write_all(&out)?;
        Ok(())
    }
    /// Gets stdout as a [`io::Write`](Write) trait object.
    fn stdout(&mut self) -> &mut dyn Write {
        match self {
            Self::Empty(e) => e,
            Self::Stream { stdout, .. } => stdout,
        }
    }
    /// Gets stderr as a [`io::Write`](Write) trait object.
    fn stderr(&mut self) -> &mut dyn Write {
        match self {
            Self::Empty(e) => e,
            Self::Stream { stderr, .. } => stderr,
        }
    }
    /// Renders a status plus optional message into a byte buffer.
    ///
    /// Without a message, the output ends with a single space and no newline so
    /// the caller can continue the line.
    fn format_message(
        status: &dyn fmt::Display,
        message: Option<&dyn fmt::Display>,
        style: &Style,
        justified: bool,
    ) -> Result<Vec<u8>> {
        let bold = anstyle::Style::new().bold();
        let mut buffer = Vec::new();
        if justified {
            write!(buffer, "{style}{status:>12}{style:#}")?;
        } else {
            write!(buffer, "{style}{status}{style:#}{bold}:{bold:#}")?;
        }
        if let Some(message) = message {
            writeln!(buffer, " {message}")?;
        } else {
            write!(buffer, " ")?;
        }
        Ok(buffer)
    }
}
impl ColorChoice {
    /// Converts our color choice to [`anstream`]'s version.
    fn to_anstream_color_choice(self) -> anstream::ColorChoice {
        match self {
            Self::Auto => anstream::ColorChoice::Auto,
            Self::Always => anstream::ColorChoice::Always,
            Self::Never => anstream::ColorChoice::Never,
        }
    }
}
/// Whether the given [`anstream::ColorChoice`] may produce colored output.
///
/// `Auto` counts as color-capable here; only an explicit `Never` disables it.
fn supports_color(choice: anstream::ColorChoice) -> bool {
    match choice {
        anstream::ColorChoice::Never => false,
        anstream::ColorChoice::Auto
        | anstream::ColorChoice::Always
        | anstream::ColorChoice::AlwaysAnsi => true,
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/io/macros.rs | crates/common/src/io/macros.rs | /// Prints a message to [`stdout`][std::io::stdout] and reads a line from stdin into a String.
///
/// Returns `Result<T>`, so sometimes `T` must be explicitly specified, like in `str::parse`.
///
/// # Examples
///
/// ```no_run
/// use foundry_common::prompt;
///
/// let response: String = prompt!("Would you like to continue? [y/N] ")?;
/// if !matches!(response.as_str(), "y" | "Y") {
/// return Ok(());
/// }
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```
#[macro_export]
macro_rules! prompt {
    // No arguments: read and parse one line from stdin.
    () => {
        $crate::stdin::parse_line()
    };
    // With a prompt message: print it, flush stdout so the prompt is visible
    // before blocking on stdin, then delegate to the zero-argument arm.
    ($($tt:tt)+) => {{
        let _ = $crate::sh_print!($($tt)+);
        match ::std::io::Write::flush(&mut ::std::io::stdout()) {
            ::core::result::Result::Ok(()) => $crate::prompt!(),
            ::core::result::Result::Err(e) => ::core::result::Result::Err(::eyre::eyre!("Could not flush stdout: {e}"))
        }
    }};
}
/// Prints a formatted error to stderr.
///
/// Dispatches to [`Shell::error`] via `__sh_dispatch!`.
///
/// **Note**: will log regardless of the verbosity level.
#[macro_export]
macro_rules! sh_err {
    ($($args:tt)*) => {
        $crate::__sh_dispatch!(error $($args)*)
    };
}
/// Prints a formatted warning to stderr.
///
/// Dispatches to [`Shell::warn`] via `__sh_dispatch!`.
///
/// **Note**: if `verbosity` is set to `Quiet`, this is a no-op.
#[macro_export]
macro_rules! sh_warn {
    ($($args:tt)*) => {
        $crate::__sh_dispatch!(warn $($args)*)
    };
}
/// Prints a raw formatted message to stdout.
///
/// **Note**: if `verbosity` is set to `Quiet`, this is a no-op.
#[macro_export]
macro_rules! sh_print {
    ($($args:tt)*) => {
        $crate::__sh_dispatch!(print_out $($args)*)
    };
    // NOTE(review): this arm appears unreachable — the arm above matches any
    // non-empty token stream first, and shell-expression dispatch is handled
    // inside `__sh_dispatch!`. Confirm before removing.
    ($shell:expr, $($args:tt)*) => {
        $crate::__sh_dispatch!(print_out $shell, $($args)*)
    };
}
/// Prints a raw formatted message to stderr.
///
/// **Note**: if `verbosity` is set to `Quiet`, this is a no-op.
#[macro_export]
macro_rules! sh_eprint {
    ($($args:tt)*) => {
        $crate::__sh_dispatch!(print_err $($args)*)
    };
    // NOTE(review): like `sh_print!`, this arm appears unreachable because the
    // arm above matches first; `__sh_dispatch!` already handles shell exprs.
    ($shell:expr, $($args:tt)*) => {
        $crate::__sh_dispatch!(print_err $shell, $($args)*)
    };
}
/// Prints a raw formatted message to stdout, with a trailing newline.
///
/// **Note**: if `verbosity` is set to `Quiet`, this is a no-op.
#[macro_export]
macro_rules! sh_println {
    // Bare call: just a newline.
    () => {
        $crate::sh_print!("\n")
    };
    // Literal format string, possibly with arguments.
    ($fmt:literal $($args:tt)*) => {
        $crate::sh_print!("{}\n", ::core::format_args!($fmt $($args)*))
    };
    // Explicit shell, no message. NOTE(review): unlike `sh_eprintln!`, this
    // arm panics on write failure instead of propagating a `Result` — confirm
    // whether the asymmetry is intentional.
    ($shell:expr $(,)?) => {
        $crate::sh_print!($shell, "\n").expect("failed to write newline")
    };
    // Explicit shell plus format arguments.
    ($shell:expr, $($args:tt)*) => {
        $crate::sh_print!($shell, "{}\n", ::core::format_args!($($args)*))
    };
    // Fallback: non-literal format expression.
    ($($args:tt)*) => {
        $crate::sh_print!("{}\n", ::core::format_args!($($args)*))
    };
}
/// Prints a raw formatted message to stderr, with a trailing newline.
///
/// **Note**: if `verbosity` is set to `Quiet`, this is a no-op.
#[macro_export]
macro_rules! sh_eprintln {
    // Bare call: just a newline.
    () => {
        $crate::sh_eprint!("\n")
    };
    // Literal format string, possibly with arguments.
    ($fmt:literal $($args:tt)*) => {
        $crate::sh_eprint!("{}\n", ::core::format_args!($fmt $($args)*))
    };
    // Explicit shell, no message.
    ($shell:expr $(,)?) => {
        $crate::sh_eprint!($shell, "\n")
    };
    // Explicit shell plus format arguments.
    ($shell:expr, $($args:tt)*) => {
        $crate::sh_eprint!($shell, "{}\n", ::core::format_args!($($args)*))
    };
    // Fallback: non-literal format expression.
    ($($args:tt)*) => {
        $crate::sh_eprint!("{}\n", ::core::format_args!($($args)*))
    };
}
#[doc(hidden)]
#[macro_export]
macro_rules! __sh_dispatch {
    // Format-literal first argument: route to the global shell.
    ($f:ident $fmt:literal $($args:tt)*) => {
        $crate::__sh_dispatch!(@impl $f &mut *$crate::Shell::get(), $fmt $($args)*)
    };
    // Explicit shell expression followed by format arguments.
    ($f:ident $shell:expr, $($args:tt)*) => {
        $crate::__sh_dispatch!(@impl $f $shell, $($args)*)
    };
    // Anything else: route to the global shell.
    ($f:ident $($args:tt)*) => {
        $crate::__sh_dispatch!(@impl $f &mut *$crate::Shell::get(), $($args)*)
    };
    // Ensure that the global shell lock is held for as little time as possible.
    // Also avoids deadlocks in case of nested calls.
    (@impl $f:ident $shell:expr, $($args:tt)*) => {
        match format!($($args)*) {
            fmt => $crate::Shell::$f($shell, fmt),
        }
    };
}
#[cfg(test)]
mod tests {
    // Smoke-tests every public shell macro, both the global-shell and the
    // explicit-shell forms; also exercises nested macro calls inside format
    // arguments to guard against re-entrant shell-lock deadlocks.
    #[test]
    fn macros() -> eyre::Result<()> {
        sh_err!("err")?;
        sh_err!("err {}", "arg")?;
        sh_warn!("warn")?;
        sh_warn!("warn {}", "arg")?;
        sh_print!("print -")?;
        sh_print!("print {} -", "arg")?;
        sh_println!()?;
        sh_println!("println")?;
        sh_println!("println {}", "arg")?;
        sh_eprint!("eprint -")?;
        sh_eprint!("eprint {} -", "arg")?;
        sh_eprintln!()?;
        sh_eprintln!("eprintln")?;
        sh_eprintln!("eprintln {}", "arg")?;
        // Nested invocations: must not deadlock on the global shell.
        sh_println!("{:?}", {
            sh_println!("hi")?;
            solar::data_structures::fmt::from_fn(|f| {
                let _ = sh_println!("even more nested");
                write!(f, "hi 2")
            })
        })?;
        Ok(())
    }
    #[test]
    fn macros_with_shell() -> eyre::Result<()> {
        let shell = &mut crate::Shell::new();
        sh_eprintln!(shell)?;
        sh_eprintln!(shell,)?;
        sh_eprintln!(shell, "shelled eprintln")?;
        sh_eprintln!(shell, "shelled eprintln {}", "arg")?;
        sh_eprintln!(&mut crate::Shell::new(), "shelled eprintln {}", "arg")?;
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/io/mod.rs | crates/common/src/io/mod.rs | //! Utilities for working with standard input, output, and error.
#[macro_use]
mod macros;
pub mod shell;
pub mod stdin;
pub mod style;
#[doc(no_inline)]
pub use shell::Shell;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/src/io/style.rs | crates/common/src/io/style.rs | #![allow(missing_docs)]
use anstyle::*;
/// Style for error-level output: bold red.
pub const ERROR: Style = AnsiColor::Red.on_default().effects(Effects::BOLD);
/// Style for warning-level output: bold yellow.
pub const WARN: Style = AnsiColor::Yellow.on_default().effects(Effects::BOLD);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/fmt/src/dynamic.rs | crates/common/fmt/src/dynamic.rs | use super::{format_int_exp, format_uint_exp};
use alloy_dyn_abi::{DynSolType, DynSolValue};
use alloy_primitives::hex;
use eyre::Result;
use serde_json::{Map, Value};
use std::{
collections::{BTreeMap, HashMap},
fmt,
};
/// [`DynSolValue`] formatter.
struct DynValueFormatter {
    // When set, formats for machine re-parsing: no exponent hints on ints,
    // structs printed as plain tuples, strings escaped without quotes.
    raw: bool,
}
impl DynValueFormatter {
    /// Recursively formats a [`DynSolValue`].
    fn value(&self, value: &DynSolValue, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match value {
            DynSolValue::Address(inner) => write!(f, "{inner}"),
            DynSolValue::Function(inner) => write!(f, "{inner}"),
            DynSolValue::Bytes(inner) => f.write_str(&hex::encode_prefixed(inner)),
            // Only the meaningful `size`-byte prefix of the word is printed.
            DynSolValue::FixedBytes(word, size) => {
                f.write_str(&hex::encode_prefixed(&word[..*size]))
            }
            DynSolValue::Uint(inner, _) => {
                if self.raw {
                    write!(f, "{inner}")
                } else {
                    // Human-readable form may carry an exponent hint.
                    f.write_str(&format_uint_exp(*inner))
                }
            }
            DynSolValue::Int(inner, _) => {
                if self.raw {
                    write!(f, "{inner}")
                } else {
                    f.write_str(&format_int_exp(*inner))
                }
            }
            DynSolValue::Array(values) | DynSolValue::FixedArray(values) => {
                f.write_str("[")?;
                self.list(values, f)?;
                f.write_str("]")
            }
            DynSolValue::Tuple(values) => self.tuple(values, f),
            DynSolValue::String(inner) => {
                if self.raw {
                    // Raw mode: escape specials but omit surrounding quotes.
                    write!(f, "{}", inner.escape_debug())
                } else {
                    write!(f, "{inner:?}") // escape strings
                }
            }
            DynSolValue::Bool(inner) => write!(f, "{inner}"),
            DynSolValue::CustomStruct { name, prop_names, tuple } => {
                if self.raw {
                    // Raw mode drops struct/property names for re-parseability.
                    return self.tuple(tuple, f);
                }
                f.write_str(name)?;
                // Property names are only used when they line up 1:1 with the
                // tuple; otherwise fall back to plain tuple form.
                if prop_names.len() == tuple.len() {
                    f.write_str("({ ")?;
                    for (i, (prop_name, value)) in std::iter::zip(prop_names, tuple).enumerate() {
                        if i > 0 {
                            f.write_str(", ")?;
                        }
                        f.write_str(prop_name)?;
                        f.write_str(": ")?;
                        self.value(value, f)?;
                    }
                    f.write_str(" })")
                } else {
                    self.tuple(tuple, f)
                }
            }
        }
    }
    /// Recursively formats a comma-separated list of [`DynSolValue`]s.
    fn list(&self, values: &[DynSolValue], f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for (i, value) in values.iter().enumerate() {
            if i > 0 {
                f.write_str(", ")?;
            }
            self.value(value, f)?;
        }
        Ok(())
    }
    /// Formats the given values as a parenthesized tuple.
    fn tuple(&self, values: &[DynSolValue], f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("(")?;
        self.list(values, f)?;
        f.write_str(")")
    }
}
/// Wrapper that implements [`Display`](fmt::Display) for a [`DynSolValue`].
struct DynValueDisplay<'a> {
    /// The value to display.
    value: &'a DynSolValue,
    /// The formatter (carries the raw/pretty flag).
    formatter: DynValueFormatter,
}
impl<'a> DynValueDisplay<'a> {
    /// Wraps `value` for display; `raw` selects re-parseable output.
    fn new(value: &'a DynSolValue, raw: bool) -> Self {
        let formatter = DynValueFormatter { raw };
        Self { value, formatter }
    }
}
impl fmt::Display for DynValueDisplay<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { value, formatter } = self;
        formatter.value(value, f)
    }
}
/// Coerces each `(type, string)` pair into a [`DynSolValue`].
///
/// Fails on the first input that cannot be coerced to its expected
/// [`DynSolType`].
pub fn parse_tokens<'a, I: IntoIterator<Item = (&'a DynSolType, &'a str)>>(
    params: I,
) -> alloy_dyn_abi::Result<Vec<DynSolValue>> {
    params.into_iter().map(|(param, value)| DynSolType::coerce_str(param, value)).collect()
}
/// Pretty-prints a slice of tokens using [`format_token`] (human-readable form).
pub fn format_tokens(tokens: &[DynSolValue]) -> impl Iterator<Item = String> + '_ {
    tokens.iter().map(format_token)
}
/// Pretty-prints a slice of tokens using [`format_token_raw`] (re-parseable form).
pub fn format_tokens_raw(tokens: &[DynSolValue]) -> impl Iterator<Item = String> + '_ {
    tokens.iter().map(format_token_raw)
}
/// Pretty-prints the given value into a string suitable for user output.
pub fn format_token(value: &DynSolValue) -> String {
    let display = DynValueDisplay::new(value, false);
    display.to_string()
}
/// Pretty-prints the given value into a string suitable for re-parsing as values later.
///
/// This means:
/// - integers are not formatted with exponential notation hints
/// - structs are formatted as tuples, losing the struct and property names
pub fn format_token_raw(value: &DynSolValue) -> String {
    let display = DynValueDisplay::new(value, true);
    display.to_string()
}
/// Serializes given [DynSolValue] into a [serde_json::Value].
///
/// When no [`StructDefinitions`] are supplied, an empty set is used.
pub fn serialize_value_as_json(
    value: DynSolValue,
    defs: Option<&StructDefinitions>,
) -> Result<Value> {
    let empty;
    let defs = match defs {
        Some(defs) => defs,
        None => {
            empty = StructDefinitions::default();
            &empty
        }
    };
    _serialize_value_as_json(value, defs)
}
// Recursive worker for `serialize_value_as_json`; `defs` is used to preserve
// field ordering for known custom structs.
fn _serialize_value_as_json(value: DynSolValue, defs: &StructDefinitions) -> Result<Value> {
    match value {
        DynSolValue::Bool(b) => Ok(Value::Bool(b)),
        DynSolValue::String(s) => {
            // Strings are allowed to contain stringified JSON objects, so we try to parse it like
            // one first.
            if let Ok(map) = serde_json::from_str(&s) {
                Ok(Value::Object(map))
            } else {
                Ok(Value::String(s))
            }
        }
        DynSolValue::Bytes(b) => Ok(Value::String(hex::encode_prefixed(b))),
        DynSolValue::FixedBytes(b, size) => Ok(Value::String(hex::encode_prefixed(&b[..size]))),
        DynSolValue::Int(i, _) => {
            if let Ok(n) = i64::try_from(i) {
                // Use `serde_json::Number` if the number can be accurately represented.
                Ok(Value::Number(n.into()))
            } else {
                // Otherwise, fallback to its string representation to preserve precision and ensure
                // compatibility with alloy's `DynSolType` coercion.
                Ok(Value::String(i.to_string()))
            }
        }
        DynSolValue::Uint(i, _) => {
            if let Ok(n) = u64::try_from(i) {
                // Use `serde_json::Number` if the number can be accurately represented.
                Ok(Value::Number(n.into()))
            } else {
                // Otherwise, fallback to its string representation to preserve precision and ensure
                // compatibility with alloy's `DynSolType` coercion.
                Ok(Value::String(i.to_string()))
            }
        }
        DynSolValue::Address(a) => Ok(Value::String(a.to_string())),
        DynSolValue::Array(e) | DynSolValue::FixedArray(e) => Ok(Value::Array(
            e.into_iter().map(|v| _serialize_value_as_json(v, defs)).collect::<Result<_>>()?,
        )),
        DynSolValue::CustomStruct { name, prop_names, tuple } => {
            let values = tuple
                .into_iter()
                .map(|v| _serialize_value_as_json(v, defs))
                .collect::<Result<Vec<_>>>()?;
            let mut map: HashMap<String, Value> = prop_names.into_iter().zip(values).collect();
            // If the struct def is known, manually build a `Map` to preserve the order.
            if let Some(fields) = defs.get(&name)? {
                let mut ordered_map = Map::with_capacity(fields.len());
                for (field_name, _) in fields {
                    if let Some(serialized_value) = map.remove(field_name) {
                        ordered_map.insert(field_name.clone(), serialized_value);
                    }
                }
                // Explicitly return a `Value::Object` to avoid ambiguity.
                return Ok(Value::Object(ordered_map));
            }
            // Otherwise, fall back to alphabetical sorting for deterministic output.
            Ok(Value::Object(map.into_iter().collect::<Map<String, Value>>()))
        }
        DynSolValue::Tuple(values) => Ok(Value::Array(
            values.into_iter().map(|v| _serialize_value_as_json(v, defs)).collect::<Result<_>>()?,
        )),
        DynSolValue::Function(_) => eyre::bail!("cannot serialize function pointer"),
    }
}
// -- STRUCT DEFINITIONS -------------------------------------------------------
/// Maps a struct identifier — possibly qualified as `<contract>.<struct>` (see
/// [`StructDefinitions::get`]) — to its ordered `(field_name, field_type)` pairs.
pub type TypeDefMap = BTreeMap<String, Vec<(String, String)>>;
#[derive(Debug, Clone, Default)]
pub struct StructDefinitions(TypeDefMap);
impl From<TypeDefMap> for StructDefinitions {
fn from(map: TypeDefMap) -> Self {
Self::new(map)
}
}
impl StructDefinitions {
    /// Wraps an existing [`TypeDefMap`].
    pub fn new(map: TypeDefMap) -> Self {
        Self(map)
    }
    /// Iterates over all known struct identifiers.
    pub fn keys(&self) -> impl Iterator<Item = &String> {
        self.0.keys()
    }
    /// Iterates over all field lists as slices.
    pub fn values(&self) -> impl Iterator<Item = &[(String, String)]> {
        self.0.values().map(Vec::as_slice)
    }
    /// Looks up a struct's fields by exact key, or — failing that — by bare
    /// struct name against `<contract>.<name>` qualified keys.
    ///
    /// Errors when the bare name matches more than one contract.
    pub fn get(&self, key: &str) -> eyre::Result<Option<&[(String, String)]>> {
        if let Some(value) = self.0.get(key) {
            return Ok(Some(value));
        }
        let mut candidates = self.0.iter().filter_map(|(k, v)| match k.split_once('.') {
            Some((_, struct_name)) if struct_name == key => Some(v.as_slice()),
            _ => None,
        });
        let Some(first) = candidates.next() else {
            return Ok(None);
        };
        if candidates.next().is_some() {
            eyre::bail!(
                "there are several structs with the same name. Use `<contract_name>.{key}` instead."
            );
        }
        Ok(Some(first))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_primitives::{U256, address};
    // `coerce_str` must accept both decimal and 0x-hex uint literals.
    #[test]
    fn parse_hex_uint() {
        let ty = DynSolType::Uint(256);
        let values = parse_tokens(std::iter::once((&ty, "100"))).unwrap();
        assert_eq!(values, [DynSolValue::Uint(U256::from(100), 256)]);
        let val: U256 = U256::from(100u64);
        let hex_val = format!("0x{val:x}");
        let values = parse_tokens(std::iter::once((&ty, hex_val.as_str()))).unwrap();
        assert_eq!(values, [DynSolValue::Uint(U256::from(100), 256)]);
    }
    // Addresses must be rendered in EIP-55 checksum form.
    #[test]
    fn format_addr() {
        // copied from testcases in https://github.com/ethereum/EIPs/blob/master/EIPS/eip-55.md
        assert_eq!(
            format_token(&DynSolValue::Address(address!(
                "0x5aAeb6053F3E94C9b9A09f33669435E7Ef1BeAed"
            ))),
            "0x5aAeb6053F3E94C9b9A09f33669435E7Ef1BeAed",
        );
        // copied from testcases in https://github.com/ethereum/EIPs/blob/master/EIPS/eip-1191.md
        assert_ne!(
            format_token(&DynSolValue::Address(address!(
                "0xFb6916095cA1Df60bb79ce92cE3EA74c37c5d359"
            ))),
            "0xFb6916095cA1Df60bb79ce92cE3EA74c37c5d359"
        );
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/fmt/src/lib.rs | crates/common/fmt/src/lib.rs | //! Helpers for formatting Ethereum types.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
mod console;
pub use console::{ConsoleFmt, FormatSpec, console_format};
mod dynamic;
pub use dynamic::{
StructDefinitions, TypeDefMap, format_token, format_token_raw, format_tokens,
format_tokens_raw, parse_tokens, serialize_value_as_json,
};
mod exp;
pub use exp::{format_int_exp, format_uint_exp, to_exp_notation};
mod ui;
pub use ui::{EthValue, UIfmt, get_pretty_block_attr, get_pretty_tx_attr};
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/fmt/src/ui.rs | crates/common/fmt/src/ui.rs | //! Helper trait and functions to format Ethereum types.
use alloy_consensus::{
Eip658Value, Receipt, ReceiptWithBloom, Transaction as TxTrait, TxEnvelope, TxType, Typed2718,
};
use alloy_network::{
AnyHeader, AnyReceiptEnvelope, AnyRpcBlock, AnyRpcTransaction, AnyTransactionReceipt,
AnyTxEnvelope, ReceiptResponse,
};
use alloy_primitives::{Address, Bloom, Bytes, FixedBytes, I256, U8, U64, U256, Uint, hex};
use alloy_rpc_types::{
AccessListItem, Block, BlockTransactions, Header, Log, Transaction, TransactionReceipt,
};
use alloy_serde::{OtherFields, WithOtherFields};
use foundry_primitives::{FoundryReceiptEnvelope, FoundryTxReceipt};
use revm::context_interface::transaction::SignedAuthorization;
use serde::Deserialize;
/// Width of the name column used for pretty formatting `{:>20}{value}` pairs.
const NAME_COLUMN_LEN: usize = 20usize;
/// Helper trait to format Ethereum types.
///
/// # Examples
///
/// ```
/// use foundry_common_fmt::UIfmt;
///
/// let boolean: bool = true;
/// let string = boolean.pretty();
/// ```
pub trait UIfmt {
    /// Return a prettified string version of the value
    fn pretty(&self) -> String;
}
impl<T: UIfmt> UIfmt for &T {
    /// Delegates to the referenced value's impl.
    fn pretty(&self) -> String {
        T::pretty(*self)
    }
}
impl<T: UIfmt> UIfmt for Option<T> {
    /// `None` renders as an empty string.
    fn pretty(&self) -> String {
        match self {
            Some(inner) => inner.pretty(),
            None => String::new(),
        }
    }
}
impl<T: UIfmt> UIfmt for [T] {
    /// Renders `[]` for empty slices, otherwise a bracketed block with every
    /// line of every item's pretty form indented by one tab.
    fn pretty(&self) -> String {
        if self.is_empty() {
            return "[]".to_string();
        }
        let mut out = String::with_capacity(self.len() * 64);
        out.push_str("[\n");
        for item in self {
            for line in item.pretty().lines() {
                out.push('\t');
                out.push_str(line);
                out.push('\n');
            }
        }
        out.push(']');
        out
    }
}
impl UIfmt for String {
    // Identity: strings are already human-readable.
    fn pretty(&self) -> String {
        self.to_string()
    }
}
impl UIfmt for u64 {
    // Decimal via `Display`.
    fn pretty(&self) -> String {
        self.to_string()
    }
}
impl UIfmt for u128 {
    // Decimal via `Display`.
    fn pretty(&self) -> String {
        self.to_string()
    }
}
impl UIfmt for bool {
    // "true" / "false" via `Display`.
    fn pretty(&self) -> String {
        self.to_string()
    }
}
impl<const BITS: usize, const LIMBS: usize> UIfmt for Uint<BITS, LIMBS> {
    // Decimal via `Display`, for any ruint width.
    fn pretty(&self) -> String {
        self.to_string()
    }
}
impl UIfmt for I256 {
    // Signed decimal via `Display`.
    fn pretty(&self) -> String {
        self.to_string()
    }
}
impl UIfmt for Address {
    // Uses the `Address` `Display` impl (checksummed form per `format_addr` test).
    fn pretty(&self) -> String {
        self.to_string()
    }
}
impl UIfmt for Bloom {
    // Uses the `Bloom` `Display` impl.
    fn pretty(&self) -> String {
        self.to_string()
    }
}
impl UIfmt for TxType {
    // Renders the numeric discriminant (e.g. "2"), not the variant name.
    fn pretty(&self) -> String {
        (*self as u8).to_string()
    }
}
impl UIfmt for Vec<u8> {
    // Delegates to the `[u8]` impl: 0x-prefixed hex.
    fn pretty(&self) -> String {
        self[..].pretty()
    }
}
impl UIfmt for Bytes {
    // Delegates to the `[u8]` impl: 0x-prefixed hex.
    fn pretty(&self) -> String {
        self[..].pretty()
    }
}
impl<const N: usize> UIfmt for [u8; N] {
    // Delegates to the `[u8]` impl: 0x-prefixed hex.
    fn pretty(&self) -> String {
        self[..].pretty()
    }
}
impl<const N: usize> UIfmt for FixedBytes<N> {
    // Delegates to the `[u8]` impl: 0x-prefixed hex.
    fn pretty(&self) -> String {
        self[..].pretty()
    }
}
impl UIfmt for [u8] {
    // Base case for all byte-like impls above: 0x-prefixed lowercase hex.
    fn pretty(&self) -> String {
        hex::encode_prefixed(self)
    }
}
impl UIfmt for Eip658Value {
    fn pretty(&self) -> String {
        match self {
            // EIP-658 boolean status code.
            Self::Eip658(status) => if *status { "1 (success)" } else { "0 (failed)" }.to_string(),
            // Alternative form: a post-state root instead of a status flag.
            Self::PostState(state) => state.pretty(),
        }
    }
}
impl UIfmt for AnyTransactionReceipt {
    fn pretty(&self) -> String {
        // Destructure exhaustively so new receipt fields cause a compile error
        // here instead of being silently omitted from the output.
        let Self {
            inner:
                TransactionReceipt {
                    transaction_hash,
                    transaction_index,
                    block_hash,
                    block_number,
                    from,
                    to,
                    gas_used,
                    contract_address,
                    effective_gas_price,
                    inner:
                        AnyReceiptEnvelope {
                            r#type: transaction_type,
                            inner:
                                ReceiptWithBloom {
                                    receipt: Receipt { status, cumulative_gas_used, logs },
                                    logs_bloom,
                                },
                        },
                    blob_gas_price,
                    blob_gas_used,
                },
            other,
        } = self;
        let mut pretty = format!(
            "
blockHash            {}
blockNumber          {}
contractAddress      {}
cumulativeGasUsed    {}
effectiveGasPrice    {}
from                 {}
gasUsed              {}
logs                 {}
logsBloom            {}
root                 {}
status               {}
transactionHash      {}
transactionIndex     {}
type                 {}
blobGasPrice         {}
blobGasUsed          {}",
            block_hash.pretty(),
            block_number.pretty(),
            contract_address.pretty(),
            cumulative_gas_used.pretty(),
            effective_gas_price.pretty(),
            from.pretty(),
            gas_used.pretty(),
            serde_json::to_string(&logs).unwrap(),
            logs_bloom.pretty(),
            self.state_root().pretty(),
            status.pretty(),
            transaction_hash.pretty(),
            transaction_index.pretty(),
            transaction_type,
            blob_gas_price.pretty(),
            blob_gas_used.pretty()
        );
        // `to` is only present for non-create transactions.
        if let Some(to) = to {
            pretty.push_str(&format!("\nto                   {}", to.pretty()));
        }
        // additional captured fields
        pretty.push_str(&other.pretty());
        pretty
    }
}
impl UIfmt for Log {
    // One `name: value` line per log field; block/tx fields are `Option`s and
    // render empty when absent.
    fn pretty(&self) -> String {
        format!(
            "
address: {}
blockHash: {}
blockNumber: {}
data: {}
logIndex: {}
removed: {}
topics: {}
transactionHash: {}
transactionIndex: {}",
            self.address().pretty(),
            self.block_hash.pretty(),
            self.block_number.pretty(),
            self.data().data.pretty(),
            self.log_index.pretty(),
            self.removed.pretty(),
            self.topics().pretty(),
            self.transaction_hash.pretty(),
            self.transaction_index.pretty(),
        )
    }
}
impl<T: UIfmt> UIfmt for Block<T, Header<AnyHeader>> {
    // Header fields first (via `pretty_block_basics`), then the transactions.
    fn pretty(&self) -> String {
        format!(
            "
{}
transactions:        {}",
            pretty_block_basics(self),
            self.transactions.pretty()
        )
    }
}
impl<T: UIfmt> UIfmt for BlockTransactions<T> {
    /// Hashes and full transactions render as lists; uncles render empty.
    fn pretty(&self) -> String {
        match self {
            Self::Uncle => String::new(),
            Self::Full(transactions) => transactions.pretty(),
            Self::Hashes(hashes) => hashes.pretty(),
        }
    }
}
impl UIfmt for OtherFields {
    // Renders each extra field as `key<padding>value\n`, padding keys to
    // NAME_COLUMN_LEN so values line up with the main field columns.
    fn pretty(&self) -> String {
        let mut s = String::with_capacity(self.len() * 30);
        if !self.is_empty() {
            s.push('\n');
        }
        for (key, value) in self {
            let val = EthValue::from(value.clone()).pretty();
            // saturating_sub: over-long keys get a single separating space.
            let offset = NAME_COLUMN_LEN.saturating_sub(key.len());
            s.push_str(key);
            s.extend(std::iter::repeat_n(' ', offset + 1));
            s.push_str(&val);
            s.push('\n');
        }
        s
    }
}
impl UIfmt for AccessListItem {
    /// Renders `address => [storage keys]`.
    fn pretty(&self) -> String {
        format!("{} => {}", self.address.pretty(), self.storage_keys.pretty())
    }
}
impl UIfmt for TxEnvelope {
    // One arm per typed-transaction variant; each format string lists the
    // variant's fields alphabetically, padded so values start in one column.
    // `yParity` is derived from the signature's boolean v bit.
    fn pretty(&self) -> String {
        match &self {
            Self::Eip2930(tx) => format!(
                "
accessList           {}
chainId              {}
gasLimit             {}
gasPrice             {}
hash                 {}
input                {}
nonce                {}
r                    {}
s                    {}
to                   {}
type                 {}
value                {}
yParity              {}",
                self.access_list()
                    .map(|a| a.iter().collect::<Vec<_>>())
                    .unwrap_or_default()
                    .pretty(),
                self.chain_id().pretty(),
                self.gas_limit().pretty(),
                self.gas_price().pretty(),
                self.tx_hash().pretty(),
                self.input().pretty(),
                self.nonce().pretty(),
                FixedBytes::from(tx.signature().r()).pretty(),
                FixedBytes::from(tx.signature().s()).pretty(),
                self.to().pretty(),
                self.ty(),
                self.value().pretty(),
                (if tx.signature().v() { 1u64 } else { 0 }).pretty(),
            ),
            Self::Eip1559(tx) => format!(
                "
accessList           {}
chainId              {}
gasLimit             {}
hash                 {}
input                {}
maxFeePerGas         {}
maxPriorityFeePerGas {}
nonce                {}
r                    {}
s                    {}
to                   {}
type                 {}
value                {}
yParity              {}",
                self.access_list()
                    .map(|a| a.iter().collect::<Vec<_>>())
                    .unwrap_or_default()
                    .pretty(),
                self.chain_id().pretty(),
                self.gas_limit().pretty(),
                self.tx_hash().pretty(),
                self.input().pretty(),
                self.max_fee_per_gas().pretty(),
                self.max_priority_fee_per_gas().pretty(),
                self.nonce().pretty(),
                FixedBytes::from(tx.signature().r()).pretty(),
                FixedBytes::from(tx.signature().s()).pretty(),
                self.to().pretty(),
                self.ty(),
                self.value().pretty(),
                (if tx.signature().v() { 1u64 } else { 0 }).pretty(),
            ),
            Self::Eip4844(tx) => format!(
                "
accessList           {}
blobVersionedHashes  {}
chainId              {}
gasLimit             {}
hash                 {}
input                {}
maxFeePerBlobGas     {}
maxFeePerGas         {}
maxPriorityFeePerGas {}
nonce                {}
r                    {}
s                    {}
to                   {}
type                 {}
value                {}
yParity              {}",
                self.access_list()
                    .map(|a| a.iter().collect::<Vec<_>>())
                    .unwrap_or_default()
                    .pretty(),
                self.blob_versioned_hashes().unwrap_or(&[]).pretty(),
                self.chain_id().pretty(),
                self.gas_limit().pretty(),
                self.tx_hash().pretty(),
                self.input().pretty(),
                self.max_fee_per_blob_gas().pretty(),
                self.max_fee_per_gas().pretty(),
                self.max_priority_fee_per_gas().pretty(),
                self.nonce().pretty(),
                FixedBytes::from(tx.signature().r()).pretty(),
                FixedBytes::from(tx.signature().s()).pretty(),
                self.to().pretty(),
                self.ty(),
                self.value().pretty(),
                (if tx.signature().v() { 1u64 } else { 0 }).pretty(),
            ),
            Self::Eip7702(tx) => format!(
                "
accessList           {}
authorizationList    {}
chainId              {}
gasLimit             {}
hash                 {}
input                {}
maxFeePerGas         {}
maxPriorityFeePerGas {}
nonce                {}
r                    {}
s                    {}
to                   {}
type                 {}
value                {}
yParity              {}",
                self.access_list()
                    .map(|a| a.iter().collect::<Vec<_>>())
                    .unwrap_or_default()
                    .pretty(),
                self.authorization_list()
                    .as_ref()
                    .map(|l| l.iter().collect::<Vec<_>>())
                    .unwrap_or_default()
                    .pretty(),
                self.chain_id().pretty(),
                self.gas_limit().pretty(),
                self.tx_hash().pretty(),
                self.input().pretty(),
                self.max_fee_per_gas().pretty(),
                self.max_priority_fee_per_gas().pretty(),
                self.nonce().pretty(),
                FixedBytes::from(tx.signature().r()).pretty(),
                FixedBytes::from(tx.signature().s()).pretty(),
                self.to().pretty(),
                self.ty(),
                self.value().pretty(),
                (if tx.signature().v() { 1u64 } else { 0 }).pretty(),
            ),
            // Legacy (and any other) transactions: signature pieces only exist
            // on the legacy variant, so they render empty otherwise.
            _ => format!(
                "
gas                  {}
gasPrice             {}
hash                 {}
input                {}
nonce                {}
r                    {}
s                    {}
to                   {}
type                 {}
v                    {}
value                {}",
                self.gas_limit().pretty(),
                self.gas_price().pretty(),
                self.tx_hash().pretty(),
                self.input().pretty(),
                self.nonce().pretty(),
                self.as_legacy()
                    .map(|tx| FixedBytes::from(tx.signature().r()).pretty())
                    .unwrap_or_default(),
                self.as_legacy()
                    .map(|tx| FixedBytes::from(tx.signature().s()).pretty())
                    .unwrap_or_default(),
                self.to().pretty(),
                self.ty(),
                self.as_legacy()
                    .map(|tx| (if tx.signature().v() { 1u64 } else { 0 }).pretty())
                    .unwrap_or_default(),
                self.value().pretty(),
            ),
        }
    }
}
impl UIfmt for AnyTxEnvelope {
    // Known Ethereum envelopes delegate to the `TxEnvelope` impl; unknown
    // (chain-specific) envelopes print hash, type id, and their raw fields.
    fn pretty(&self) -> String {
        match self {
            Self::Ethereum(envelop) => envelop.pretty(),
            Self::Unknown(tx) => {
                format!(
                    "
hash                 {}
type                 {}
{}
",
                    tx.hash.pretty(),
                    tx.ty(),
                    tx.inner.fields.pretty().trim_start(),
                )
            }
        }
    }
}
impl UIfmt for Transaction {
fn pretty(&self) -> String {
match &self.inner.inner() {
TxEnvelope::Eip2930(tx) => format!(
"
accessList {}
blockHash {}
blockNumber {}
chainId {}
from {}
gasLimit {}
gasPrice {}
hash {}
input {}
nonce {}
r {}
s {}
to {}
transactionIndex {}
type {}
value {}
yParity {}",
self.inner
.access_list()
.map(|a| a.iter().collect::<Vec<_>>())
.unwrap_or_default()
.pretty(),
self.block_hash.pretty(),
self.block_number.pretty(),
self.chain_id().pretty(),
self.inner.signer().pretty(),
self.gas_limit().pretty(),
self.gas_price().pretty(),
self.inner.tx_hash().pretty(),
self.input().pretty(),
self.nonce().pretty(),
FixedBytes::from(tx.signature().r()).pretty(),
FixedBytes::from(tx.signature().s()).pretty(),
self.to().pretty(),
self.transaction_index.pretty(),
self.inner.ty(),
self.value().pretty(),
(if tx.signature().v() { 1u64 } else { 0 }).pretty(),
),
TxEnvelope::Eip1559(tx) => format!(
"
accessList {}
blockHash {}
blockNumber {}
chainId {}
from {}
gasLimit {}
hash {}
input {}
maxFeePerGas {}
maxPriorityFeePerGas {}
nonce {}
r {}
s {}
to {}
transactionIndex {}
type {}
value {}
yParity {}",
self.inner
.access_list()
.map(|a| a.iter().collect::<Vec<_>>())
.unwrap_or_default()
.pretty(),
self.block_hash.pretty(),
self.block_number.pretty(),
self.chain_id().pretty(),
self.inner.signer().pretty(),
self.gas_limit().pretty(),
tx.hash().pretty(),
self.input().pretty(),
self.max_fee_per_gas().pretty(),
self.max_priority_fee_per_gas().pretty(),
self.nonce().pretty(),
FixedBytes::from(tx.signature().r()).pretty(),
FixedBytes::from(tx.signature().s()).pretty(),
self.to().pretty(),
self.transaction_index.pretty(),
self.inner.ty(),
self.value().pretty(),
(if tx.signature().v() { 1u64 } else { 0 }).pretty(),
),
TxEnvelope::Eip4844(tx) => format!(
"
accessList {}
blobVersionedHashes {}
blockHash {}
blockNumber {}
chainId {}
from {}
gasLimit {}
hash {}
input {}
maxFeePerBlobGas {}
maxFeePerGas {}
maxPriorityFeePerGas {}
nonce {}
r {}
s {}
to {}
transactionIndex {}
type {}
value {}
yParity {}",
self.inner
.access_list()
.map(|a| a.iter().collect::<Vec<_>>())
.unwrap_or_default()
.pretty(),
self.blob_versioned_hashes().unwrap_or(&[]).pretty(),
self.block_hash.pretty(),
self.block_number.pretty(),
self.chain_id().pretty(),
self.inner.signer().pretty(),
self.gas_limit().pretty(),
tx.hash().pretty(),
self.input().pretty(),
self.max_fee_per_blob_gas().pretty(),
self.max_fee_per_gas().pretty(),
self.max_priority_fee_per_gas().pretty(),
self.nonce().pretty(),
FixedBytes::from(tx.signature().r()).pretty(),
FixedBytes::from(tx.signature().s()).pretty(),
self.to().pretty(),
self.transaction_index.pretty(),
self.inner.ty(),
self.value().pretty(),
(if tx.signature().v() { 1u64 } else { 0 }).pretty(),
),
TxEnvelope::Eip7702(tx) => format!(
"
accessList {}
authorizationList {}
blockHash {}
blockNumber {}
chainId {}
from {}
gasLimit {}
hash {}
input {}
maxFeePerGas {}
maxPriorityFeePerGas {}
nonce {}
r {}
s {}
to {}
transactionIndex {}
type {}
value {}
yParity {}",
self.inner
.access_list()
.map(|a| a.iter().collect::<Vec<_>>())
.unwrap_or_default()
.pretty(),
self.authorization_list()
.as_ref()
.map(|l| l.iter().collect::<Vec<_>>())
.unwrap_or_default()
.pretty(),
self.block_hash.pretty(),
self.block_number.pretty(),
self.chain_id().pretty(),
self.inner.signer().pretty(),
self.gas_limit().pretty(),
tx.hash().pretty(),
self.input().pretty(),
self.max_fee_per_gas().pretty(),
self.max_priority_fee_per_gas().pretty(),
self.nonce().pretty(),
FixedBytes::from(tx.signature().r()).pretty(),
FixedBytes::from(tx.signature().s()).pretty(),
self.to().pretty(),
self.transaction_index.pretty(),
self.inner.ty(),
self.value().pretty(),
(if tx.signature().v() { 1u64 } else { 0 }).pretty(),
),
_ => format!(
"
blockHash {}
blockNumber {}
from {}
gas {}
gasPrice {}
hash {}
input {}
nonce {}
r {}
s {}
to {}
transactionIndex {}
v {}
value {}",
self.block_hash.pretty(),
self.block_number.pretty(),
self.inner.signer().pretty(),
self.gas_limit().pretty(),
self.gas_price().pretty(),
self.inner.tx_hash().pretty(),
self.input().pretty(),
self.nonce().pretty(),
self.inner
.as_legacy()
.map(|tx| FixedBytes::from(tx.signature().r()).pretty())
.unwrap_or_default(),
self.inner
.as_legacy()
.map(|tx| FixedBytes::from(tx.signature().s()).pretty())
.unwrap_or_default(),
self.to().pretty(),
self.transaction_index.pretty(),
self.inner
.as_legacy()
.map(|tx| (if tx.signature().v() { 1u64 } else { 0 }).pretty())
.unwrap_or_default(),
self.value().pretty(),
),
}
}
}
impl UIfmt for Transaction<AnyTxEnvelope> {
fn pretty(&self) -> String {
format!(
"
blockHash {}
blockNumber {}
from {}
transactionIndex {}
effectiveGasPrice {}
{}
",
self.block_hash.pretty(),
self.block_number.pretty(),
self.inner.signer().pretty(),
self.transaction_index.pretty(),
self.effective_gas_price.pretty(),
self.inner.pretty().trim_start(),
)
}
}
impl UIfmt for AnyRpcBlock {
fn pretty(&self) -> String {
self.0.pretty()
}
}
impl UIfmt for AnyRpcTransaction {
fn pretty(&self) -> String {
self.0.pretty()
}
}
impl<T: UIfmt> UIfmt for WithOtherFields<T> {
fn pretty(&self) -> String {
format!("{}{}", self.inner.pretty(), self.other.pretty())
}
}
/// Various numerical ethereum types used for pretty printing
#[derive(Clone, Debug, Deserialize)]
#[serde(untagged)]
#[expect(missing_docs)]
pub enum EthValue {
U64(U64),
Address(Address),
U256(U256),
U64Array(Vec<U64>),
U256Array(Vec<U256>),
Other(serde_json::Value),
}
impl From<serde_json::Value> for EthValue {
fn from(val: serde_json::Value) -> Self {
serde_json::from_value(val).expect("infallible")
}
}
impl UIfmt for EthValue {
fn pretty(&self) -> String {
match self {
Self::U64(num) => num.pretty(),
Self::U256(num) => num.pretty(),
Self::Address(addr) => addr.pretty(),
Self::U64Array(arr) => arr.pretty(),
Self::U256Array(arr) => arr.pretty(),
Self::Other(val) => val.to_string().trim_matches('"').to_string(),
}
}
}
impl UIfmt for SignedAuthorization {
fn pretty(&self) -> String {
let signed_authorization = serde_json::to_string(self).unwrap_or("<invalid>".to_string());
match self.recover_authority() {
Ok(authority) => format!(
"{{recoveredAuthority: {authority}, signedAuthority: {signed_authorization}}}",
),
Err(e) => format!(
"{{recoveredAuthority: <error: {e}>, signedAuthority: {signed_authorization}}}",
),
}
}
}
impl<T> UIfmt for FoundryReceiptEnvelope<T>
where
T: UIfmt + Clone + core::fmt::Debug + PartialEq + Eq,
{
fn pretty(&self) -> String {
let receipt = self.as_receipt();
let deposit_info = match self {
Self::Deposit(d) => {
format!(
"
depositNonce {}
depositReceiptVersion {}",
d.receipt.deposit_nonce.pretty(),
d.receipt.deposit_receipt_version.pretty()
)
}
_ => String::new(),
};
format!(
"
status {}
cumulativeGasUsed {}
logs {}
logsBloom {}
type {}{}",
receipt.status.pretty(),
receipt.cumulative_gas_used.pretty(),
receipt.logs.pretty(),
self.logs_bloom().pretty(),
self.tx_type() as u8,
deposit_info
)
}
}
impl UIfmt for FoundryTxReceipt {
fn pretty(&self) -> String {
let receipt = &self.0.inner;
let other = &self.0.other;
let mut pretty = format!(
"
blockHash {}
blockNumber {}
contractAddress {}
cumulativeGasUsed {}
effectiveGasPrice {}
from {}
gasUsed {}
logs {}
logsBloom {}
root {}
status {}
transactionHash {}
transactionIndex {}
type {}
blobGasPrice {}
blobGasUsed {}",
receipt.block_hash.pretty(),
receipt.block_number.pretty(),
receipt.contract_address.pretty(),
receipt.inner.cumulative_gas_used().pretty(),
receipt.effective_gas_price.pretty(),
receipt.from.pretty(),
receipt.gas_used.pretty(),
serde_json::to_string(receipt.inner.logs()).unwrap(),
receipt.inner.logs_bloom().pretty(),
self.state_root().pretty(),
receipt.inner.status().pretty(),
receipt.transaction_hash.pretty(),
receipt.transaction_index.pretty(),
receipt.inner.tx_type() as u8,
receipt.blob_gas_price.pretty(),
receipt.blob_gas_used.pretty()
);
if let Some(to) = receipt.to {
pretty.push_str(&format!("\nto {}", to.pretty()));
}
// additional captured fields
pretty.push_str(&other.pretty());
pretty
}
}
/// Returns the `UiFmt::pretty()` formatted attribute of the transactions
pub fn get_pretty_tx_attr(transaction: &Transaction<AnyTxEnvelope>, attr: &str) -> Option<String> {
let sig = match &transaction.inner.inner() {
AnyTxEnvelope::Ethereum(envelope) => match &envelope {
TxEnvelope::Eip2930(tx) => Some(tx.signature()),
TxEnvelope::Eip1559(tx) => Some(tx.signature()),
TxEnvelope::Eip4844(tx) => Some(tx.signature()),
TxEnvelope::Eip7702(tx) => Some(tx.signature()),
TxEnvelope::Legacy(tx) => Some(tx.signature()),
},
_ => None,
};
match attr {
"blockHash" | "block_hash" => Some(transaction.block_hash.pretty()),
"blockNumber" | "block_number" => Some(transaction.block_number.pretty()),
"from" => Some(transaction.inner.signer().pretty()),
"gas" => Some(transaction.gas_limit().pretty()),
"gasPrice" | "gas_price" => Some(Transaction::gas_price(transaction).pretty()),
"hash" => Some(alloy_network::TransactionResponse::tx_hash(transaction).pretty()),
"input" => Some(transaction.input().pretty()),
"nonce" => Some(transaction.nonce().to_string()),
"s" => sig.map(|s| FixedBytes::from(s.s()).pretty()),
"r" => sig.map(|s| FixedBytes::from(s.r()).pretty()),
"to" => Some(transaction.to().pretty()),
"transactionIndex" | "transaction_index" => Some(transaction.transaction_index.pretty()),
"v" => sig.map(|s| U8::from_be_slice(&s.as_bytes()[64..]).pretty()),
"value" => Some(transaction.value().pretty()),
_ => None,
}
}
/// Returns the `UiFmt::pretty()` formatted attribute of the given block
pub fn get_pretty_block_attr(block: &AnyRpcBlock, attr: &str) -> Option<String> {
match attr {
"baseFeePerGas" | "base_fee_per_gas" => Some(block.header.base_fee_per_gas.pretty()),
"difficulty" => Some(block.header.difficulty.pretty()),
"extraData" | "extra_data" => Some(block.header.extra_data.pretty()),
"gasLimit" | "gas_limit" => Some(block.header.gas_limit.pretty()),
"gasUsed" | "gas_used" => Some(block.header.gas_used.pretty()),
"hash" => Some(block.header.hash.pretty()),
"logsBloom" | "logs_bloom" => Some(block.header.logs_bloom.pretty()),
"miner" | "author" => Some(block.header.inner.beneficiary.pretty()),
"mixHash" | "mix_hash" => Some(block.header.mix_hash.pretty()),
"nonce" => Some(block.header.nonce.pretty()),
"number" => Some(block.header.number.pretty()),
"parentHash" | "parent_hash" => Some(block.header.parent_hash.pretty()),
"transactionsRoot" | "transactions_root" => Some(block.header.transactions_root.pretty()),
"receiptsRoot" | "receipts_root" => Some(block.header.receipts_root.pretty()),
"sha3Uncles" | "sha_3_uncles" => Some(block.header.ommers_hash.pretty()),
"size" => Some(block.header.size.pretty()),
"stateRoot" | "state_root" => Some(block.header.state_root.pretty()),
"timestamp" => Some(block.header.timestamp.pretty()),
"totalDifficulty" | "total_difficult" => Some(block.header.total_difficulty.pretty()),
"blobGasUsed" | "blob_gas_used" => Some(block.header.blob_gas_used.pretty()),
"excessBlobGas" | "excess_blob_gas" => Some(block.header.excess_blob_gas.pretty()),
"requestsHash" | "requests_hash" => Some(block.header.requests_hash.pretty()),
other => {
if let Some(value) = block.other.get(other) {
let val = EthValue::from(value.clone());
return Some(val.pretty());
}
None
}
}
}
fn pretty_block_basics<T>(block: &Block<T, alloy_rpc_types::Header<AnyHeader>>) -> String {
let Block {
header:
Header {
hash,
size,
total_difficulty,
inner:
AnyHeader {
parent_hash,
ommers_hash,
beneficiary,
state_root,
transactions_root,
receipts_root,
logs_bloom,
difficulty,
number,
gas_limit,
gas_used,
timestamp,
extra_data,
mix_hash,
nonce,
base_fee_per_gas,
withdrawals_root,
blob_gas_used,
excess_blob_gas,
parent_beacon_block_root,
requests_hash,
},
},
uncles: _,
transactions: _,
withdrawals: _,
} = block;
format!(
"
baseFeePerGas {}
difficulty {}
extraData {}
gasLimit {}
gasUsed {}
hash {}
logsBloom {}
miner {}
mixHash {}
nonce {}
number {}
parentHash {}
parentBeaconRoot {}
transactionsRoot {}
receiptsRoot {}
sha3Uncles {}
size {}
stateRoot {}
timestamp {} ({})
withdrawalsRoot {}
totalDifficulty {}
blobGasUsed {}
excessBlobGas {}
requestsHash {}",
base_fee_per_gas.pretty(),
difficulty.pretty(),
extra_data.pretty(),
gas_limit.pretty(),
gas_used.pretty(),
hash.pretty(),
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/fmt/src/console.rs | crates/common/fmt/src/console.rs | use super::UIfmt;
use alloy_primitives::{Address, Bytes, FixedBytes, I256, U256};
use std::fmt::{self, Write};
/// A piece is a portion of the format string which represents the next part to emit.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Piece<'a> {
/// A literal string which should directly be emitted.
String(&'a str),
/// A format specifier which should be replaced with the next argument.
NextArgument(FormatSpec),
}
/// A format specifier.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub enum FormatSpec {
/// `%s`
#[default]
String,
/// `%d`
Number,
/// `%i`
Integer,
/// `%o`
Object,
/// `%e`, `%18e`
Exponential(Option<usize>),
/// `%x`
Hexadecimal,
}
impl fmt::Display for FormatSpec {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("%")?;
match *self {
Self::String => f.write_str("s"),
Self::Number => f.write_str("d"),
Self::Integer => f.write_str("i"),
Self::Object => f.write_str("o"),
Self::Exponential(Some(n)) => write!(f, "{n}e"),
Self::Exponential(None) => f.write_str("e"),
Self::Hexadecimal => f.write_str("x"),
}
}
}
enum ParseArgError {
/// Failed to parse the argument.
Err,
/// Escape `%%`.
Skip,
}
/// Parses a format string into a sequence of [pieces][Piece].
#[derive(Debug)]
pub struct Parser<'a> {
input: &'a str,
chars: std::str::CharIndices<'a>,
}
impl<'a> Parser<'a> {
/// Creates a new parser for the given input.
pub fn new(input: &'a str) -> Self {
Self { input, chars: input.char_indices() }
}
/// Parses a string until the next format specifier.
///
/// `skip` is the number of format specifier characters (`%`) to ignore before returning the
/// string.
fn string(&mut self, start: usize, mut skip: usize) -> &'a str {
while let Some((pos, c)) = self.peek() {
if c == '%' {
if skip == 0 {
return &self.input[start..pos];
}
skip -= 1;
}
self.chars.next();
}
&self.input[start..]
}
/// Parses a format specifier.
///
/// If `Err` is returned, the internal iterator may have been advanced and it may be in an
/// invalid state.
fn argument(&mut self) -> Result<FormatSpec, ParseArgError> {
let (start, ch) = self.peek().ok_or(ParseArgError::Err)?;
let simple_spec = match ch {
's' => Some(FormatSpec::String),
'd' => Some(FormatSpec::Number),
'i' => Some(FormatSpec::Integer),
'o' => Some(FormatSpec::Object),
'e' => Some(FormatSpec::Exponential(None)),
'x' => Some(FormatSpec::Hexadecimal),
// "%%" is a literal '%'.
'%' => return Err(ParseArgError::Skip),
_ => None,
};
if let Some(spec) = simple_spec {
self.chars.next();
return Ok(spec);
}
// %<n>e
if ch.is_ascii_digit() {
let n = self.integer(start);
if let Some((_, 'e')) = self.peek() {
self.chars.next();
return Ok(FormatSpec::Exponential(n));
}
}
Err(ParseArgError::Err)
}
fn integer(&mut self, start: usize) -> Option<usize> {
let mut end = start;
while let Some((pos, ch)) = self.peek() {
if !ch.is_ascii_digit() {
end = pos;
break;
}
self.chars.next();
}
self.input[start..end].parse().ok()
}
fn current_pos(&mut self) -> usize {
self.peek().map(|(n, _)| n).unwrap_or(self.input.len())
}
fn peek(&mut self) -> Option<(usize, char)> {
self.peek_n(0)
}
fn peek_n(&mut self, n: usize) -> Option<(usize, char)> {
self.chars.clone().nth(n)
}
}
impl<'a> Iterator for Parser<'a> {
type Item = Piece<'a>;
fn next(&mut self) -> Option<Self::Item> {
let (mut start, ch) = self.peek()?;
let mut skip = 0;
if ch == '%' {
let prev = self.chars.clone();
self.chars.next();
match self.argument() {
Ok(arg) => {
debug_assert_eq!(arg.to_string(), self.input[start..self.current_pos()]);
return Some(Piece::NextArgument(arg));
}
// Skip the argument if we encountered "%%".
Err(ParseArgError::Skip) => {
start = self.current_pos();
skip += 1;
}
// Reset the iterator if we failed to parse the argument, and include any
// parsed and unparsed specifier in `String`.
Err(ParseArgError::Err) => {
self.chars = prev;
skip += 1;
}
}
}
Some(Piece::String(self.string(start, skip)))
}
}
/// Formats a value using a [FormatSpec].
pub trait ConsoleFmt {
/// Formats a value using a [FormatSpec].
fn fmt(&self, spec: FormatSpec) -> String;
}
impl ConsoleFmt for String {
fn fmt(&self, spec: FormatSpec) -> String {
match spec {
FormatSpec::String => self.clone(),
FormatSpec::Object => format!("'{}'", self.clone()),
FormatSpec::Number
| FormatSpec::Integer
| FormatSpec::Exponential(_)
| FormatSpec::Hexadecimal => Self::from("NaN"),
}
}
}
impl ConsoleFmt for bool {
fn fmt(&self, spec: FormatSpec) -> String {
match spec {
FormatSpec::String => self.pretty(),
FormatSpec::Object => format!("'{}'", self.pretty()),
FormatSpec::Number => (*self as i32).to_string(),
FormatSpec::Integer | FormatSpec::Exponential(_) | FormatSpec::Hexadecimal => {
String::from("NaN")
}
}
}
}
impl ConsoleFmt for U256 {
fn fmt(&self, spec: FormatSpec) -> String {
match spec {
FormatSpec::String | FormatSpec::Object | FormatSpec::Number | FormatSpec::Integer => {
self.pretty()
}
FormatSpec::Hexadecimal => {
let hex = format!("{self:x}");
format!("0x{}", hex.trim_start_matches('0'))
}
FormatSpec::Exponential(None) => {
let log = self.pretty().len() - 1;
let exp10 = Self::from(10).pow(Self::from(log));
let amount = *self;
let integer = amount / exp10;
let decimal = (amount % exp10).to_string();
let decimal = format!("{decimal:0>log$}").trim_end_matches('0').to_string();
if !decimal.is_empty() {
format!("{integer}.{decimal}e{log}")
} else {
format!("{integer}e{log}")
}
}
FormatSpec::Exponential(Some(precision)) => {
let exp10 = Self::from(10).pow(Self::from(precision));
let amount = *self;
let integer = amount / exp10;
let decimal = (amount % exp10).to_string();
let decimal = format!("{decimal:0>precision$}").trim_end_matches('0').to_string();
if !decimal.is_empty() {
format!("{integer}.{decimal}")
} else {
format!("{integer}")
}
}
}
}
}
impl ConsoleFmt for I256 {
fn fmt(&self, spec: FormatSpec) -> String {
match spec {
FormatSpec::String | FormatSpec::Object | FormatSpec::Number | FormatSpec::Integer => {
self.pretty()
}
FormatSpec::Hexadecimal => {
let hex = format!("{self:x}");
format!("0x{}", hex.trim_start_matches('0'))
}
FormatSpec::Exponential(None) => {
let amount = *self;
let sign = if amount.is_negative() { "-" } else { "" };
let log = if amount.is_negative() {
self.pretty().len() - 2
} else {
self.pretty().len() - 1
};
let exp10 = Self::exp10(log);
let integer = (amount / exp10).twos_complement();
let decimal = (amount % exp10).twos_complement().to_string();
let decimal = format!("{decimal:0>log$}").trim_end_matches('0').to_string();
if !decimal.is_empty() {
format!("{sign}{integer}.{decimal}e{log}")
} else {
format!("{sign}{integer}e{log}")
}
}
FormatSpec::Exponential(Some(precision)) => {
let amount = *self;
let sign = if amount.is_negative() { "-" } else { "" };
let exp10 = Self::exp10(precision);
let integer = (amount / exp10).twos_complement();
let decimal = (amount % exp10).twos_complement().to_string();
let decimal = format!("{decimal:0>precision$}").trim_end_matches('0').to_string();
if !decimal.is_empty() {
format!("{sign}{integer}.{decimal}")
} else {
format!("{sign}{integer}")
}
}
}
}
}
impl ConsoleFmt for Address {
fn fmt(&self, spec: FormatSpec) -> String {
match spec {
FormatSpec::String | FormatSpec::Hexadecimal => self.pretty(),
FormatSpec::Object => format!("'{}'", self.pretty()),
FormatSpec::Number | FormatSpec::Integer | FormatSpec::Exponential(_) => {
String::from("NaN")
}
}
}
}
impl ConsoleFmt for Vec<u8> {
fn fmt(&self, spec: FormatSpec) -> String {
self[..].fmt(spec)
}
}
impl ConsoleFmt for Bytes {
fn fmt(&self, spec: FormatSpec) -> String {
self[..].fmt(spec)
}
}
impl<const N: usize> ConsoleFmt for [u8; N] {
fn fmt(&self, spec: FormatSpec) -> String {
self[..].fmt(spec)
}
}
impl<const N: usize> ConsoleFmt for FixedBytes<N> {
fn fmt(&self, spec: FormatSpec) -> String {
self[..].fmt(spec)
}
}
impl ConsoleFmt for [u8] {
fn fmt(&self, spec: FormatSpec) -> String {
match spec {
FormatSpec::String | FormatSpec::Hexadecimal => self.pretty(),
FormatSpec::Object => format!("'{}'", self.pretty()),
FormatSpec::Number | FormatSpec::Integer | FormatSpec::Exponential(_) => {
String::from("NaN")
}
}
}
}
/// Formats a string using the input values.
///
/// Formatting rules are the same as Hardhat. The supported format specifiers are as follows:
/// - %s: Converts the value using its String representation. This is equivalent to applying
/// [`UIfmt::pretty()`] on the format string.
/// - %o: Treats the format value as a javascript "object" and converts it to its string
/// representation.
/// - %d, %i: Converts the value to an integer. If a non-numeric value, such as String or Address,
/// is passed, then the spec is formatted as `NaN`.
/// - %x: Converts the value to a hexadecimal string. If a non-numeric value, such as String or
/// Address, is passed, then the spec is formatted as `NaN`.
/// - %e: Converts the value to an exponential notation string. If a non-numeric value, such as
/// String or Address, is passed, then the spec is formatted as `NaN`.
/// - %%: This is parsed as a single percent sign ('%') without consuming any input value.
///
/// Unformatted values are appended to the end of the formatted output using [`UIfmt::pretty()`].
/// If there are more format specifiers than values, then the remaining unparsed format specifiers
/// appended to the formatted output as-is.
///
/// # Examples
///
/// ```ignore (not implemented for integers)
/// let formatted = foundry_common::fmt::console_format("%s has %d characters", &[&"foo", &3]);
/// assert_eq!(formatted, "foo has 3 characters");
/// ```
pub fn console_format(spec: &str, values: &[&dyn ConsoleFmt]) -> String {
let mut values = values.iter().copied();
let mut result = String::with_capacity(spec.len());
// for the first space
let mut write_space = if spec.is_empty() {
false
} else {
format_spec(spec, &mut values, &mut result);
true
};
// append any remaining values with the standard format
for v in values {
let fmt = v.fmt(FormatSpec::String);
if write_space {
result.push(' ');
}
result.push_str(&fmt);
write_space = true;
}
result
}
fn format_spec<'a>(
s: &str,
mut values: impl Iterator<Item = &'a dyn ConsoleFmt>,
result: &mut String,
) {
for piece in Parser::new(s) {
match piece {
Piece::String(s) => result.push_str(s),
Piece::NextArgument(spec) => {
if let Some(value) = values.next() {
result.push_str(&value.fmt(spec));
} else {
// Write the format specifier as-is if there are no more values.
write!(result, "{spec}").unwrap();
}
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use alloy_primitives::{B256, address};
use foundry_macros::ConsoleFmt;
use std::str::FromStr;
macro_rules! logf1 {
($a:ident) => {
console_format(&$a.p_0, &[&$a.p_1])
};
}
macro_rules! logf2 {
($a:ident) => {
console_format(&$a.p_0, &[&$a.p_1, &$a.p_2])
};
}
macro_rules! logf3 {
($a:ident) => {
console_format(&$a.p_0, &[&$a.p_1, &$a.p_2, &$a.p_3])
};
}
#[derive(Clone, Debug, ConsoleFmt)]
struct Log1 {
p_0: String,
p_1: U256,
}
#[derive(Clone, Debug, ConsoleFmt)]
struct Log2 {
p_0: String,
p_1: bool,
p_2: U256,
}
#[derive(Clone, Debug, ConsoleFmt)]
struct Log3 {
p_0: String,
p_1: Address,
p_2: bool,
p_3: U256,
}
#[expect(unused)]
#[derive(Clone, Debug, ConsoleFmt)]
enum Logs {
Log1(Log1),
Log2(Log2),
Log3(Log3),
}
#[test]
fn test_console_log_format_specifiers() {
let fmt_1 = |spec: &str, arg: &dyn ConsoleFmt| console_format(spec, &[arg]);
assert_eq!("foo", fmt_1("%s", &String::from("foo")));
assert_eq!("NaN", fmt_1("%d", &String::from("foo")));
assert_eq!("NaN", fmt_1("%i", &String::from("foo")));
assert_eq!("NaN", fmt_1("%e", &String::from("foo")));
assert_eq!("NaN", fmt_1("%x", &String::from("foo")));
assert_eq!("'foo'", fmt_1("%o", &String::from("foo")));
assert_eq!("%s foo", fmt_1("%%s", &String::from("foo")));
assert_eq!("% foo", fmt_1("%", &String::from("foo")));
assert_eq!("% foo", fmt_1("%%", &String::from("foo")));
assert_eq!("true", fmt_1("%s", &true));
assert_eq!("1", fmt_1("%d", &true));
assert_eq!("0", fmt_1("%d", &false));
assert_eq!("NaN", fmt_1("%i", &true));
assert_eq!("NaN", fmt_1("%e", &true));
assert_eq!("NaN", fmt_1("%x", &true));
assert_eq!("'true'", fmt_1("%o", &true));
let b32 =
B256::from_str("0xdeadbeef00000000000000000000000000000000000000000000000000000000")
.unwrap();
assert_eq!(
"0xdeadbeef00000000000000000000000000000000000000000000000000000000",
fmt_1("%s", &b32)
);
assert_eq!(
"0xdeadbeef00000000000000000000000000000000000000000000000000000000",
fmt_1("%x", &b32)
);
assert_eq!("NaN", fmt_1("%d", &b32));
assert_eq!("NaN", fmt_1("%i", &b32));
assert_eq!("NaN", fmt_1("%e", &b32));
assert_eq!(
"'0xdeadbeef00000000000000000000000000000000000000000000000000000000'",
fmt_1("%o", &b32)
);
let addr = address!("0xdEADBEeF00000000000000000000000000000000");
assert_eq!("0xdEADBEeF00000000000000000000000000000000", fmt_1("%s", &addr));
assert_eq!("NaN", fmt_1("%d", &addr));
assert_eq!("NaN", fmt_1("%i", &addr));
assert_eq!("NaN", fmt_1("%e", &addr));
assert_eq!("0xdEADBEeF00000000000000000000000000000000", fmt_1("%x", &addr));
assert_eq!("'0xdEADBEeF00000000000000000000000000000000'", fmt_1("%o", &addr));
let bytes = Bytes::from_str("0xdeadbeef").unwrap();
assert_eq!("0xdeadbeef", fmt_1("%s", &bytes));
assert_eq!("NaN", fmt_1("%d", &bytes));
assert_eq!("NaN", fmt_1("%i", &bytes));
assert_eq!("NaN", fmt_1("%e", &bytes));
assert_eq!("0xdeadbeef", fmt_1("%x", &bytes));
assert_eq!("'0xdeadbeef'", fmt_1("%o", &bytes));
assert_eq!("100", fmt_1("%s", &U256::from(100)));
assert_eq!("100", fmt_1("%d", &U256::from(100)));
assert_eq!("100", fmt_1("%i", &U256::from(100)));
assert_eq!("1e2", fmt_1("%e", &U256::from(100)));
assert_eq!("1.0023e6", fmt_1("%e", &U256::from(1002300)));
assert_eq!("1.23e5", fmt_1("%e", &U256::from(123000)));
assert_eq!("0x64", fmt_1("%x", &U256::from(100)));
assert_eq!("100", fmt_1("%o", &U256::from(100)));
assert_eq!("100", fmt_1("%s", &I256::try_from(100).unwrap()));
assert_eq!("100", fmt_1("%d", &I256::try_from(100).unwrap()));
assert_eq!("100", fmt_1("%i", &I256::try_from(100).unwrap()));
assert_eq!("1e2", fmt_1("%e", &I256::try_from(100).unwrap()));
assert_eq!("-1e2", fmt_1("%e", &I256::try_from(-100).unwrap()));
assert_eq!("-1.0023e6", fmt_1("%e", &I256::try_from(-1002300).unwrap()));
assert_eq!("-1.23e5", fmt_1("%e", &I256::try_from(-123000).unwrap()));
assert_eq!("1.0023e6", fmt_1("%e", &I256::try_from(1002300).unwrap()));
assert_eq!("1.23e5", fmt_1("%e", &I256::try_from(123000).unwrap()));
// %ne
assert_eq!("10", fmt_1("%1e", &I256::try_from(100).unwrap()));
assert_eq!("-1", fmt_1("%2e", &I256::try_from(-100).unwrap()));
assert_eq!("123000", fmt_1("%0e", &I256::try_from(123000).unwrap()));
assert_eq!("12300", fmt_1("%1e", &I256::try_from(123000).unwrap()));
assert_eq!("0.0123", fmt_1("%7e", &I256::try_from(123000).unwrap()));
assert_eq!("-0.0123", fmt_1("%7e", &I256::try_from(-123000).unwrap()));
assert_eq!("0x64", fmt_1("%x", &I256::try_from(100).unwrap()));
assert_eq!(
"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9c",
fmt_1("%x", &I256::try_from(-100).unwrap())
);
assert_eq!(
"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffe8b7891800",
fmt_1("%x", &I256::try_from(-100000000000i64).unwrap())
);
assert_eq!("100", fmt_1("%o", &I256::try_from(100).unwrap()));
// make sure that %byte values are not consumed when there are no values
assert_eq!("%333d%3e%5F", console_format("%333d%3e%5F", &[]));
assert_eq!(
"%5d123456.789%2f%3f%e1",
console_format("%5d%3e%2f%3f%e1", &[&U256::from(123456789)])
);
}
#[test]
fn test_console_log_format() {
let mut log1 = Log1 { p_0: "foo %s".to_string(), p_1: U256::from(100) };
assert_eq!("foo 100", logf1!(log1));
log1.p_0 = String::from("foo");
assert_eq!("foo 100", logf1!(log1));
log1.p_0 = String::from("%s foo");
assert_eq!("100 foo", logf1!(log1));
let mut log2 = Log2 { p_0: "foo %s %s".to_string(), p_1: true, p_2: U256::from(100) };
assert_eq!("foo true 100", logf2!(log2));
log2.p_0 = String::from("foo");
assert_eq!("foo true 100", logf2!(log2));
log2.p_0 = String::from("%s %s foo");
assert_eq!("true 100 foo", logf2!(log2));
let log3 = Log3 {
p_0: String::from("foo %s %%s %s and %d foo %%"),
p_1: address!("0xdEADBEeF00000000000000000000000000000000"),
p_2: true,
p_3: U256::from(21),
};
assert_eq!(
"foo 0xdEADBEeF00000000000000000000000000000000 %s true and 21 foo %",
logf3!(log3)
);
// %ne
let log4 = Log1 { p_0: String::from("%5e"), p_1: U256::from(123456789) };
assert_eq!("1234.56789", logf1!(log4));
let log5 = Log1 { p_0: String::from("foo %3e bar"), p_1: U256::from(123456789) };
assert_eq!("foo 123456.789 bar", logf1!(log5));
let log6 =
Log2 { p_0: String::from("%e and %12e"), p_1: false, p_2: U256::from(123456789) };
assert_eq!("NaN and 0.000123456789", logf2!(log6));
}
#[test]
fn test_derive_format() {
let log1 = Log1 { p_0: String::from("foo %s bar"), p_1: U256::from(42) };
assert_eq!(log1.fmt(Default::default()), "foo 42 bar");
let call = Logs::Log1(log1);
assert_eq!(call.fmt(Default::default()), "foo 42 bar");
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/common/fmt/src/exp.rs | crates/common/fmt/src/exp.rs | use alloy_primitives::{I256, Sign, U256};
use yansi::Paint;
/// Returns the number expressed as a string in exponential notation
/// with the given precision (number of significant figures),
/// optionally removing trailing zeros from the mantissa.
///
/// Examples:
///
/// ```text
/// precision = 4, trim_end_zeroes = false
/// 1234124124 -> 1.234e9
/// 10000000 -> 1.000e7
/// precision = 3, trim_end_zeroes = true
/// 1234124124 -> 1.23e9
/// 10000000 -> 1e7
/// ```
pub fn to_exp_notation(value: U256, precision: usize, trim_end_zeros: bool, sign: Sign) -> String {
let stringified = value.to_string();
let exponent = stringified.len() - 1;
let mut mantissa = stringified.chars().take(precision).collect::<String>();
// optionally remove trailing zeros
if trim_end_zeros {
mantissa = mantissa.trim_end_matches('0').to_string();
}
// Place a decimal point only if needed
// e.g. 1234 -> 1.234e3 (needed)
// 5 -> 5 (not needed)
if mantissa.len() > 1 {
mantissa.insert(1, '.');
}
format!("{sign}{mantissa}e{exponent}")
}
/// Formats a U256 number to string, adding an exponential notation _hint_ if it
/// is larger than `10_000`, with a precision of `4` figures, and trimming the
/// trailing zeros.
///
/// # Examples
///
/// ```
/// use alloy_primitives::U256;
/// use foundry_common_fmt::format_uint_exp as f;
///
/// # yansi::disable();
/// assert_eq!(f(U256::from(0)), "0");
/// assert_eq!(f(U256::from(1234)), "1234");
/// assert_eq!(f(U256::from(1234567890)), "1234567890 [1.234e9]");
/// assert_eq!(f(U256::from(1000000000000000000_u128)), "1000000000000000000 [1e18]");
/// assert_eq!(f(U256::from(10000000000000000000000_u128)), "10000000000000000000000 [1e22]");
/// ```
pub fn format_uint_exp(num: U256) -> String {
if num < U256::from(10_000) {
return num.to_string();
}
let exp = to_exp_notation(num, 4, true, Sign::Positive);
format!("{num} {}", format!("[{exp}]").dim())
}
/// Formats a U256 number to string, adding an exponential notation _hint_.
///
/// Same as [`format_uint_exp`].
///
/// # Examples
///
/// ```
/// use alloy_primitives::I256;
/// use foundry_common_fmt::format_int_exp as f;
///
/// # yansi::disable();
/// assert_eq!(f(I256::try_from(0).unwrap()), "0");
/// assert_eq!(f(I256::try_from(-1).unwrap()), "-1");
/// assert_eq!(f(I256::try_from(1234).unwrap()), "1234");
/// assert_eq!(f(I256::try_from(1234567890).unwrap()), "1234567890 [1.234e9]");
/// assert_eq!(f(I256::try_from(-1234567890).unwrap()), "-1234567890 [-1.234e9]");
/// assert_eq!(f(I256::try_from(1000000000000000000_u128).unwrap()), "1000000000000000000 [1e18]");
/// assert_eq!(
/// f(I256::try_from(10000000000000000000000_u128).unwrap()),
/// "10000000000000000000000 [1e22]"
/// );
/// assert_eq!(
/// f(I256::try_from(-10000000000000000000000_i128).unwrap()),
/// "-10000000000000000000000 [-1e22]"
/// );
/// ```
pub fn format_int_exp(num: I256) -> String {
let (sign, abs) = num.into_sign_and_abs();
if abs < U256::from(10_000) {
return format!("{sign}{abs}");
}
let exp = to_exp_notation(abs, 4, true, sign);
format!("{sign}{abs} {}", format!("[{exp}]").dim())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_format_to_exponential_notation() {
let value = 1234124124u64;
let formatted = to_exp_notation(U256::from(value), 4, false, Sign::Positive);
assert_eq!(formatted, "1.234e9");
let formatted = to_exp_notation(U256::from(value), 3, true, Sign::Positive);
assert_eq!(formatted, "1.23e9");
let value = 10000000u64;
let formatted = to_exp_notation(U256::from(value), 4, false, Sign::Positive);
assert_eq!(formatted, "1.000e7");
let formatted = to_exp_notation(U256::from(value), 3, true, Sign::Positive);
assert_eq!(formatted, "1e7");
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/linking/src/lib.rs | crates/linking/src/lib.rs | //! # foundry-linking
//!
//! EVM bytecode linker.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
use alloy_primitives::{Address, B256, Bytes};
use foundry_compilers::{
Artifact, ArtifactId,
artifacts::{CompactBytecode, CompactContractBytecodeCow, Libraries},
contracts::ArtifactContracts,
};
use rayon::prelude::*;
use semver::Version;
use std::{
collections::{BTreeMap, BTreeSet},
path::{Path, PathBuf},
str::FromStr,
};
/// Errors that can occur during linking.
#[derive(Debug, thiserror::Error)]
pub enum LinkerError {
    /// A library referenced by link references has no matching artifact in the provided set.
    #[error("wasn't able to find artifact for library {name} at {file}")]
    MissingLibraryArtifact { file: String, name: String },
    /// The artifact requested for linking is not present in the provided artifacts set.
    #[error("target artifact is not present in provided artifacts set")]
    MissingTargetArtifact,
    /// A library address string failed to parse as an [`Address`] (see `Linker::link`).
    #[error(transparent)]
    InvalidAddress(<Address as std::str::FromStr>::Err),
    /// CREATE2 linking needs a deployable ordering, which a dependency cycle makes impossible.
    #[error("cyclic dependency found, can't link libraries via CREATE2")]
    CyclicDependency,
    /// Bytecode still contained unresolved link references (see `Linker::ensure_linked`).
    #[error("failed linking {artifact}")]
    LinkingFailed { artifact: String },
}
/// Links compilation artifacts against library addresses, resolving and (if needed)
/// scheduling library deployments.
pub struct Linker<'a> {
    /// Root of the project, used to determine whether artifact/library path can be stripped.
    pub root: PathBuf,
    /// Compilation artifacts.
    pub contracts: ArtifactContracts<CompactContractBytecodeCow<'a>>,
}
/// Output of [`Linker::link_with_nonce_or_address`] and [`Linker::link_with_create2`].
pub struct LinkOutput {
    /// Resolved library addresses. Contains both user-provided and newly deployed libraries.
    /// It will always contain library paths with stripped path prefixes.
    pub libraries: Libraries,
    /// Vector of libraries that need to be deployed from sender address.
    /// The order in which they appear in the vector is the order in which they should be deployed.
    pub libs_to_deploy: Vec<Bytes>,
}
impl<'a> Linker<'a> {
    /// Creates a new linker for the project rooted at `root` over the given artifacts.
    pub fn new(
        root: impl Into<PathBuf>,
        contracts: ArtifactContracts<CompactContractBytecodeCow<'a>>,
    ) -> Self {
        Linker { root: root.into(), contracts }
    }

    /// Helper method to convert [ArtifactId] to the format in which libraries are stored in
    /// [Libraries] object.
    ///
    /// Strips project root path from source file path.
    fn convert_artifact_id_to_lib_path(&self, id: &ArtifactId) -> (PathBuf, String) {
        // Sources outside the project root are kept as-is.
        let path = id.source.strip_prefix(self.root.as_path()).unwrap_or(&id.source);
        // name is either {LibName} or {LibName}.{version}
        let name = id.name.split('.').next().unwrap();
        (path.to_path_buf(), name.to_owned())
    }

    /// Finds an [ArtifactId] object in the given [ArtifactContracts] keys which corresponds to the
    /// library path in the form of "./path/to/Lib.sol:Lib"
    ///
    /// Optionally accepts solc version, and if present, only compares artifacts with given version.
    fn find_artifact_id_by_library_path(
        &'a self,
        file: &str,
        name: &str,
        version: Option<&Version>,
    ) -> Option<&'a ArtifactId> {
        for id in self.contracts.keys() {
            // Skip artifacts compiled with a different solc version, if one was requested.
            if let Some(version) = version
                && id.version != *version
            {
                continue;
            }
            let (artifact_path, artifact_name) = self.convert_artifact_id_to_lib_path(id);
            if artifact_name == *name && artifact_path == Path::new(file) {
                return Some(id);
            }
        }
        None
    }

    /// Performs DFS on the graph of link references, and populates `deps` with all found libraries.
    fn collect_dependencies(
        &'a self,
        target: &'a ArtifactId,
        deps: &mut BTreeSet<&'a ArtifactId>,
    ) -> Result<(), LinkerError> {
        let contract = self.contracts.get(target).ok_or(LinkerError::MissingTargetArtifact)?;
        // Union of link references from both creation and runtime bytecode,
        // grouped as source file -> set of library names.
        let mut references: BTreeMap<String, BTreeSet<String>> = BTreeMap::new();
        let mut extend = |bytecode: &CompactBytecode| {
            for (file, libs) in &bytecode.link_references {
                references.entry(file.clone()).or_default().extend(libs.keys().cloned());
            }
        };
        if let Some(bytecode) = &contract.bytecode {
            extend(bytecode);
        }
        if let Some(deployed_bytecode) = &contract.deployed_bytecode
            && let Some(bytecode) = &deployed_bytecode.bytecode
        {
            extend(bytecode);
        }
        for (file, libs) in references {
            for name in libs {
                // Libraries must be resolved against artifacts of the same solc version.
                let id = self
                    .find_artifact_id_by_library_path(&file, &name, Some(&target.version))
                    .ok_or_else(|| LinkerError::MissingLibraryArtifact {
                        file: file.clone(),
                        name,
                    })?;
                // Only recurse into libraries we haven't seen yet, so cycles terminate.
                if deps.insert(id) {
                    self.collect_dependencies(id, deps)?;
                }
            }
        }
        Ok(())
    }

    /// Links given artifact with either given library addresses or address computed from sender and
    /// nonce.
    ///
    /// Each key in `libraries` should either be a global path or relative to project root. All
    /// remappings should be resolved.
    ///
    /// When calling for `target` being an external library itself, you should check that `target`
    /// does not appear in `libs_to_deploy` to avoid deploying it twice. It may happen in cases
    /// when there is a dependency cycle including `target`.
    pub fn link_with_nonce_or_address(
        &'a self,
        libraries: Libraries,
        sender: Address,
        mut nonce: u64,
        targets: impl IntoIterator<Item = &'a ArtifactId>,
    ) -> Result<LinkOutput, LinkerError> {
        // Library paths in `link_references` keys are always stripped, so we have to strip
        // user-provided paths to be able to match them correctly.
        let mut libraries = libraries.with_stripped_file_prefixes(self.root.as_path());
        let mut needed_libraries = BTreeSet::new();
        for target in targets {
            self.collect_dependencies(target, &mut needed_libraries)?;
        }
        let mut libs_to_deploy = Vec::new();
        // If `libraries` does not contain needed dependency, compute its address and add to
        // `libs_to_deploy`.
        for id in needed_libraries {
            let (lib_path, lib_name) = self.convert_artifact_id_to_lib_path(id);
            // CREATE address depends on (sender, nonce); the nonce only advances for
            // libraries that are actually scheduled for deployment here.
            libraries.libs.entry(lib_path).or_default().entry(lib_name).or_insert_with(|| {
                let address = sender.create(nonce);
                libs_to_deploy.push((id, address));
                nonce += 1;
                address.to_checksum(None)
            });
        }
        // Link and collect bytecodes for `libs_to_deploy`.
        let libs_to_deploy = libs_to_deploy
            .into_par_iter()
            .map(|(id, _)| {
                Ok(self.link(id, &libraries)?.get_bytecode_bytes().unwrap().into_owned())
            })
            .collect::<Result<Vec<_>, LinkerError>>()?;
        Ok(LinkOutput { libraries, libs_to_deploy })
    }

    /// Links `target` and its dependencies via CREATE2, deriving each library address from
    /// `sender`, `salt`, and the (fully linked) library init code.
    ///
    /// Returns [`LinkerError::CyclicDependency`] if no deployment order exists.
    pub fn link_with_create2(
        &'a self,
        libraries: Libraries,
        sender: Address,
        salt: B256,
        target: &'a ArtifactId,
    ) -> Result<LinkOutput, LinkerError> {
        // Library paths in `link_references` keys are always stripped, so we have to strip
        // user-provided paths to be able to match them correctly.
        let mut libraries = libraries.with_stripped_file_prefixes(self.root.as_path());
        let mut needed_libraries = BTreeSet::new();
        self.collect_dependencies(target, &mut needed_libraries)?;
        let mut needed_libraries = needed_libraries
            .into_par_iter()
            .filter(|id| {
                // Filter out already provided libraries.
                let (file, name) = self.convert_artifact_id_to_lib_path(id);
                !libraries.libs.contains_key(&file) || !libraries.libs[&file].contains_key(&name)
            })
            .map(|id| {
                // Link library with provided libs and extract bytecode object (possibly unlinked).
                let bytecode = self.link(id, &libraries).unwrap().bytecode.unwrap();
                (id, bytecode)
            })
            .collect::<Vec<_>>();
        let mut libs_to_deploy = Vec::new();
        // Iteratively compute addresses and link libraries until we have no unlinked libraries
        // left.
        while !needed_libraries.is_empty() {
            // Find any library which is fully linked.
            let deployable = needed_libraries
                .iter()
                .enumerate()
                .find(|(_, (_, bytecode))| !bytecode.object.is_unlinked());
            // If we haven't found any deployable library, it means we have a cyclic dependency.
            let Some((index, &(id, _))) = deployable else {
                return Err(LinkerError::CyclicDependency);
            };
            let (_, bytecode) = needed_libraries.swap_remove(index);
            // CREATE2 address is derived from the final (linked) init code, which is why
            // linking must happen before address computation.
            let code = bytecode.bytes().unwrap();
            let address = sender.create2_from_code(salt, code);
            libs_to_deploy.push(code.clone());
            let (file, name) = self.convert_artifact_id_to_lib_path(id);
            // Substitute the freshly computed address into all remaining libraries.
            needed_libraries.par_iter_mut().for_each(|(_, bytecode)| {
                bytecode.to_mut().link(&file.to_string_lossy(), &name, address);
            });
            libraries.libs.entry(file).or_default().insert(name, address.to_checksum(None));
        }
        Ok(LinkOutput { libraries, libs_to_deploy })
    }

    /// Links given artifact with given libraries.
    pub fn link(
        &self,
        target: &ArtifactId,
        libraries: &Libraries,
    ) -> Result<CompactContractBytecodeCow<'a>, LinkerError> {
        let mut contract =
            self.contracts.get(target).ok_or(LinkerError::MissingTargetArtifact)?.clone();
        for (file, libs) in &libraries.libs {
            for (name, address) in libs {
                let address = Address::from_str(address).map_err(LinkerError::InvalidAddress)?;
                // Substitute into creation bytecode...
                if let Some(bytecode) = contract.bytecode.as_mut() {
                    bytecode.to_mut().link(&file.to_string_lossy(), name, address);
                }
                // ...and into runtime bytecode, when present.
                if let Some(deployed_bytecode) =
                    contract.deployed_bytecode.as_mut().and_then(|b| b.to_mut().bytecode.as_mut())
                {
                    deployed_bytecode.link(&file.to_string_lossy(), name, address);
                }
            }
        }
        Ok(contract)
    }

    /// Ensures that both initial and deployed bytecode are linked.
    pub fn ensure_linked(
        &self,
        contract: &CompactContractBytecodeCow<'a>,
        target: &ArtifactId,
    ) -> Result<(), LinkerError> {
        if let Some(bytecode) = &contract.bytecode
            && bytecode.object.is_unlinked()
        {
            return Err(LinkerError::LinkingFailed {
                artifact: target.source.to_string_lossy().into(),
            });
        }
        if let Some(deployed_bytecode) = &contract.deployed_bytecode
            && let Some(deployed_bytecode_obj) = &deployed_bytecode.bytecode
            && deployed_bytecode_obj.object.is_unlinked()
        {
            return Err(LinkerError::LinkingFailed {
                artifact: target.source.to_string_lossy().into(),
            });
        }
        Ok(())
    }

    /// Links every known artifact against `libraries`, returning owned artifacts.
    pub fn get_linked_artifacts(
        &self,
        libraries: &Libraries,
    ) -> Result<ArtifactContracts, LinkerError> {
        self.get_linked_artifacts_cow(libraries).map(ArtifactContracts::from_iter)
    }

    /// Links every known artifact against `libraries`, returning copy-on-write artifacts.
    pub fn get_linked_artifacts_cow(
        &self,
        libraries: &Libraries,
    ) -> Result<ArtifactContracts<CompactContractBytecodeCow<'a>>, LinkerError> {
        self.contracts
            .par_iter()
            .map(|(id, _)| Ok((id.clone(), self.link(id, libraries)?)))
            .collect::<Result<_, _>>()
            .map(ArtifactContracts)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_primitives::{address, fixed_bytes, map::HashMap};
    use foundry_compilers::{
        Project, ProjectCompileOutput, ProjectPathsConfig,
        multi::MultiCompiler,
        solc::{Solc, SolcCompiler},
    };
    use std::sync::OnceLock;

    /// Canonicalized path to the shared `testdata` directory, cached for the process.
    fn testdata() -> &'static Path {
        static CACHE: OnceLock<PathBuf> = OnceLock::new();
        CACHE.get_or_init(|| {
            PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("../../testdata").canonicalize().unwrap()
        })
    }

    /// Harness that compiles a testdata project and validates linker output
    /// against expected per-artifact library dependencies.
    #[must_use]
    struct LinkerTest {
        project: Project,
        output: ProjectCompileOutput,
        // Artifact identifier -> expected (dependency identifier, address) pairs.
        dependency_assertions: HashMap<&'static str, Vec<(&'static str, Address)>>,
    }

    impl LinkerTest {
        /// Compiles the project at `path` with solc 0.8.18; optionally strips the
        /// project-root prefix from artifact paths.
        fn new(path: &Path, strip_prefixes: bool) -> Self {
            assert!(path.exists(), "Path {path:?} does not exist");
            let paths = ProjectPathsConfig::builder()
                .root(testdata())
                .lib(testdata().join("lib"))
                .sources(path)
                .tests(path)
                .build()
                .unwrap();
            let solc = Solc::find_or_install(&Version::new(0, 8, 18)).unwrap();
            let project = Project::builder()
                .paths(paths)
                .ephemeral()
                .no_artifacts()
                .build(MultiCompiler { solc: Some(SolcCompiler::Specific(solc)), vyper: None })
                .unwrap();
            let mut output = project.compile().unwrap();
            if strip_prefixes {
                output = output.with_stripped_file_prefixes(project.root());
            }
            Self { project, output, dependency_assertions: HashMap::default() }
        }

        /// Registers expected dependencies for one artifact (builder style).
        fn assert_dependencies(
            mut self,
            artifact_id: &'static str,
            deps: &[(&'static str, Address)],
        ) -> Self {
            self.dependency_assertions.insert(artifact_id, deps.to_vec());
            self
        }

        /// Links each target via CREATE (sender + nonce) and validates assertions.
        fn test_with_sender_and_nonce(self, sender: Address, initial_nonce: u64) {
            let linker = Linker::new(self.project.root(), self.output.artifact_ids().collect());
            for (id, identifier) in self.iter_linking_targets(&linker) {
                let output = linker
                    .link_with_nonce_or_address(Default::default(), sender, initial_nonce, [id])
                    .expect("Linking failed");
                self.validate_assertions(identifier, output);
            }
        }

        /// Links each target via CREATE2 (sender + salt) and validates assertions.
        fn test_with_create2(self, sender: Address, salt: B256) {
            let linker = Linker::new(self.project.root(), self.output.artifact_ids().collect());
            for (id, identifier) in self.iter_linking_targets(&linker) {
                let output = linker
                    .link_with_create2(Default::default(), sender, salt, id)
                    .expect("Linking failed");
                self.validate_assertions(identifier, output);
            }
        }

        /// Yields all artifacts to link, paired with their `path:Name` identifier.
        fn iter_linking_targets<'a>(
            &'a self,
            linker: &'a Linker<'_>,
        ) -> impl Iterator<Item = (&'a ArtifactId, String)> + 'a {
            self.sanity_check(linker);
            linker.contracts.keys().filter_map(move |id| {
                // If we didn't strip paths, artifacts will have absolute paths.
                // That's expected and we want to ensure that only `libraries` object has relative
                // paths, artifacts should be kept as is.
                let source = id
                    .source
                    .strip_prefix(self.project.root())
                    .unwrap_or(&id.source)
                    .to_string_lossy();
                let identifier = format!("{source}:{}", id.name);
                // Skip test utils as they always have no dependencies.
                if identifier.contains("utils/") {
                    return None;
                }
                Some((id, identifier))
            })
        }

        /// Guards against vacuous test runs (nothing asserted / nothing compiled).
        fn sanity_check(&self, linker: &Linker<'_>) {
            assert!(!self.dependency_assertions.is_empty(), "Dependency assertions are empty");
            assert!(!linker.contracts.is_empty(), "Linker contracts are empty");
        }

        /// Checks that the link output matches the registered expectations for `identifier`.
        fn validate_assertions(&self, identifier: String, output: LinkOutput) {
            let LinkOutput { libs_to_deploy, libraries } = output;
            let assertions = self
                .dependency_assertions
                .get(identifier.as_str())
                .unwrap_or_else(|| panic!("Unexpected artifact: {identifier}"));
            assert_eq!(
                libs_to_deploy.len(),
                assertions.len(),
                "artifact {identifier} has more/less dependencies than expected ({} vs {}): {:#?}",
                libs_to_deploy.len(),
                assertions.len(),
                libs_to_deploy
            );
            for &(dep_identifier, address) in assertions {
                let (file, name) = dep_identifier.split_once(':').unwrap();
                if let Some(lib_address) =
                    libraries.libs.get(Path::new(file)).and_then(|libs| libs.get(name))
                {
                    // Parse so assertions are case-insensitive w.r.t. checksummed addresses.
                    assert_eq!(
                        lib_address.parse::<Address>().unwrap(),
                        address,
                        "incorrect library address for dependency {dep_identifier} of {identifier}"
                    );
                } else {
                    panic!("Library {dep_identifier} not found");
                }
            }
        }
    }

    /// Runs `test_fn` twice: once with stripped file prefixes, once with absolute paths.
    fn link_test(path: impl AsRef<Path>, mut test_fn: impl FnMut(LinkerTest)) {
        // Inner monomorphization-free helper keeps codegen small across many call sites.
        fn link_test(path: &Path, test_fn: &mut dyn FnMut(LinkerTest)) {
            test_fn(LinkerTest::new(path, true));
            test_fn(LinkerTest::new(path, false));
        }
        link_test(path.as_ref(), &mut test_fn);
    }

    #[test]
    #[should_panic = "assertions are empty"]
    fn no_assertions() {
        link_test(testdata().join("default/linking/simple"), |linker| {
            linker.test_with_sender_and_nonce(Address::default(), 1);
        });
    }

    #[test]
    #[should_panic = "does not exist"]
    fn unknown_path() {
        link_test("doesnotexist", |linker| {
            linker
                .assert_dependencies("a:b", &[])
                .test_with_sender_and_nonce(Address::default(), 1);
        });
    }

    #[test]
    fn link_simple() {
        link_test(testdata().join("default/linking/simple"), |linker| {
            linker
                .assert_dependencies("default/linking/simple/Simple.t.sol:Lib", &[])
                .assert_dependencies(
                    "default/linking/simple/Simple.t.sol:LibraryConsumer",
                    &[(
                        "default/linking/simple/Simple.t.sol:Lib",
                        address!("0x5a443704dd4b594b382c22a083e2bd3090a6fef3"),
                    )],
                )
                .assert_dependencies(
                    "default/linking/simple/Simple.t.sol:SimpleLibraryLinkingTest",
                    &[(
                        "default/linking/simple/Simple.t.sol:Lib",
                        address!("0x5a443704dd4b594b382c22a083e2bd3090a6fef3"),
                    )],
                )
                .test_with_sender_and_nonce(Address::default(), 1);
        });
    }

    #[test]
    fn link_nested() {
        link_test(testdata().join("default/linking/nested"), |linker| {
            linker
                .assert_dependencies("default/linking/nested/Nested.t.sol:Lib", &[])
                .assert_dependencies(
                    "default/linking/nested/Nested.t.sol:NestedLib",
                    &[(
                        "default/linking/nested/Nested.t.sol:Lib",
                        address!("0x5a443704dd4b594b382c22a083e2bd3090a6fef3"),
                    )],
                )
                .assert_dependencies(
                    "default/linking/nested/Nested.t.sol:LibraryConsumer",
                    &[
                        // Lib shows up here twice, because the linker sees it twice, but it should
                        // have the same address and nonce.
                        (
                            "default/linking/nested/Nested.t.sol:Lib",
                            Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3")
                                .unwrap(),
                        ),
                        (
                            "default/linking/nested/Nested.t.sol:NestedLib",
                            Address::from_str("0x47e9Fbef8C83A1714F1951F142132E6e90F5fa5D")
                                .unwrap(),
                        ),
                    ],
                )
                .assert_dependencies(
                    "default/linking/nested/Nested.t.sol:NestedLibraryLinkingTest",
                    &[
                        (
                            "default/linking/nested/Nested.t.sol:Lib",
                            Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3")
                                .unwrap(),
                        ),
                        (
                            "default/linking/nested/Nested.t.sol:NestedLib",
                            Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d")
                                .unwrap(),
                        ),
                    ],
                )
                .test_with_sender_and_nonce(Address::default(), 1);
        });
    }

    #[test]
    fn link_duplicate() {
        link_test(testdata().join("default/linking/duplicate"), |linker| {
            linker
                .assert_dependencies("default/linking/duplicate/Duplicate.t.sol:A", &[])
                .assert_dependencies("default/linking/duplicate/Duplicate.t.sol:B", &[])
                .assert_dependencies(
                    "default/linking/duplicate/Duplicate.t.sol:C",
                    &[(
                        "default/linking/duplicate/Duplicate.t.sol:A",
                        address!("0x5a443704dd4b594b382c22a083e2bd3090a6fef3"),
                    )],
                )
                .assert_dependencies(
                    "default/linking/duplicate/Duplicate.t.sol:D",
                    &[(
                        "default/linking/duplicate/Duplicate.t.sol:B",
                        address!("0x5a443704dd4b594b382c22a083e2bd3090a6fef3"),
                    )],
                )
                .assert_dependencies(
                    "default/linking/duplicate/Duplicate.t.sol:E",
                    &[
                        (
                            "default/linking/duplicate/Duplicate.t.sol:A",
                            Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:C",
                            Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d")
                                .unwrap(),
                        ),
                    ],
                )
                .assert_dependencies(
                    "default/linking/duplicate/Duplicate.t.sol:LibraryConsumer",
                    &[
                        (
                            "default/linking/duplicate/Duplicate.t.sol:A",
                            Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:B",
                            Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:C",
                            Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:D",
                            Address::from_str("0x47c5e40890bce4a473a49d7501808b9633f29782")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:E",
                            Address::from_str("0x29b2440db4a256b0c1e6d3b4cdcaa68e2440a08f")
                                .unwrap(),
                        ),
                    ],
                )
                .assert_dependencies(
                    "default/linking/duplicate/Duplicate.t.sol:DuplicateLibraryLinkingTest",
                    &[
                        (
                            "default/linking/duplicate/Duplicate.t.sol:A",
                            Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:B",
                            Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:C",
                            Address::from_str("0x8be503bcded90ed42eff31f56199399b2b0154ca")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:D",
                            Address::from_str("0x47c5e40890bce4a473a49d7501808b9633f29782")
                                .unwrap(),
                        ),
                        (
                            "default/linking/duplicate/Duplicate.t.sol:E",
                            Address::from_str("0x29b2440db4a256b0c1e6d3b4cdcaa68e2440a08f")
                                .unwrap(),
                        ),
                    ],
                )
                .test_with_sender_and_nonce(Address::default(), 1);
        });
    }

    #[test]
    fn link_cycle() {
        link_test(testdata().join("default/linking/cycle"), |linker| {
            linker
                .assert_dependencies(
                    "default/linking/cycle/Cycle.t.sol:Foo",
                    &[
                        (
                            "default/linking/cycle/Cycle.t.sol:Foo",
                            Address::from_str("0x47e9Fbef8C83A1714F1951F142132E6e90F5fa5D")
                                .unwrap(),
                        ),
                        (
                            "default/linking/cycle/Cycle.t.sol:Bar",
                            Address::from_str("0x5a443704dd4B594B382c22a083e2BD3090A6feF3")
                                .unwrap(),
                        ),
                    ],
                )
                .assert_dependencies(
                    "default/linking/cycle/Cycle.t.sol:Bar",
                    &[
                        (
                            "default/linking/cycle/Cycle.t.sol:Foo",
                            Address::from_str("0x47e9Fbef8C83A1714F1951F142132E6e90F5fa5D")
                                .unwrap(),
                        ),
                        (
                            "default/linking/cycle/Cycle.t.sol:Bar",
                            Address::from_str("0x5a443704dd4B594B382c22a083e2BD3090A6feF3")
                                .unwrap(),
                        ),
                    ],
                )
                .test_with_sender_and_nonce(Address::default(), 1);
        });
    }

    #[test]
    #[ignore = "addresses depend on testdata utils internals for some reason"]
    fn link_create2_nested() {
        link_test(testdata().join("default/linking/nested"), |linker| {
            linker
                .assert_dependencies("default/linking/nested/Nested.t.sol:Lib", &[])
                .assert_dependencies(
                    "default/linking/nested/Nested.t.sol:NestedLib",
                    &[(
                        "default/linking/nested/Nested.t.sol:Lib",
                        address!("0x773253227cce756e50c3993ec6366b3ec27786f9"),
                    )],
                )
                .assert_dependencies(
                    "default/linking/nested/Nested.t.sol:LibraryConsumer",
                    &[
                        // Lib shows up here twice, because the linker sees it twice, but it should
                        // have the same address and nonce.
                        (
                            "default/linking/nested/Nested.t.sol:Lib",
                            Address::from_str("0x773253227cce756e50c3993ec6366b3ec27786f9")
                                .unwrap(),
                        ),
                        (
                            "default/linking/nested/Nested.t.sol:NestedLib",
                            Address::from_str("0xac231df03403867b05d092c26fc91b6b83f4bebe")
                                .unwrap(),
                        ),
                    ],
                )
                .assert_dependencies(
                    "default/linking/nested/Nested.t.sol:NestedLibraryLinkingTest",
                    &[
                        (
                            "default/linking/nested/Nested.t.sol:Lib",
                            Address::from_str("0x773253227cce756e50c3993ec6366b3ec27786f9")
                                .unwrap(),
                        ),
                        (
                            "default/linking/nested/Nested.t.sol:NestedLib",
                            Address::from_str("0xac231df03403867b05d092c26fc91b6b83f4bebe")
                                .unwrap(),
                        ),
                    ],
                )
                .test_with_create2(
                    Address::default(),
                    fixed_bytes!(
                        "19bf59b7b67ae8edcbc6e53616080f61fa99285c061450ad601b0bc40c9adfc9"
                    ),
                );
        });
    }

    #[test]
    fn link_samefile_union() {
        link_test(testdata().join("default/linking/samefile_union"), |linker| {
            linker
                .assert_dependencies("default/linking/samefile_union/Libs.sol:LInit", &[])
                .assert_dependencies("default/linking/samefile_union/Libs.sol:LRun", &[])
                .assert_dependencies(
                    "default/linking/samefile_union/SameFileUnion.t.sol:UsesBoth",
                    &[
                        (
                            "default/linking/samefile_union/Libs.sol:LInit",
                            Address::from_str("0x5a443704dd4b594b382c22a083e2bd3090a6fef3")
                                .unwrap(),
                        ),
                        (
                            "default/linking/samefile_union/Libs.sol:LRun",
                            Address::from_str("0x47e9fbef8c83a1714f1951f142132e6e90f5fa5d")
                                .unwrap(),
                        ),
                    ],
                )
                .test_with_sender_and_nonce(Address::default(), 1);
        });
    }

    #[test]
    fn linking_failure() {
        let linker = LinkerTest::new(&testdata().join("default/linking/simple"), true);
        let linker_instance =
            Linker::new(linker.project.root(), linker.output.artifact_ids().collect());
        // Create a libraries object with an incorrect library name that won't match any references
        let mut libraries = Libraries::default();
        libraries.libs.entry("default/linking/simple/Simple.t.sol".into()).or_default().insert(
            "NonExistentLib".to_string(),
            "0x5a443704dd4b594b382c22a083e2bd3090a6fef3".to_string(),
        );
        // Try to link the LibraryConsumer contract with incorrect library
        let artifact_id = linker_instance
            .contracts
            .keys()
            .find(|id| id.name == "LibraryConsumer")
            .expect("LibraryConsumer contract not found");
        let contract = linker_instance.contracts.get(artifact_id).unwrap();
        // Verify that the artifact has unlinked bytecode
        assert!(
            linker_instance.ensure_linked(contract, artifact_id).is_err(),
            "Expected artifact to have unlinked bytecode"
        );
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/lib.rs | crates/test-utils/src/lib.rs | //! # foundry-test-utils
//!
//! Internal Foundry testing utilities.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
// Shouldn't use sh_* macros here, as they don't get captured by the test runner.
#![allow(clippy::disallowed_macros)]
#[macro_use]
extern crate tracing;
// Macros useful for testing.
#[macro_use]
mod macros;
pub mod rpc;
pub mod fd_lock;
mod filter;
pub use filter::Filter;
mod ext;
pub use ext::ExtTester;
mod prj;
pub use prj::{TestCommand, TestProject};
// Utilities for making it easier to handle tests.
pub mod util;
mod script;
pub use script::{ScriptOutcome, ScriptTester};
pub mod ui_runner;
// re-exports for convenience
pub use foundry_compilers;
pub use snapbox::{self, assert_data_eq, file, str};
/// Initializes tracing for tests.
pub fn init_tracing() {
    use std::sync::Once;
    // One-time initialization even when called from many tests concurrently.
    static ONCE: Once = Once::new();
    ONCE.call_once(|| {
        // Default to backtraces so failing tests are easier to diagnose.
        if std::env::var_os("RUST_BACKTRACE").is_none() {
            // `set_var` is `unsafe` (edition 2024) because concurrent env reads race with it.
            // NOTE(review): assumes this runs before threads reading the environment — confirm.
            unsafe { std::env::set_var("RUST_BACKTRACE", "1") };
        }
        // Errors ignored: a global subscriber may already have been installed.
        let _ = tracing_subscriber::FmtSubscriber::builder()
            .with_env_filter(env_filter())
            .with_test_writer()
            .try_init();
        let _ = ui_test::color_eyre::install();
    });
}
/// Builds the tracing env filter: defaults to `foundry_test_utils=debug`, honors the
/// environment, then layers on the shared CLI default directives.
fn env_filter() -> tracing_subscriber::EnvFilter {
    const DEFAULT_DIRECTIVES: &[&str] = &include!("../../cli/src/utils/default_directives.txt");
    let base = tracing_subscriber::EnvFilter::builder()
        .with_default_directive("foundry_test_utils=debug".parse().unwrap())
        .from_env_lossy();
    // Fold each shared directive into the filter.
    DEFAULT_DIRECTIVES
        .iter()
        .fold(base, |filter, directive| filter.add_directive(directive.parse().unwrap()))
}
/// Logs pre-formatted arguments at `debug` level, initializing tracing first.
pub fn test_debug(args: std::fmt::Arguments<'_>) {
    init_tracing();
    debug!("{args}");
}
/// Logs pre-formatted arguments at `trace` level, initializing tracing first.
pub fn test_trace(args: std::fmt::Arguments<'_>) {
    init_tracing();
    trace!("{args}");
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/ext.rs | crates/test-utils/src/ext.rs | use crate::prj::{TestCommand, TestProject, clone_remote, setup_forge};
use foundry_compilers::PathStyle;
use std::process::Command;
/// External test builder
#[derive(Clone, Debug)]
#[must_use = "ExtTester does nothing unless you `run` it"]
pub struct ExtTester {
    /// GitHub organization of the repository under test.
    pub org: &'static str,
    /// Repository name; also used as the local test-project name.
    pub name: &'static str,
    /// Git revision to check out. An empty rev makes `setup_forge_prj` panic with the
    /// latest commit so the caller can pin it.
    pub rev: &'static str,
    /// Path style used when creating the local project.
    pub style: PathStyle,
    /// Optional block number to fork from during the test run.
    pub fork_block: Option<u64>,
    /// Extra arguments passed to `forge test`.
    pub args: Vec<String>,
    /// Extra environment variables set on the forge command.
    pub envs: Vec<(String, String)>,
    /// Commands executed after cloning (e.g. dependency installation).
    pub install_commands: Vec<Vec<String>>,
    /// Verbosity flag passed to forge, e.g. `-vvv`.
    pub verbosity: String,
}
impl ExtTester {
    /// Creates a new external test builder.
    pub fn new(org: &'static str, name: &'static str, rev: &'static str) -> Self {
        Self {
            org,
            name,
            rev,
            style: PathStyle::Dapptools,
            fork_block: None,
            args: vec![],
            envs: vec![],
            install_commands: vec![],
            verbosity: "-vvv".to_string(),
        }
    }

    /// Sets the path style.
    pub fn style(mut self, style: PathStyle) -> Self {
        self.style = style;
        self
    }

    /// Sets the fork block.
    pub fn fork_block(mut self, fork_block: u64) -> Self {
        self.fork_block = Some(fork_block);
        self
    }

    /// Adds an argument to the forge command.
    pub fn arg(mut self, arg: impl Into<String>) -> Self {
        self.args.push(arg.into());
        self
    }

    /// Adds multiple arguments to the forge command.
    pub fn args<I, A>(mut self, args: I) -> Self
    where
        I: IntoIterator<Item = A>,
        A: Into<String>,
    {
        self.args.extend(args.into_iter().map(Into::into));
        self
    }

    /// Sets the verbosity
    pub fn verbosity(mut self, verbosity: usize) -> Self {
        self.verbosity = format!("-{}", "v".repeat(verbosity));
        self
    }

    /// Adds an environment variable to the forge command.
    pub fn env(mut self, key: impl Into<String>, value: impl Into<String>) -> Self {
        self.envs.push((key.into(), value.into()));
        self
    }

    /// Adds multiple environment variables to the forge command.
    pub fn envs<I, K, V>(mut self, envs: I) -> Self
    where
        I: IntoIterator<Item = (K, V)>,
        K: Into<String>,
        V: Into<String>,
    {
        self.envs.extend(envs.into_iter().map(|(k, v)| (k.into(), v.into())));
        self
    }

    /// Adds a command to run after the project is cloned.
    ///
    /// Note that the command is run in the project's root directory, and it won't fail the test if
    /// it fails.
    pub fn install_command(mut self, command: &[&str]) -> Self {
        self.install_commands.push(command.iter().map(|s| s.to_string()).collect());
        self
    }

    /// Clones the external repository into a fresh test project, checks out the pinned
    /// revision, and returns the project together with a forge command rooted at it.
    pub fn setup_forge_prj(&self, recursive: bool) -> (TestProject, TestCommand) {
        let (prj, mut test_cmd) = setup_forge(self.name, self.style.clone());
        // Export vyper and forge in test command - workaround for snekmate venom tests.
        if let Some(vyper) = &prj.inner.project().compiler.vyper {
            let vyper_dir = vyper.path.parent().expect("vyper path should have a parent");
            let forge_bin = prj.forge_path();
            let forge_dir = forge_bin.parent().expect("forge path should have a parent");
            let existing_path = std::env::var_os("PATH").unwrap_or_default();
            // Prepend so these binaries shadow any system-wide installations.
            let mut new_paths = vec![vyper_dir.to_path_buf(), forge_dir.to_path_buf()];
            new_paths.extend(std::env::split_paths(&existing_path));
            let joined_path = std::env::join_paths(new_paths).expect("failed to join PATH");
            test_cmd.env("PATH", joined_path);
        }
        // Wipe the default structure.
        prj.wipe();
        // Clone the external repository.
        let repo_url = format!("https://github.com/{}/{}.git", self.org, self.name);
        let root = prj.root().to_str().unwrap();
        clone_remote(&repo_url, root, recursive);
        // Checkout the revision.
        if self.rev.is_empty() {
            // No pinned revision: report the latest commit and panic so the caller pins
            // it, keeping external tests reproducible.
            let mut git = Command::new("git");
            git.current_dir(root).args(["log", "-n", "1"]);
            test_debug!("$ {git:?}");
            let output = git.output().unwrap();
            if !output.status.success() {
                panic!("git log failed: {output:?}");
            }
            let stdout = String::from_utf8(output.stdout).unwrap();
            let commit = stdout.lines().next().unwrap().split_whitespace().nth(1).unwrap();
            panic!("pin to latest commit: {commit}");
        } else {
            let mut git = Command::new("git");
            git.current_dir(root).args(["checkout", self.rev]);
            test_debug!("$ {git:?}");
            let status = git.status().unwrap();
            if !status.success() {
                panic!("git checkout failed: {status}");
            }
        }
        (prj, test_cmd)
    }

    /// Runs the configured install commands inside `root`.
    ///
    /// NOTE(review): iteration stops after the first command that exits successfully, so
    /// multiple commands act as fallbacks rather than a sequence — confirm this is the
    /// intended contract before adding ordered install steps.
    pub fn run_install_commands(&self, root: &str) {
        for install_command in &self.install_commands {
            let mut install_cmd = Command::new(&install_command[0]);
            install_cmd.args(&install_command[1..]).current_dir(root);
            test_debug!("cd {root}; {install_cmd:?}");
            match install_cmd.status() {
                Ok(s) => {
                    test_debug!("\n\n{install_cmd:?}: {s}");
                    if s.success() {
                        break;
                    }
                }
                Err(e) => {
                    // Failure to even spawn the command is logged and skipped.
                    eprintln!("\n\n{install_cmd:?}: {e}");
                }
            }
        }
    }

    /// Runs the test.
    pub fn run(&self) {
        let (prj, mut test_cmd) = self.setup_forge_prj(true);
        // Run installation command.
        self.run_install_commands(prj.root().to_str().unwrap());
        // Run the tests.
        test_cmd.arg("test");
        test_cmd.args(&self.args);
        test_cmd.args(["--fuzz-runs=32", "--ffi", &self.verbosity]);
        test_cmd.envs(self.envs.iter().map(|(k, v)| (k, v)));
        if let Some(fork_block) = self.fork_block {
            test_cmd.env("FOUNDRY_ETH_RPC_URL", crate::rpc::next_http_archive_rpc_url());
            test_cmd.env("FOUNDRY_FORK_BLOCK_NUMBER", fork_block.to_string());
        }
        test_cmd.env("FOUNDRY_INVARIANT_DEPTH", "15");
        test_cmd.env("FOUNDRY_ALLOW_INTERNAL_EXPECT_REVERT", "true");
        test_cmd.assert_success();
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/prj.rs | crates/test-utils/src/prj.rs | use crate::{init_tracing, rpc::rpc_endpoints};
use eyre::{Result, WrapErr};
use foundry_compilers::{
ArtifactOutput, ConfigurableArtifacts, PathStyle, ProjectPathsConfig,
artifacts::Contract,
cache::CompilerCache,
compilers::multi::MultiCompiler,
project_util::{TempProject, copy_dir},
solc::SolcSettings,
};
use foundry_config::Config;
use parking_lot::Mutex;
use regex::Regex;
use snapbox::{Data, IntoData, assert_data_eq, cmd::OutputAssert};
use std::{
env,
ffi::OsStr,
fs::{self, File},
io::{BufWriter, Write},
path::{Path, PathBuf},
process::{Command, Output, Stdio},
sync::{
Arc, LazyLock,
atomic::{AtomicUsize, Ordering},
},
};
use crate::util::{SOLC_VERSION, pretty_err};
// NOTE(review): presumably serializes sections that mutate the process-wide current
// directory — confirm at the lock's use sites (not visible in this chunk).
static CURRENT_DIR_LOCK: LazyLock<Mutex<()>> = LazyLock::new(|| Mutex::new(()));
/// Global test identifier.
static NEXT_ID: AtomicUsize = AtomicUsize::new(0);
/// Clones a remote repository into the specified directory. Panics if the command fails.
pub fn clone_remote(repo_url: &str, target_dir: &str, recursive: bool) {
    let mut cmd = Command::new("git");
    cmd.arg("clone");
    // Recursive clones bring in submodules shallowly; non-recursive clones are as
    // minimal as possible (no checkout, no blobs, no submodules).
    let mode_flags: &[&str] = if recursive {
        &["--recursive", "--shallow-submodules"]
    } else {
        &["--depth=1", "--no-checkout", "--filter=blob:none", "--no-recurse-submodules"]
    };
    cmd.args(mode_flags).args([repo_url, target_dir]);
    test_debug!("{cmd:?}");
    let status = cmd.status().unwrap();
    assert!(status.success(), "git clone failed: {status}");
}
/// Setup an empty test project and return a command pointing to the forge
/// executable whose CWD is set to the project's root.
///
/// The name given will be used to create the directory. Generally, it should
/// correspond to the test name.
#[track_caller]
pub fn setup_forge(name: &str, style: PathStyle) -> (TestProject, TestCommand) {
    // Delegates to `setup_forge_project` with a fresh project named after the test.
    setup_forge_project(TestProject::new(name, style))
}
/// Pairs an existing [`TestProject`] with a forge command rooted at it.
pub fn setup_forge_project(test: TestProject) -> (TestProject, TestCommand) {
    let cmd = test.forge_command();
    (test, cmd)
}
/// How to initialize a remote git project
#[derive(Clone, Debug)]
pub struct RemoteProject {
    /// Repository identifier passed to `TempProject::checkout` (e.g. "org/repo").
    id: String,
    /// Whether to run `forge build` after checkout.
    run_build: bool,
    /// Additional commands to run after cloning, each as argv (program + args).
    run_commands: Vec<Vec<String>>,
    /// Path style applied to the checked-out project.
    path_style: PathStyle,
}
impl RemoteProject {
    /// Creates a configuration for the given repository identifier; by default the
    /// project is built (`forge build`) and uses the Dapptools path style.
    pub fn new(id: impl Into<String>) -> Self {
        Self {
            id: id.into(),
            run_build: true,
            run_commands: Vec::new(),
            path_style: PathStyle::Dapptools,
        }
    }

    /// Whether to run `forge build`
    pub fn set_build(mut self, run_build: bool) -> Self {
        self.run_build = run_build;
        self
    }

    /// Configures the project's pathstyle
    pub fn path_style(mut self, path_style: PathStyle) -> Self {
        self.path_style = path_style;
        self
    }

    /// Add another command to run after cloning
    pub fn cmd(mut self, cmd: impl IntoIterator<Item = impl Into<String>>) -> Self {
        let argv: Vec<String> = cmd.into_iter().map(Into::into).collect();
        self.run_commands.push(argv);
        self
    }
}
impl<T: Into<String>> From<T> for RemoteProject {
fn from(id: T) -> Self {
Self::new(id)
}
}
/// Setups a new local forge project by cloning and initializing the `RemoteProject`
///
/// This will
/// 1. clone the prj, like "transmissions1/solmate"
/// 2. run `forge build`, if configured
/// 3. run additional commands
///
/// # Panics
///
/// If anything goes wrong during checkout or build, or if other commands are unsuccessful
pub fn setup_forge_remote(prj: impl Into<RemoteProject>) -> (TestProject, TestCommand) {
    try_setup_forge_remote(prj).unwrap()
}
/// Same as `setup_forge_remote` but returns an error instead of panicking.
pub fn try_setup_forge_remote(
    config: impl Into<RemoteProject>,
) -> Result<(TestProject, TestCommand)> {
    let config = config.into();

    // Check out the remote project into a fresh temp dir and apply the configured layout.
    let mut tmp = TempProject::checkout(&config.id).wrap_err("failed to checkout project")?;
    tmp.project_mut().paths = config.path_style.paths(tmp.root())?;
    let prj = TestProject::with_project(tmp);

    // Build it, if configured.
    if config.run_build {
        prj.forge_command().arg("build").assert_success();
    }

    // Run any additional setup commands inside the project root.
    for addon in config.run_commands {
        debug_assert!(!addon.is_empty());
        let mut cmd = Command::new(&addon[0]);
        // `args` with an empty slice is a no-op, so no length check is needed.
        cmd.args(&addon[1..]);
        let status = cmd
            .current_dir(prj.root())
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .status()
            .wrap_err_with(|| format!("Failed to execute {addon:?}"))?;
        eyre::ensure!(status.success(), "Failed to execute command {:?}", addon);
    }

    let forge = prj.forge_command();
    Ok((prj, forge))
}
/// Setup an empty test project and return a command pointing to the cast
/// executable whose CWD is set to the project's root.
pub fn setup_cast(name: &str, style: PathStyle) -> (TestProject, TestCommand) {
    let project = TestProject::new(name, style);
    setup_cast_project(project)
}
/// Pairs the given project with a `cast` command rooted in it.
pub fn setup_cast_project(test: TestProject) -> (TestProject, TestCommand) {
    let command = test.cast_command();
    (test, command)
}
/// `TestProject` represents a temporary project to run tests against.
///
/// Test projects are created from a global atomic counter to avoid duplicates.
///
/// Cloning is cheap: the inner project is shared behind an [`Arc`].
#[derive(Clone, Debug)]
pub struct TestProject<
    T: ArtifactOutput<CompilerContract = Contract> + Default = ConfigurableArtifacts,
> {
    /// The directory in which this test executable is running.
    exe_root: PathBuf,
    /// The project in which the test should run.
    pub(crate) inner: Arc<TempProject<MultiCompiler, T>>,
}
impl TestProject {
    /// Create a new test project with the given name. The name
    /// does not need to be distinct for each invocation, but should correspond
    /// to a logical grouping of tests.
    pub fn new(name: &str, style: PathStyle) -> Self {
        // Unique, monotonically increasing suffix so repeated uses of the same
        // logical name get distinct temp directories.
        let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
        let project = pretty_err(name, TempProject::with_style(&format!("{name}-{id}"), style));
        Self::with_project(project)
    }

    /// Wraps an existing [`TempProject`], recording the directory of the currently running
    /// test executable so the `forge`/`cast` binaries can be located relative to it.
    pub fn with_project(project: TempProject) -> Self {
        init_tracing();
        let this = env::current_exe().unwrap();
        let exe_root = canonicalize(this.parent().expect("executable's directory"));
        Self { exe_root, inner: Arc::new(project) }
    }

    /// Returns the root path of the project's workspace.
    pub fn root(&self) -> &Path {
        self.inner.root()
    }

    /// Returns the paths config.
    pub fn paths(&self) -> &ProjectPathsConfig {
        self.inner.paths()
    }

    /// Returns the path to the project's `foundry.toml` file.
    pub fn config(&self) -> PathBuf {
        self.root().join(Config::FILE_NAME)
    }

    /// Returns the path to the project's cache file.
    pub fn cache(&self) -> &PathBuf {
        &self.paths().cache
    }

    /// Returns the path to the project's artifacts directory.
    pub fn artifacts(&self) -> &PathBuf {
        &self.paths().artifacts
    }

    /// Removes the project's cache and artifacts directory.
    pub fn clear(&self) {
        self.clear_cache();
        self.clear_artifacts();
    }

    /// Removes this project's cache file. Errors (e.g. the file not existing) are ignored.
    pub fn clear_cache(&self) {
        let _ = fs::remove_file(self.cache());
    }

    /// Removes this project's artifacts directory. Errors are ignored.
    pub fn clear_artifacts(&self) {
        let _ = fs::remove_dir_all(self.artifacts());
    }

    /// Removes the entire cache directory (including fuzz, invariant, and test-failures caches).
    pub fn clear_cache_dir(&self) {
        let _ = fs::remove_dir_all(self.root().join("cache"));
    }

    /// Updates the project's config with the given function.
    pub fn update_config(&self, f: impl FnOnce(&mut Config)) {
        self._update_config(Box::new(f));
    }

    // Takes a boxed closure so the (non-generic) body is compiled only once.
    fn _update_config(&self, f: Box<dyn FnOnce(&mut Config) + '_>) {
        // Start from the existing config if it exists and loads, otherwise from defaults.
        let mut config = self
            .config()
            .exists()
            .then_some(())
            .and_then(|()| Config::load_with_root(self.root()).ok())
            .unwrap_or_default();
        // NOTE(review): remappings are cleared before applying the update — presumably to
        // drop auto-detected remappings so tests start from a clean slate; confirm.
        config.remappings.clear();
        f(&mut config);
        self.write_config(config);
    }

    /// Writes the given config as toml to `foundry.toml`.
    #[doc(hidden)] // Prefer `update_config`.
    pub fn write_config(&self, config: Config) {
        let file = self.config();
        pretty_err(&file, fs::write(&file, config.to_string_pretty().unwrap()));
    }

    /// Writes [`rpc_endpoints`] to the project's config.
    pub fn add_rpc_endpoints(&self) {
        self.update_config(|config| {
            config.rpc_endpoints = rpc_endpoints();
        });
    }

    /// Adds a source file to the project, prepending the SPDX/pragma prelude if missing.
    pub fn add_source(&self, name: &str, contents: &str) -> PathBuf {
        self.inner.add_source(name, Self::add_source_prelude(contents)).unwrap()
    }

    /// Adds a source file to the project as-is. Prefer using `add_source` instead.
    pub fn add_raw_source(&self, name: &str, contents: &str) -> PathBuf {
        self.inner.add_source(name, contents).unwrap()
    }

    /// Adds a script file to the project, prepending the SPDX/pragma prelude if missing.
    pub fn add_script(&self, name: &str, contents: &str) -> PathBuf {
        self.inner.add_script(name, Self::add_source_prelude(contents)).unwrap()
    }

    /// Adds a script file to the project as-is. Prefer using `add_script` instead.
    pub fn add_raw_script(&self, name: &str, contents: &str) -> PathBuf {
        self.inner.add_script(name, contents).unwrap()
    }

    /// Adds a test file to the project, prepending the SPDX/pragma prelude if missing.
    pub fn add_test(&self, name: &str, contents: &str) -> PathBuf {
        self.inner.add_test(name, Self::add_source_prelude(contents)).unwrap()
    }

    /// Adds a test file to the project as-is. Prefer using `add_test` instead.
    pub fn add_raw_test(&self, name: &str, contents: &str) -> PathBuf {
        self.inner.add_test(name, contents).unwrap()
    }

    /// Adds a library file to the project, prepending the SPDX/pragma prelude if missing.
    pub fn add_lib(&self, name: &str, contents: &str) -> PathBuf {
        self.inner.add_lib(name, Self::add_source_prelude(contents)).unwrap()
    }

    /// Adds a library file to the project as-is. Prefer using `add_lib` instead.
    pub fn add_raw_lib(&self, name: &str, contents: &str) -> PathBuf {
        self.inner.add_lib(name, contents).unwrap()
    }

    // Prepends a `pragma solidity` line and an SPDX header to `s` unless already present.
    fn add_source_prelude(s: &str) -> String {
        let mut s = s.to_string();
        if !s.contains("pragma solidity") {
            s = format!("pragma solidity ={SOLC_VERSION};\n{s}");
        }
        if !s.contains("// SPDX") {
            s = format!("// SPDX-License-Identifier: MIT OR Apache-2.0\n{s}");
        }
        s
    }

    /// Asserts that the `<root>/foundry.toml` file exists.
    #[track_caller]
    pub fn assert_config_exists(&self) {
        assert!(self.config().exists());
    }

    /// Asserts that the `<root>/cache/sol-files-cache.json` file exists.
    #[track_caller]
    pub fn assert_cache_exists(&self) {
        assert!(self.cache().exists());
    }

    /// Asserts that the `<root>/out` file exists.
    #[track_caller]
    pub fn assert_artifacts_dir_exists(&self) {
        assert!(self.paths().artifacts.exists());
    }

    /// Creates all project dirs and ensure they were created
    #[track_caller]
    pub fn assert_create_dirs_exists(&self) {
        self.paths().create_all().unwrap_or_else(|_| panic!("Failed to create project paths"));
        // Write an empty compiler cache so cache-dependent assertions hold.
        CompilerCache::<SolcSettings>::default()
            .write(&self.paths().cache)
            .expect("Failed to create cache");
        self.assert_all_paths_exist();
    }

    /// Ensures that the given layout exists
    #[track_caller]
    pub fn assert_style_paths_exist(&self, style: PathStyle) {
        let paths = style.paths(&self.paths().root).unwrap();
        config_paths_exist(&paths, self.inner.project().cached);
    }

    /// Copies the project's root directory to the given target
    #[track_caller]
    pub fn copy_to(&self, target: impl AsRef<Path>) {
        let target = target.as_ref();
        pretty_err(target, fs::create_dir_all(target));
        pretty_err(target, copy_dir(self.root(), target));
    }

    /// Creates a file with contents `contents` in the test project's directory. The
    /// file will be deleted when the project is dropped.
    ///
    /// Panics if `path` is absolute; intermediate directories are created as needed.
    pub fn create_file(&self, path: impl AsRef<Path>, contents: &str) -> PathBuf {
        let path = path.as_ref();
        if !path.is_relative() {
            panic!("create_file(): file path is absolute");
        }
        let path = self.root().join(path);
        if let Some(parent) = path.parent() {
            pretty_err(parent, std::fs::create_dir_all(parent));
        }
        let file = pretty_err(&path, File::create(&path));
        let mut writer = BufWriter::new(file);
        pretty_err(&path, writer.write_all(contents.as_bytes()));
        path
    }

    /// Adds DSTest as a source under "test.sol"
    pub fn insert_ds_test(&self) -> PathBuf {
        self.add_source("test.sol", include_str!("../../../testdata/utils/DSTest.sol"))
    }

    /// Adds custom test utils under the "test/utils" directory.
    pub fn insert_utils(&self) {
        self.add_test("utils/DSTest.sol", include_str!("../../../testdata/utils/DSTest.sol"));
        self.add_test("utils/Test.sol", include_str!("../../../testdata/utils/Test.sol"));
        self.add_test("utils/Vm.sol", include_str!("../../../testdata/utils/Vm.sol"));
        self.add_test("utils/console.sol", include_str!("../../../testdata/utils/console.sol"));
    }

    /// Adds `console.sol` as a source under "console.sol"
    pub fn insert_console(&self) -> PathBuf {
        let s = include_str!("../../../testdata/utils/console.sol");
        self.add_source("console.sol", s)
    }

    /// Adds `Vm.sol` as a source under "Vm.sol"
    pub fn insert_vm(&self) -> PathBuf {
        let s = include_str!("../../../testdata/utils/Vm.sol");
        self.add_source("Vm.sol", s)
    }

    /// Asserts all project paths exist. These are:
    /// - sources
    /// - artifacts
    /// - libs
    /// - cache
    pub fn assert_all_paths_exist(&self) {
        let paths = self.paths();
        config_paths_exist(paths, self.inner.project().cached);
    }

    /// Asserts that the artifacts dir and cache don't exist
    pub fn assert_cleaned(&self) {
        let paths = self.paths();
        assert!(!paths.cache.exists());
        assert!(!paths.artifacts.exists());
    }

    /// Creates a new command that is set to use the forge executable for this project
    #[track_caller]
    pub fn forge_command(&self) -> TestCommand {
        let cmd = self.forge_bin();
        // Hold the global CWD lock while reading the current dir so a concurrently
        // running test can't change it mid-read.
        let _lock = CURRENT_DIR_LOCK.lock();
        TestCommand {
            project: self.clone(),
            cmd,
            current_dir_lock: None,
            saved_cwd: pretty_err("<current dir>", std::env::current_dir()),
            stdin: None,
            redact_output: true,
        }
    }

    /// Creates a new command that is set to use the cast executable for this project
    pub fn cast_command(&self) -> TestCommand {
        let mut cmd = self.cast_bin();
        // Unlike `forge_bin`, `cast_bin` does not set a working directory, so do it here.
        cmd.current_dir(self.inner.root());
        let _lock = CURRENT_DIR_LOCK.lock();
        TestCommand {
            project: self.clone(),
            cmd,
            current_dir_lock: None,
            saved_cwd: pretty_err("<current dir>", std::env::current_dir()),
            stdin: None,
            redact_output: true,
        }
    }

    /// Returns the path to the forge executable.
    pub fn forge_bin(&self) -> Command {
        let mut cmd = Command::new(self.forge_path());
        cmd.current_dir(self.inner.root());
        // Disable color output for comparisons; can be overridden with `--color always`.
        cmd.env("NO_COLOR", "1");
        cmd
    }

    // The `forge` binary lives next to the parent dir of the test executable.
    pub(crate) fn forge_path(&self) -> PathBuf {
        canonicalize(self.exe_root.join(format!("../forge{}", env::consts::EXE_SUFFIX)))
    }

    /// Returns the path to the cast executable.
    pub fn cast_bin(&self) -> Command {
        let cast = canonicalize(self.exe_root.join(format!("../cast{}", env::consts::EXE_SUFFIX)));
        let mut cmd = Command::new(cast);
        // disable color output for comparisons
        cmd.env("NO_COLOR", "1");
        cmd
    }

    /// Returns the `Config` as spit out by `forge config`
    pub fn config_from_output<I, A>(&self, args: I) -> Config
    where
        I: IntoIterator<Item = A>,
        A: AsRef<OsStr>,
    {
        let mut cmd = self.forge_bin();
        cmd.arg("config").arg("--root").arg(self.root()).args(args).arg("--json");
        let output = cmd.output().unwrap();
        let c = lossy_string(&output.stdout);
        let config: Config = serde_json::from_str(c.as_ref()).unwrap();
        config.sanitized()
    }

    /// Removes all files and dirs inside the project's root dir
    pub fn wipe(&self) {
        pretty_err(self.root(), fs::remove_dir_all(self.root()));
        pretty_err(self.root(), fs::create_dir_all(self.root()));
    }

    /// Removes all contract files from `src`, `test`, `script`
    pub fn wipe_contracts(&self) {
        // Recreates the directory empty.
        fn rm_create(path: &Path) {
            pretty_err(path, fs::remove_dir_all(path));
            pretty_err(path, fs::create_dir(path));
        }
        rm_create(&self.paths().sources);
        rm_create(&self.paths().tests);
        rm_create(&self.paths().scripts);
    }

    /// Initializes the default contracts (Counter.sol, Counter.t.sol, Counter.s.sol).
    ///
    /// This is useful for tests that need the default contracts created by `forge init`.
    /// Most tests should not need this method, as the default behavior is to create an empty
    /// project.
    pub fn initialize_default_contracts(&self) {
        self.add_raw_source(
            "Counter.sol",
            include_str!("../../forge/assets/solidity/CounterTemplate.sol"),
        );
        self.add_raw_test(
            "Counter.t.sol",
            include_str!("../../forge/assets/solidity/CounterTemplate.t.sol"),
        );
        self.add_raw_script(
            "Counter.s.sol",
            include_str!("../../forge/assets/solidity/CounterTemplate.s.sol"),
        );
    }
}
/// Asserts that the standard project paths (cache when `cached`, sources,
/// artifacts, and every library dir) exist on disk.
fn config_paths_exist(paths: &ProjectPathsConfig, cached: bool) {
    if cached {
        assert!(paths.cache.exists());
    }
    assert!(paths.sources.exists());
    assert!(paths.artifacts.exists());
    for lib in &paths.libraries {
        assert!(lib.exists());
    }
}
/// A simple wrapper around a Command with some conveniences.
pub struct TestCommand {
    /// The working directory restored when this command is dropped.
    saved_cwd: PathBuf,
    /// The project used to launch this command.
    project: TestProject,
    /// The actual command we use to control the process.
    cmd: Command,
    /// Held while this command owns the process-wide current directory
    /// (see `set_current_dir`); released on drop.
    current_dir_lock: Option<parking_lot::MutexGuard<'static, ()>>,
    /// Bytes written to the child's stdin on the next execution, if set.
    stdin: Option<Vec<u8>>,
    /// If true, command output is redacted.
    redact_output: bool,
}
impl TestCommand {
    /// Returns a mutable reference to the underlying command.
    pub fn cmd(&mut self) -> &mut Command {
        &mut self.cmd
    }

    /// Replaces the underlying command.
    pub fn set_cmd(&mut self, cmd: Command) -> &mut Self {
        self.cmd = cmd;
        self
    }

    /// Resets the command to the default `forge` command.
    pub fn forge_fuse(&mut self) -> &mut Self {
        self.set_cmd(self.project.forge_bin())
    }

    /// Resets the command to the default `cast` command.
    pub fn cast_fuse(&mut self) -> &mut Self {
        self.set_cmd(self.project.cast_bin())
    }

    /// Sets the process-wide current working directory, holding the global CWD lock
    /// until this command is dropped (or this method is called again).
    pub fn set_current_dir(&mut self, p: impl AsRef<Path>) {
        // Release a previously held lock first to avoid self-deadlock on re-entry.
        drop(self.current_dir_lock.take());
        let lock = CURRENT_DIR_LOCK.lock();
        self.current_dir_lock = Some(lock);
        let p = p.as_ref();
        pretty_err(p, std::env::set_current_dir(p));
    }

    /// Add an argument to pass to the command.
    pub fn arg<A: AsRef<OsStr>>(&mut self, arg: A) -> &mut Self {
        self.cmd.arg(arg);
        self
    }

    /// Add any number of arguments to the command.
    pub fn args<I, A>(&mut self, args: I) -> &mut Self
    where
        I: IntoIterator<Item = A>,
        A: AsRef<OsStr>,
    {
        self.cmd.args(args);
        self
    }

    /// Set the stdin bytes for the next command.
    pub fn stdin(&mut self, stdin: impl Into<Vec<u8>>) -> &mut Self {
        self.stdin = Some(stdin.into());
        self
    }

    /// Convenience function to add `--root project.root()` argument
    pub fn root_arg(&mut self) -> &mut Self {
        let root = self.project.root().to_path_buf();
        self.arg("--root").arg(root)
    }

    /// Set the environment variable `k` to value `v` for the command.
    pub fn env(&mut self, k: impl AsRef<OsStr>, v: impl AsRef<OsStr>) {
        self.cmd.env(k, v);
    }

    /// Sets multiple environment variables for the command from `(key, value)` pairs.
    pub fn envs<I, K, V>(&mut self, envs: I)
    where
        I: IntoIterator<Item = (K, V)>,
        K: AsRef<OsStr>,
        V: AsRef<OsStr>,
    {
        self.cmd.envs(envs);
    }

    /// Unsets the environment variable `k` for the command.
    pub fn unset_env(&mut self, k: impl AsRef<OsStr>) {
        self.cmd.env_remove(k);
    }

    /// Set the working directory for this command.
    ///
    /// Note that this does not need to be called normally, since the creation
    /// of this TestCommand causes its working directory to be set to the
    /// test's directory automatically.
    pub fn current_dir<P: AsRef<Path>>(&mut self, dir: P) -> &mut Self {
        self.cmd.current_dir(dir);
        self
    }

    /// Returns the `Config` as spit out by `forge config`
    #[track_caller]
    pub fn config(&mut self) -> Config {
        self.cmd.args(["config", "--json"]);
        let output = self.assert().success().get_output().stdout_lossy();
        // Reset back to a plain `forge` command for subsequent uses.
        self.forge_fuse();
        serde_json::from_str(output.as_ref()).unwrap()
    }

    /// Runs `git init` inside the project's dir
    #[track_caller]
    pub fn git_init(&self) {
        let mut cmd = Command::new("git");
        cmd.arg("init").current_dir(self.project.root());
        let output = OutputAssert::new(cmd.output().unwrap());
        output.success();
    }

    /// Runs `git submodule status` inside the project's dir
    #[track_caller]
    pub fn git_submodule_status(&self) -> Output {
        let mut cmd = Command::new("git");
        cmd.arg("submodule").arg("status").current_dir(self.project.root());
        cmd.output().unwrap()
    }

    /// Runs `git add .` inside the project's dir
    #[track_caller]
    pub fn git_add(&self) {
        let mut cmd = Command::new("git");
        cmd.current_dir(self.project.root());
        cmd.arg("add").arg(".");
        let output = OutputAssert::new(cmd.output().unwrap());
        output.success();
    }

    /// Runs `git commit .` inside the project's dir
    #[track_caller]
    pub fn git_commit(&self, msg: &str) {
        let mut cmd = Command::new("git");
        cmd.current_dir(self.project.root());
        cmd.arg("commit").arg("-m").arg(msg);
        let output = OutputAssert::new(cmd.output().unwrap());
        output.success();
    }

    /// Runs the command, returning a [`snapbox`] object to assert the command output,
    /// with the given extra redactions applied on top of the defaults.
    #[track_caller]
    pub fn assert_with(&mut self, f: &[RegexRedaction]) -> OutputAssert {
        let assert = OutputAssert::new(self.execute());
        if self.redact_output {
            let mut redactions = test_redactions();
            insert_redactions(f, &mut redactions);
            return assert.with_assert(
                snapbox::Assert::new()
                    .action_env(snapbox::assert::DEFAULT_ACTION_ENV)
                    .redact_with(redactions),
            );
        }
        assert
    }

    /// Runs the command, returning a [`snapbox`] object to assert the command output.
    #[track_caller]
    pub fn assert(&mut self) -> OutputAssert {
        self.assert_with(&[])
    }

    /// Runs the command and asserts that it resulted in success.
    #[track_caller]
    pub fn assert_success(&mut self) -> OutputAssert {
        self.assert().success()
    }

    /// Runs the command and asserts that it resulted in success, with expected JSON data.
    #[track_caller]
    pub fn assert_json_stdout(&mut self, expected: impl IntoData) {
        // `unordered` so key/array ordering differences do not fail the comparison.
        let expected = expected.is(snapbox::data::DataFormat::Json).unordered();
        let stdout = self.assert_success().get_output().stdout.clone();
        let actual = stdout.into_data().is(snapbox::data::DataFormat::Json).unordered();
        assert_data_eq!(actual, expected);
    }

    /// Runs the command and asserts that it resulted in the expected outcome and JSON data.
    #[track_caller]
    pub fn assert_json_stderr(&mut self, success: bool, expected: impl IntoData) {
        let expected = expected.is(snapbox::data::DataFormat::Json).unordered();
        let stderr = if success { self.assert_success() } else { self.assert_failure() }
            .get_output()
            .stderr
            .clone();
        let actual = stderr.into_data().is(snapbox::data::DataFormat::Json).unordered();
        assert_data_eq!(actual, expected);
    }

    /// Runs the command and asserts that it **succeeded** and nothing was printed to stdout.
    #[track_caller]
    pub fn assert_empty_stdout(&mut self) {
        self.assert_success().stdout_eq(Data::new());
    }

    /// Runs the command and asserts that it failed.
    #[track_caller]
    pub fn assert_failure(&mut self) -> OutputAssert {
        self.assert().failure()
    }

    /// Runs the command and asserts that the exit code is `expected`.
    #[track_caller]
    pub fn assert_code(&mut self, expected: i32) -> OutputAssert {
        self.assert().code(expected)
    }

    /// Runs the command and asserts that it **failed** and nothing was printed to stderr.
    #[track_caller]
    pub fn assert_empty_stderr(&mut self) {
        self.assert_failure().stderr_eq(Data::new());
    }

    /// Runs the command with a temporary file argument and asserts that the contents of the file
    /// match the given data.
    #[track_caller]
    pub fn assert_file(&mut self, data: impl IntoData) {
        self.assert_file_with(|this, path| _ = this.arg(path).assert_success(), data);
    }

    /// Creates a temporary file, passes it to `f`, then asserts that the contents of the file match
    /// the given data.
    #[track_caller]
    pub fn assert_file_with(&mut self, f: impl FnOnce(&mut Self, &Path), data: impl IntoData) {
        let file = tempfile::NamedTempFile::new().expect("couldn't create temporary file");
        f(self, file.path());
        assert_data_eq!(Data::read_from(file.path(), None), data);
    }

    /// Does not apply [`snapbox`] redactions to the command output.
    pub fn with_no_redact(&mut self) -> &mut Self {
        self.redact_output = false;
        self
    }

    /// Executes command, applies stdin function and returns output
    #[track_caller]
    pub fn execute(&mut self) -> Output {
        self.try_execute().unwrap()
    }

    /// Spawns the command, writes any configured stdin bytes, and waits for completion.
    #[track_caller]
    pub fn try_execute(&mut self) -> std::io::Result<Output> {
        test_debug!("executing {:?}", self.cmd);
        let mut child =
            self.cmd.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::piped()).spawn()?;
        if let Some(bytes) = self.stdin.take() {
            // Taking (and then dropping) the handle closes the child's stdin after the write.
            child.stdin.take().unwrap().write_all(&bytes)?;
        }
        let output = child.wait_with_output()?;
        test_debug!("exited with {}", output.status);
        test_trace!("\n--- stdout ---\n{}\n--- /stdout ---", output.stdout_lossy());
        test_trace!("\n--- stderr ---\n{}\n--- /stderr ---", output.stderr_lossy());
        Ok(output)
    }
}
impl Drop for TestCommand {
    /// Restores the saved working directory under the global CWD lock.
    fn drop(&mut self) {
        // Reuse the lock we already hold (from `set_current_dir`), or take it now.
        let _lock = match self.current_dir_lock.take() {
            Some(guard) => guard,
            None => CURRENT_DIR_LOCK.lock(),
        };
        if self.saved_cwd.exists() {
            let _ = std::env::set_current_dir(&self.saved_cwd);
        }
    }
}
/// Returns the default set of [`snapbox`] redactions applied to test command output.
///
/// Each entry maps a stable placeholder (e.g. `[ELAPSED]`) to a regex matching
/// nondeterministic output, keeping snapshots reproducible across runs.
fn test_redactions() -> snapbox::Redactions {
    // Built once; cloning the prebuilt set is cheaper than recompiling the regexes per call.
    static REDACTIONS: LazyLock<snapbox::Redactions> = LazyLock::new(|| {
        make_redactions(&[
            ("[SOLC_VERSION]", r"Solc( version)? \d+.\d+.\d+"),
            ("[ELAPSED]", r"(finished )?in \d+(\.\d+)?\w?s( \(.*?s CPU time\))?"),
            ("[GAS]", r"[Gg]as( used)?: \d+"),
            ("[GAS_COST]", r"[Gg]as cost\s*\(\d+\)"),
            ("[GAS_LIMIT]", r"[Gg]as limit\s*\(\d+\)"),
            ("[AVG_GAS]", r"μ: \d+, ~: \d+"),
            ("[FILE]", r"-->.*\.sol"),
            ("[FILE]", r"Location(.|\n)*\.rs(.|\n)*Backtrace"),
            ("[COMPILING_FILES]", r"Compiling \d+ files?"),
            ("[TX_HASH]", r"Transaction hash: 0x[0-9A-Fa-f]{64}"),
            ("[ADDRESS]", r"Address: +0x[0-9A-Fa-f]{40}"),
            ("[PUBLIC_KEY]", r"Public key: +0x[0-9A-Fa-f]{128}"),
            ("[PRIVATE_KEY]", r"Private key: +0x[0-9A-Fa-f]{64}"),
            ("[UPDATING_DEPENDENCIES]", r"Updating dependencies in .*"),
            ("[SAVED_TRANSACTIONS]", r"Transactions saved to: .*\.json"),
            ("[SAVED_SENSITIVE_VALUES]", r"Sensitive values saved to: .*\.json"),
            ("[ESTIMATED_GAS_PRICE]", r"Estimated gas price:\s*(\d+(\.\d+)?)\s*gwei"),
            ("[ESTIMATED_TOTAL_GAS_USED]", r"Estimated total gas used for script: \d+"),
            (
                "[ESTIMATED_AMOUNT_REQUIRED]",
                r"Estimated amount required:\s*(\d+(\.\d+)?)\s*[A-Z]{3}",
            ),
        ])
    });
    REDACTIONS.clone()
}
/// A tuple of a placeholder and a regex replacement string.
///
/// Example: `("[ELAPSED]", r"in \d+s")` replaces text matching the pattern with `[ELAPSED]`.
pub type RegexRedaction = (&'static str, &'static str);
/// Creates a [`snapbox`] redactions object from a list of regex redactions.
fn make_redactions(redactions: &[RegexRedaction]) -> snapbox::Redactions {
    let mut result = snapbox::Redactions::new();
    insert_redactions(redactions, &mut result);
    result
}
/// Compiles each pattern and registers it under its placeholder in `r`.
/// Panics (with the offending pattern) on an invalid regex or a rejected insertion.
fn insert_redactions(redactions: &[RegexRedaction], r: &mut snapbox::Redactions) {
    redactions.iter().for_each(|&(placeholder, pattern)| {
        let regex = Regex::new(pattern).expect(pattern);
        r.insert(placeholder, regex).expect(pattern);
    });
}
/// Extension trait for [`Output`].
pub trait OutputExt {
    /// Returns the stdout as lossy string (invalid UTF-8 replaced, `\r\n` normalized to `\n`)
    fn stdout_lossy(&self) -> String;

    /// Returns the stderr as lossy string (invalid UTF-8 replaced, `\r\n` normalized to `\n`)
    fn stderr_lossy(&self) -> String;
}
impl OutputExt for Output {
    /// Lossily decodes the captured stdout, normalizing line endings.
    fn stdout_lossy(&self) -> String {
        lossy_string(self.stdout.as_slice())
    }

    /// Lossily decodes the captured stderr, normalizing line endings.
    fn stderr_lossy(&self) -> String {
        lossy_string(self.stderr.as_slice())
    }
}
/// Converts raw bytes to a `String` lossily (invalid UTF-8 becomes U+FFFD) and
/// normalizes Windows line endings (`\r\n`) to `\n`.
pub fn lossy_string(bytes: &[u8]) -> String {
    let text = String::from_utf8_lossy(bytes);
    text.replace("\r\n", "\n")
}
/// Canonicalizes `path`, falling back to the path unchanged if canonicalization fails
/// (e.g. the path does not exist yet).
fn canonicalize(path: impl AsRef<Path>) -> PathBuf {
    let path = path.as_ref();
    match foundry_common::fs::canonicalize_path(path) {
        Ok(canonical) => canonical,
        Err(_) => path.to_path_buf(),
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/filter.rs | crates/test-utils/src/filter.rs | use foundry_common::TestFilter;
use regex::Regex;
use std::path::Path;
/// A test filter matching test signatures, contract names and source paths against
/// regular expressions, with optional exclusion patterns.
#[derive(Clone, Debug)]
pub struct Filter {
    /// Pattern a test signature must match.
    test_regex: Regex,
    /// Pattern a contract name must match.
    contract_regex: Regex,
    /// Pattern a source path must match.
    path_regex: Regex,
    /// If set, tests matching this are rejected even when `test_regex` matches.
    exclude_tests: Option<Regex>,
    /// If set, contracts matching this are rejected even when `contract_regex` matches.
    exclude_contracts: Option<Regex>,
    /// If set, paths matching this are rejected even when `path_regex` matches.
    exclude_paths: Option<Regex>,
}
impl Filter {
    /// Builds a filter from test, contract and path regex patterns, with no exclusions.
    ///
    /// # Panics
    ///
    /// Panics if any pattern is not a valid regex.
    pub fn new(test_pattern: &str, contract_pattern: &str, path_pattern: &str) -> Self {
        Self {
            test_regex: Regex::new(test_pattern)
                .unwrap_or_else(|_| panic!("Failed to parse test pattern: `{test_pattern}`")),
            contract_regex: Regex::new(contract_pattern).unwrap_or_else(|_| {
                panic!("Failed to parse contract pattern: `{contract_pattern}`")
            }),
            path_regex: Regex::new(path_pattern)
                .unwrap_or_else(|_| panic!("Failed to parse path pattern: `{path_pattern}`")),
            exclude_tests: None,
            exclude_contracts: None,
            exclude_paths: None,
        }
    }

    /// Filter matching the given contract pattern and any test/path.
    pub fn contract(contract_pattern: &str) -> Self {
        Self::new(".*", contract_pattern, ".*")
    }

    /// Filter matching the given path pattern and any test/contract.
    pub fn path(path_pattern: &str) -> Self {
        Self::new(".*", ".*", path_pattern)
    }

    /// All tests to also exclude
    ///
    /// This is a workaround since regex does not support negative look aheads
    pub fn exclude_tests(mut self, pattern: &str) -> Self {
        self.exclude_tests = Some(Regex::new(pattern).unwrap());
        self
    }

    /// All contracts to also exclude
    ///
    /// This is a workaround since regex does not support negative look aheads
    pub fn exclude_contracts(mut self, pattern: &str) -> Self {
        self.exclude_contracts = Some(Regex::new(pattern).unwrap());
        self
    }

    /// All paths to also exclude
    ///
    /// This is a workaround since regex does not support negative look aheads
    pub fn exclude_paths(mut self, pattern: &str) -> Self {
        self.exclude_paths = Some(Regex::new(pattern).unwrap());
        self
    }

    /// Filter that matches everything and excludes nothing.
    pub fn matches_all() -> Self {
        // Delegate to `new` instead of duplicating the regex construction;
        // `".*"` always compiles, so this cannot panic.
        Self::new(".*", ".*", ".*")
    }
}
impl TestFilter for Filter {
    /// A test matches when it is not excluded and matches the test pattern.
    fn matches_test(&self, test_signature: &str) -> bool {
        let excluded =
            self.exclude_tests.as_ref().is_some_and(|re| re.is_match(test_signature));
        !excluded && self.test_regex.is_match(test_signature)
    }

    /// A contract matches when it is not excluded and matches the contract pattern.
    fn matches_contract(&self, contract_name: &str) -> bool {
        let excluded =
            self.exclude_contracts.as_ref().is_some_and(|re| re.is_match(contract_name));
        !excluded && self.contract_regex.is_match(contract_name)
    }

    /// A path matches when it is valid UTF-8, not excluded, and matches the path pattern.
    fn matches_path(&self, path: &Path) -> bool {
        match path.to_str() {
            Some(path) => {
                let excluded =
                    self.exclude_paths.as_ref().is_some_and(|re| re.is_match(path));
                !excluded && self.path_regex.is_match(path)
            }
            // Non-UTF-8 paths never match.
            None => false,
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/util.rs | crates/test-utils/src/util.rs | use foundry_compilers::{
Project, ProjectCompileOutput, Vyper, project_util::copy_dir, utils::RuntimeOrHandle,
};
use foundry_config::Config;
use std::{
env,
fs::{self, File},
io::{IsTerminal, Read, Seek, Write},
path::{Path, PathBuf},
process::Command,
sync::LazyLock,
};
pub use crate::{ext::*, prj::*};
/// The commit of forge-std to use (read from `testdata/forge-std-rev` at compile time).
pub const FORGE_STD_REVISION: &str = include_str!("../../../testdata/forge-std-rev");
/// Stores whether `stdout` is a tty / terminal.
pub static IS_TTY: LazyLock<bool> = LazyLock::new(|| std::io::stdout().is_terminal());
/// Global default template path. Contains the global template project from which all other
/// temp projects are initialized. See [`initialize()`] for more info.
static TEMPLATE_PATH: LazyLock<PathBuf> =
    LazyLock::new(|| env::temp_dir().join("foundry-forge-test-template"));
/// Global default template lock. If its contents are not exactly `"1"`, the global template will
/// be re-initialized. See [`initialize()`] for more info.
static TEMPLATE_LOCK: LazyLock<PathBuf> =
    LazyLock::new(|| env::temp_dir().join("foundry-forge-test-template.lock"));
/// The default Solc version used when compiling tests.
pub const SOLC_VERSION: &str = "0.8.30";
/// Another Solc version used when compiling tests.
///
/// Necessary to avoid downloading multiple versions.
pub const OTHER_SOLC_VERSION: &str = "0.8.26";
/// Initializes a project with `forge init` at the given path from a template directory.
///
/// This should be called after an empty project is created like in
/// [some of this crate's macros](crate::forgetest_init).
///
/// ## Note
///
/// This doesn't always run `forge init`, instead opting to copy an already-initialized template
/// project from a global template path. This is done to speed up tests.
///
/// This used to use a `static` `Lazy`, but this approach does not work with `cargo-nextest`
/// because it runs each test in a separate process. Instead, we use a global lock file to ensure
/// that only one test can initialize the template at a time.
///
/// This sets the project's solc version to the [`SOLC_VERSION`].
pub fn initialize(target: &Path) {
    test_debug!("initializing {}", target.display());
    let tpath = TEMPLATE_PATH.as_path();
    pretty_err(tpath, fs::create_dir_all(tpath));
    // Initialize the global template if necessary.
    let mut lock = crate::fd_lock::new_lock(TEMPLATE_LOCK.as_path());
    let mut _read = lock.read().unwrap();
    if !crate::fd_lock::lock_exists(TEMPLATE_LOCK.as_path()) {
        // We are the first to acquire the lock:
        // - initialize a new empty temp project;
        // - run `forge init`;
        // - run `forge build`;
        // - copy it over to the global template;
        // Ideally we would be able to initialize a temp project directly in the global template,
        // but `TempProject` does not currently allow this: https://github.com/foundry-rs/compilers/issues/22
        // Release the read lock and acquire a write lock, initializing the lock file.
        drop(_read);
        let mut write = lock.write().unwrap();
        let mut data = Vec::new();
        write.read_to_end(&mut data).unwrap();
        // Re-check under the write lock: another process may have initialized the
        // template between our existence check and acquiring the write lock.
        if data != crate::fd_lock::LOCK_TOKEN {
            // Initialize and build.
            let (prj, mut cmd) = setup_forge("template", foundry_compilers::PathStyle::Dapptools);
            test_debug!("- initializing template dir in {}", prj.root().display());
            cmd.args(["init", "--force", "--empty"]).assert_success();
            prj.write_config(Config {
                solc: Some(foundry_config::SolcReq::Version(SOLC_VERSION.parse().unwrap())),
                ..Default::default()
            });
            // Checkout forge-std.
            let output = Command::new("git")
                .current_dir(prj.root().join("lib/forge-std"))
                .args(["checkout", FORGE_STD_REVISION])
                .output()
                .expect("failed to checkout forge-std");
            assert!(output.status.success(), "{output:#?}");
            // Build the project.
            cmd.forge_fuse().arg("build").assert_success();
            // Remove the existing template, if any.
            let _ = fs::remove_dir_all(tpath);
            // Copy the template to the global template path.
            pretty_err(tpath, copy_dir(prj.root(), tpath));
            // Update lockfile to mark that template is initialized.
            write.set_len(0).unwrap();
            write.seek(std::io::SeekFrom::Start(0)).unwrap();
            write.write_all(crate::fd_lock::LOCK_TOKEN).unwrap();
        }
        // Release the write lock and acquire a new read lock.
        drop(write);
        _read = lock.read().unwrap();
    }
    test_debug!("- copying template dir from {}", tpath.display());
    pretty_err(target, fs::create_dir_all(target));
    pretty_err(target, copy_dir(tpath, target));
}
/// Compile the project with a lock for the cache.
///
/// Uses a file lock because `cargo-nextest` runs each test in a separate process, so an
/// in-process lock would not prevent concurrent cache access. This is similar to
/// [`initialize`]; see its comments for more details.
pub fn get_compiled(project: &mut Project) -> ProjectCompileOutput {
    let lock_file_path = project.sources_path().join(".lock");
    let mut lock = crate::fd_lock::new_lock(&lock_file_path);
    let read = lock.read().unwrap();
    let mut write = None;
    if !project.cache_path().exists() || !crate::fd_lock::lock_exists(&lock_file_path) {
        // Cache missing or never marked complete: upgrade to a write lock so only
        // one process compiles. In the `else` branch, `read` stays held until return.
        drop(read);
        write = Some(lock.write().unwrap());
        test_debug!("cache miss for {}", lock_file_path.display());
    } else {
        test_debug!("cache hit for {}", lock_file_path.display());
    }
    if project.compiler.vyper.is_none() {
        project.compiler.vyper = Some(get_vyper());
    }
    test_debug!("compiling {}", lock_file_path.display());
    // Initialize at the point of use instead of a C-style forward declaration.
    let out = project.compile().unwrap();
    test_debug!("compiled {}", lock_file_path.display());
    if out.has_compiler_errors() {
        panic!("Compiled with errors:\n{out}");
    }
    // Mark the cache as complete so subsequent runs take the read-lock fast path.
    if let Some(write) = &mut write {
        write.write_all(crate::fd_lock::LOCK_TOKEN).unwrap();
    }
    out
}
/// Installs Vyper if it's not already present.
pub fn get_vyper() -> Vyper {
    // Shared download location in the system temp dir, reused across test runs
    // and processes.
    static VYPER: LazyLock<PathBuf> = LazyLock::new(|| std::env::temp_dir().join("vyper"));
    // Prefer a `vyper` binary that is already on `$PATH`.
    if let Ok(vyper) = Vyper::new("vyper") {
        return vyper;
    }
    // Next, try a binary downloaded by a previous run.
    if let Ok(vyper) = Vyper::new(&*VYPER) {
        return vyper;
    }
    // Otherwise download it, blocking on the async installer from this sync context.
    return RuntimeOrHandle::new().block_on(install());

    async fn install() -> Vyper {
        #[cfg(target_family = "unix")]
        use std::{fs::Permissions, os::unix::fs::PermissionsExt};

        let path = VYPER.as_path();
        // NOTE(review): `File::create` truncates before the lock below is taken;
        // a second process could truncate a download already in progress.
        // Consider `OpenOptions` without `truncate` — verify against concurrent runs.
        let mut file = File::create(path).unwrap();
        // Use an exclusive file lock so only one process performs the download.
        if let Err(e) = file.try_lock() {
            if let fs::TryLockError::WouldBlock = e {
                // Another process holds the lock (is downloading): wait for it to
                // finish, then reuse the binary it produced.
                file.lock().unwrap();
                assert!(path.exists());
                return Vyper::new(path).unwrap();
            }
            file.lock().unwrap();
        }
        // Pick the release-asset suffix for the current platform.
        let suffix = match svm::platform() {
            svm::Platform::MacOsAarch64 => "darwin",
            svm::Platform::LinuxAmd64 => "linux",
            svm::Platform::WindowsAmd64 => "windows.exe",
            platform => panic!(
                "unsupported platform {platform:?} for installing vyper, \
                install it manually and add it to $PATH"
            ),
        };
        let url = format!(
            "https://github.com/vyperlang/vyper/releases/download/v0.4.3/vyper.0.4.3+commit.bff19ea2.{suffix}"
        );
        test_debug!("downloading vyper from {url}");
        let res = reqwest::Client::builder().build().unwrap().get(url).send().await.unwrap();
        assert!(res.status().is_success());
        let bytes = res.bytes().await.unwrap();
        file.write_all(&bytes).unwrap();
        // Make the downloaded binary executable on unix.
        #[cfg(target_family = "unix")]
        file.set_permissions(Permissions::from_mode(0o755)).unwrap();
        Vyper::new(path).unwrap()
    }
}
/// Unwraps `res`, panicking with a message prefixed by `path` on failure.
///
/// `#[track_caller]` attributes the panic location to the call site.
#[track_caller]
pub fn pretty_err<T, E: std::error::Error>(path: impl AsRef<Path>, res: Result<T, E>) -> T {
    match res {
        Err(err) => {
            let path = path.as_ref().display();
            panic!("{path}: {err}")
        }
        Ok(value) => value,
    }
}
/// Reads the file at `path` into a `String`, panicking with a path-annotated
/// message if the read fails.
pub fn read_string(path: impl AsRef<Path>) -> String {
    // Monomorphized inner function keeps codegen for the generic wrapper small.
    fn inner(path: &Path) -> String {
        pretty_err(path, std::fs::read_to_string(path))
    }
    inner(path.as_ref())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/macros.rs | crates/test-utils/src/macros.rs | /// A macro to generate a new integration test case
///
/// The `forgetest!` macro's first argument is the name of the test, the second argument is a
/// closure to configure and execute the test. The `TestProject` provides utility functions to setup
/// the project's workspace. The `TestCommand` is a wrapper around the actual `forge` executable
/// that this then executed with the configured command arguments.
#[macro_export]
macro_rules! forgetest {
    // Short form: defaults to the `Dapptools` path style.
    ($(#[$attr:meta])* $test:ident, |$prj:ident, $cmd:ident| $e:expr) => {
        $crate::forgetest!($(#[$attr])* $test, $crate::foundry_compilers::PathStyle::Dapptools, |$prj, $cmd| $e);
    };
    // Full form: generates a `#[test]` that sets up a fresh forge project and
    // a `forge` command before running the test body `$e`.
    ($(#[$attr:meta])* $test:ident, $style:expr, |$prj:ident, $cmd:ident| $e:expr) => {
        #[expect(clippy::disallowed_macros)]
        #[test]
        $(#[$attr])*
        fn $test() {
            let (mut $prj, mut $cmd) = $crate::util::setup_forge(stringify!($test), $style);
            $e
        }
    };
}
/// Like `forgetest!` but generates an async test running on a multi-threaded
/// tokio runtime.
#[macro_export]
macro_rules! forgetest_async {
    // Short form: defaults to the `Dapptools` path style.
    ($(#[$attr:meta])* $test:ident, |$prj:ident, $cmd:ident| $e:expr) => {
        $crate::forgetest_async!($(#[$attr])* $test, $crate::foundry_compilers::PathStyle::Dapptools, |$prj, $cmd| $e);
    };
    // Full form: async body with explicit path style.
    ($(#[$attr:meta])* $test:ident, $style:expr, |$prj:ident, $cmd:ident| $e:expr) => {
        #[expect(clippy::disallowed_macros)]
        #[tokio::test(flavor = "multi_thread")]
        $(#[$attr])*
        async fn $test() {
            let (mut $prj, mut $cmd) = $crate::util::setup_forge(stringify!($test), $style);
            $e;
            return (); // Works around weird method resolution in `$e` due to `#[tokio::test]`.
        }
    };
}
/// Like `forgetest!` but sets up a `cast` command via `setup_cast`; supports an
/// optional `async` form that runs under a multi-threaded tokio runtime.
#[macro_export]
macro_rules! casttest {
    // Short form: defaults to the `Dapptools` path style.
    ($(#[$attr:meta])* $test:ident, $($async:ident)? |$prj:ident, $cmd:ident| $e:expr) => {
        $crate::casttest!($(#[$attr])* $test, $crate::foundry_compilers::PathStyle::Dapptools, $($async)? |$prj, $cmd| $e);
    };
    // Synchronous test.
    ($(#[$attr:meta])* $test:ident, $style:expr, |$prj:ident, $cmd:ident| $e:expr) => {
        #[expect(clippy::disallowed_macros)]
        #[test]
        $(#[$attr])*
        fn $test() {
            let (mut $prj, mut $cmd) = $crate::util::setup_cast(stringify!($test), $style);
            $e
        }
    };
    // Asynchronous test.
    ($(#[$attr:meta])* $test:ident, $style:expr, async |$prj:ident, $cmd:ident| $e:expr) => {
        #[expect(clippy::disallowed_macros)]
        #[tokio::test(flavor = "multi_thread")]
        $(#[$attr])*
        async fn $test() {
            let (mut $prj, mut $cmd) = $crate::util::setup_cast(stringify!($test), $style);
            $e;
            return (); // Works around weird method resolution in `$e` due to `#[tokio::test]`.
        }
    };
}
/// Same as `forgetest` but returns an already initialized project workspace (`forge init --empty`).
#[macro_export]
macro_rules! forgetest_init {
    // Short form: defaults to the `Dapptools` path style.
    ($(#[$attr:meta])* $test:ident, |$prj:ident, $cmd:ident| $e:expr) => {
        $crate::forgetest_init!($(#[$attr])* $test, $crate::foundry_compilers::PathStyle::Dapptools, |$prj, $cmd| $e);
    };
    // Full form: sets up the project, initializes it, then runs the body.
    ($(#[$attr:meta])* $test:ident, $style:expr, |$prj:ident, $cmd:ident| $e:expr) => {
        #[test]
        $(#[$attr])*
        fn $test() {
            let (mut $prj, mut $cmd) = $crate::util::setup_forge(stringify!($test), $style);
            $crate::util::initialize($prj.root());
            $e
        }
    };
}
/// Setup forge soldeer
#[macro_export]
macro_rules! forgesoldeer {
    // Short form: defaults to the `Dapptools` path style.
    ($(#[$attr:meta])* $test:ident, |$prj:ident, $cmd:ident| $e:expr) => {
        $crate::forgesoldeer!($(#[$attr])* $test, $crate::foundry_compilers::PathStyle::Dapptools, |$prj, $cmd| $e);
    };
    // Full form: sets up the project, initializes it, then runs the body.
    ($(#[$attr:meta])* $test:ident, $style:expr, |$prj:ident, $cmd:ident| $e:expr) => {
        #[expect(clippy::disallowed_macros)]
        #[test]
        $(#[$attr])*
        fn $test() {
            let (mut $prj, mut $cmd) = $crate::util::setup_forge(stringify!($test), $style);
            $crate::util::initialize($prj.root());
            $e
        }
    };
}
/// Forwards formatted arguments to [`crate::test_debug`].
#[macro_export]
macro_rules! test_debug {
    ($($args:tt)*) => {
        $crate::test_debug(format_args!($($args)*))
    }
}

/// Forwards formatted arguments to [`crate::test_trace`].
#[macro_export]
macro_rules! test_trace {
    ($($args:tt)*) => {
        $crate::test_trace(format_args!($($args)*))
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/rpc.rs | crates/test-utils/src/rpc.rs | //! RPC API keys utilities.
use foundry_config::{
NamedChain::{
self, Arbitrum, Base, BinanceSmartChainTestnet, Celo, Mainnet, Optimism, Polygon, Sepolia,
},
RpcEndpointUrl, RpcEndpoints,
};
use rand::seq::SliceRandom;
use std::{
env,
sync::{
LazyLock,
atomic::{AtomicUsize, Ordering},
},
};
// Declares a `static` [`ShuffledList`] of string slices, lazily constructed
// (and therefore shuffled) on first access.
macro_rules! shuffled_list {
    ($name:ident, $e:expr $(,)?) => {
        static $name: LazyLock<ShuffledList<&'static str>> =
            LazyLock::new(|| ShuffledList::new($e));
    };
}
/// A list whose elements are handed out round-robin in a randomized order.
struct ShuffledList<T> {
    /// Shuffled once at construction; never mutated afterwards.
    list: Vec<T>,
    /// Monotonically increasing cursor; wrapped with a modulo in `next`.
    index: AtomicUsize,
}

impl<T> ShuffledList<T> {
    /// Shuffles `list` once and starts the cursor at 0.
    ///
    /// Panics if `list` is empty.
    fn new(mut list: Vec<T>) -> Self {
        assert!(!list.is_empty());
        list.shuffle(&mut rand::rng());
        Self { list, index: AtomicUsize::new(0) }
    }

    /// Returns the next element, cycling through the list indefinitely.
    fn next(&self) -> &T {
        // Relaxed ordering is sufficient: only the counter value is shared, no
        // other memory needs to be ordered relative to it.
        let index = self.index.fetch_add(1, Ordering::Relaxed);
        &self.list[index % self.list.len()]
    }
}
// HTTP endpoints with archive state.
shuffled_list!(
    HTTP_ARCHIVE_DOMAINS,
    vec![
        //
        "reth-ethereum.ithaca.xyz/rpc",
    ],
);

// General-purpose HTTP endpoints.
shuffled_list!(
    HTTP_DOMAINS,
    vec![
        //
        "reth-ethereum.ithaca.xyz/rpc",
        // "reth-ethereum-full.ithaca.xyz/rpc",
    ],
);

// WebSocket endpoints with archive state.
shuffled_list!(
    WS_ARCHIVE_DOMAINS,
    vec![
        //
        "reth-ethereum.ithaca.xyz/ws",
    ],
);

// General-purpose WebSocket endpoints.
shuffled_list!(
    WS_DOMAINS,
    vec![
        //
        "reth-ethereum.ithaca.xyz/ws",
        // "reth-ethereum-full.ithaca.xyz/ws",
    ],
);

// List of general purpose DRPC keys to rotate through
shuffled_list!(
    DRPC_KEYS,
    vec![
        "Agc9NK9-6UzYh-vQDDM80Tv0A5UnBkUR8I3qssvAG40d",
        "AjUPUPonSEInt2CZ_7A-ai3hMyxxBlsR8I4EssvAG40d",
    ],
);

// List of etherscan keys.
shuffled_list!(
    ETHERSCAN_KEYS,
    vec![
        "MCAUM7WPE9XP5UQMZPCKIBUJHPM1C24FP6",
        "JW6RWCG2C5QF8TANH4KC7AYIF1CX7RB5D1",
        "ZSMDY6BI2H55MBE3G9CUUQT4XYUDBB6ZSK",
        "4FYHTY429IXYMJNS4TITKDMUKW5QRYDX61",
        "QYKNT5RHASZ7PGQE68FNQWH99IXVTVVD2I",
        "VXMQ117UN58Y4RHWUB8K1UGCEA7UQEWK55",
        "C7I2G4JTA5EPYS42Z8IZFEIMQNI5GXIJEV",
        "A15KZUMZXXCK1P25Y1VP1WGIVBBHIZDS74",
        "3IA6ASNQXN8WKN7PNFX7T72S9YG56X9FPG",
    ],
);
/// The RPC endpoints used during tests.
pub fn rpc_endpoints() -> RpcEndpoints {
    RpcEndpoints::new([
        // Two mainnet aliases so tests can fork the same chain twice.
        ("mainnet", RpcEndpointUrl::Url(next_http_archive_rpc_url())),
        ("mainnet2", RpcEndpointUrl::Url(next_http_archive_rpc_url())),
        ("sepolia", RpcEndpointUrl::Url(next_rpc_endpoint(NamedChain::Sepolia))),
        ("optimism", RpcEndpointUrl::Url(next_rpc_endpoint(NamedChain::Optimism))),
        ("arbitrum", RpcEndpointUrl::Url(next_rpc_endpoint(NamedChain::Arbitrum))),
        ("polygon", RpcEndpointUrl::Url(next_rpc_endpoint(NamedChain::Polygon))),
        ("bsc", RpcEndpointUrl::Url(next_rpc_endpoint(NamedChain::BinanceSmartChain))),
        ("avaxTestnet", RpcEndpointUrl::Url("https://api.avax-test.network/ext/bc/C/rpc".into())),
        ("moonbeam", RpcEndpointUrl::Url("https://moonbeam-rpc.publicnode.com".into())),
        // Resolved from the `RPC_ENV_ALIAS` environment variable at runtime.
        ("rpcEnvAlias", RpcEndpointUrl::Env("${RPC_ENV_ALIAS}".into())),
    ])
}
/// Returns the next _mainnet_ HTTP rpc URL in rotation.
///
/// This will rotate all available rpc endpoints.
pub fn next_http_rpc_endpoint() -> String {
    next_rpc_endpoint(NamedChain::Mainnet)
}

/// Returns the next _mainnet_ WS rpc URL in rotation.
///
/// This will rotate all available rpc endpoints.
pub fn next_ws_rpc_endpoint() -> String {
    next_ws_endpoint(NamedChain::Mainnet)
}

/// Returns the next HTTP RPC URL.
pub fn next_rpc_endpoint(chain: NamedChain) -> String {
    next_url(false, chain)
}

/// Returns the next WS RPC URL.
pub fn next_ws_endpoint(chain: NamedChain) -> String {
    next_url(true, chain)
}

/// Returns an HTTP URL that has access to archive state.
pub fn next_http_archive_rpc_url() -> String {
    next_archive_url(false)
}

/// Returns a websocket URL that has access to archive state.
pub fn next_ws_archive_rpc_url() -> String {
    next_archive_url(true)
}
/// Returns a URL that has access to archive state, WS or HTTP depending on
/// `is_ws`, rotating through the corresponding domain pool.
fn next_archive_url(is_ws: bool) -> String {
    let (scheme, pool) =
        if is_ws { ("wss", &WS_ARCHIVE_DOMAINS) } else { ("https", &HTTP_ARCHIVE_DOMAINS) };
    let url = format!("{scheme}://{}", pool.next());
    test_debug!("next_archive_url(is_ws={is_ws}) = {}", debug_url(&url));
    url
}
/// Returns the next etherscan api key.
///
/// Prefers the `ETHERSCAN_KEY` environment variable; falls back to the
/// built-in rotating key list when it is unset or empty.
pub fn next_etherscan_api_key() -> String {
    let mut key = env::var("ETHERSCAN_KEY").unwrap_or_default();
    if key.is_empty() {
        key = ETHERSCAN_KEYS.next().to_string();
    }
    // Log only a short prefix of the key. `key.get(..6)` avoids the panic that
    // `&key[..6]` would cause for env-supplied keys shorter than 6 bytes (or
    // ones where byte 6 is not a UTF-8 character boundary).
    test_debug!("next_etherscan_api_key() = {}...", key.get(..6).unwrap_or(&key));
    key
}
/// Returns the next HTTP or WS RPC URL for `chain`, logging a redacted form of
/// the chosen URL.
fn next_url(is_ws: bool, chain: NamedChain) -> String {
    let url = next_url_inner(is_ws, chain);
    test_debug!("next_url(is_ws={is_ws}, chain={chain:?}) = {}", debug_url(&url));
    url
}
/// Selects the underlying RPC URL for `chain`.
///
/// Resolution order: fixed public endpoints for a few chains, then
/// chain-specific environment-variable overrides, then the rotating mainnet
/// domain pools, and finally a DRPC load-balancer URL.
fn next_url_inner(is_ws: bool, chain: NamedChain) -> String {
    // Chains served by fixed public endpoints.
    if matches!(chain, Base) {
        return "https://mainnet.base.org".to_string();
    }

    if matches!(chain, Optimism) {
        return "https://mainnet.optimism.io".to_string();
    }

    if matches!(chain, BinanceSmartChainTestnet) {
        return "https://bsc-testnet-rpc.publicnode.com".to_string();
    }

    if matches!(chain, Celo) {
        return "https://celo.drpc.org".to_string();
    }

    // Environment-variable overrides for Sepolia and Arbitrum.
    if matches!(chain, Sepolia) {
        let rpc_url = env::var("ETH_SEPOLIA_RPC").unwrap_or_default();
        if !rpc_url.is_empty() {
            return rpc_url;
        }
    }

    if matches!(chain, Arbitrum) {
        let rpc_url = env::var("ARBITRUM_RPC").unwrap_or_default();
        if !rpc_url.is_empty() {
            return rpc_url;
        }
    }

    // Toggle: when the reth endpoints misbehave, flip this to route mainnet
    // through DRPC instead.
    let reth_works = true;
    let domain = if reth_works && matches!(chain, Mainnet) {
        *(if is_ws { &WS_DOMAINS } else { &HTTP_DOMAINS }).next()
    } else {
        // DRPC for other networks used in tests.
        let key = DRPC_KEYS.next();
        let network = match chain {
            Mainnet => "ethereum",
            Polygon => "polygon",
            Arbitrum => "arbitrum",
            Sepolia => "sepolia",
            _ => "",
        };
        &format!("lb.drpc.org/ogrpc?network={network}&dkey={key}")
    };

    if is_ws { format!("wss://{domain}") } else { format!("https://{domain}") }
}
/// Basic redaction for debugging RPC URLs.
///
/// Keeps only the scheme, the host, and the first few characters of the path,
/// so keys embedded in paths or query strings are not logged.
fn debug_url(url: &str) -> impl std::fmt::Display + '_ {
    let parsed = reqwest::Url::parse(url).unwrap();
    let full_path = parsed.path();
    let short_path = full_path.get(..8).unwrap_or(full_path);
    format!("{}://{}{}", parsed.scheme(), parsed.host_str().unwrap(), short_path)
}
#[cfg(test)]
#[expect(clippy::disallowed_macros)]
mod tests {
    use super::*;
    use alloy_primitives::address;
    use foundry_config::Chain;

    /// Manually-run sanity check: verifies every hardcoded etherscan key works
    /// and that all keys return the same ABI for a fixed contract.
    #[tokio::test]
    #[ignore = "run manually"]
    async fn test_etherscan_keys() {
        // Fixed mainnet contract address used as a stable ABI lookup target.
        let address = address!("0xdAC17F958D2ee523a2206206994597C13D831ec7");
        let mut first_abi = None;
        let mut failed = Vec::new();
        for (i, &key) in ETHERSCAN_KEYS.list.iter().enumerate() {
            println!("trying key {i} ({key})");

            let client = foundry_block_explorers::Client::builder()
                .chain(Chain::mainnet())
                .unwrap()
                .with_api_key(key)
                .build()
                .unwrap();

            // Records the failing key with its error; iteration continues so
            // all bad keys are reported at once.
            let mut fail = |e: &str| {
                eprintln!("key {i} ({key}) failed: {e}");
                failed.push(key);
            };

            let abi = match client.contract_abi(address).await {
                Ok(abi) => abi,
                Err(e) => {
                    fail(&e.to_string());
                    continue;
                }
            };

            // All keys must agree on the same ABI.
            if let Some(first_abi) = &first_abi {
                if abi != *first_abi {
                    fail("abi mismatch");
                }
            } else {
                first_abi = Some(abi);
            }
        }
        if !failed.is_empty() {
            panic!("failed keys: {failed:#?}");
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/script.rs | crates/test-utils/src/script.rs | use crate::{TestCommand, init_tracing, util::lossy_string};
use alloy_primitives::{Address, address};
use alloy_provider::Provider;
use eyre::Result;
use foundry_common::provider::{RetryProvider, get_http_provider};
use std::{
collections::BTreeMap,
fs,
path::{Path, PathBuf},
};
/// Destination path of the broadcast test contract inside a test project.
const BROADCAST_TEST_PATH: &str = "src/Broadcast.t.sol";
/// Path to the repository's shared `testdata` directory, resolved relative to
/// this crate's manifest at compile time.
const TESTDATA: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../../testdata");
/// Resets `cmd` to a `forge script` invocation for `target_contract`, rooted at
/// `project_root`, optionally forking from `endpoint`.
fn init_script_cmd(
    cmd: &mut TestCommand,
    project_root: &Path,
    target_contract: &str,
    endpoint: Option<&str>,
) {
    cmd.forge_fuse();
    cmd.set_current_dir(project_root);

    cmd.args(["script", target_contract, "--root", project_root.to_str().unwrap(), "-vvvvv"]);

    if let Some(rpc_url) = endpoint {
        cmd.args(["--fork-url", rpc_url]);
    }
}
/// A helper struct to test forge script scenarios
pub struct ScriptTester {
    /// Addresses of the built-in test accounts.
    pub accounts_pub: Vec<Address>,
    /// Hex private keys matching `accounts_pub`, index-aligned.
    pub accounts_priv: Vec<String>,
    /// RPC provider; present only when an endpoint was supplied.
    pub provider: Option<RetryProvider>,
    /// Nonces recorded per account index (see `load_private_keys`).
    pub nonces: BTreeMap<u32, u64>,
    /// Nonces recorded per address (see `load_addresses`).
    pub address_nonces: BTreeMap<Address, u64>,
    /// The `forge` command under test.
    pub cmd: TestCommand,
    /// Root directory of the test project.
    pub project_root: PathBuf,
    /// Path or name of the script contract being run.
    pub target_contract: String,
    /// The RPC endpoint, if any.
    pub endpoint: Option<String>,
}
impl ScriptTester {
    /// Creates a new instance of a Tester for the given contract
    pub fn new(
        mut cmd: TestCommand,
        endpoint: Option<&str>,
        project_root: &Path,
        target_contract: &str,
    ) -> Self {
        init_tracing();
        Self::copy_testdata(project_root).unwrap();
        init_script_cmd(&mut cmd, project_root, target_contract, endpoint);

        let mut provider = None;
        if let Some(endpoint) = endpoint {
            provider = Some(get_http_provider(endpoint))
        }

        Self {
            // Built-in test accounts and their private keys (index-aligned).
            accounts_pub: vec![
                address!("0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"),
                address!("0x70997970C51812dc3A010C7d01b50e0d17dc79C8"),
                address!("0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC"),
            ],
            accounts_priv: vec![
                "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80".to_string(),
                "59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d".to_string(),
                "5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a".to_string(),
            ],
            provider,
            nonces: BTreeMap::default(),
            address_nonces: BTreeMap::default(),
            cmd,
            project_root: project_root.to_path_buf(),
            target_contract: target_contract.to_string(),
            endpoint: endpoint.map(|s| s.to_string()),
        }
    }

    /// Creates a new instance of a Tester for the `broadcast` test at the given `project_root` by
    /// configuring the `TestCommand` with script
    pub fn new_broadcast(cmd: TestCommand, endpoint: &str, project_root: &Path) -> Self {
        let target_contract = project_root.join(BROADCAST_TEST_PATH).to_string_lossy().to_string();

        // copy the broadcast test
        fs::copy(
            Self::testdata_path().join("default/cheats/Broadcast.t.sol"),
            project_root.join(BROADCAST_TEST_PATH),
        )
        .expect("Failed to initialize broadcast contract");

        Self::new(cmd, Some(endpoint), project_root, &target_contract)
    }

    /// Creates a new instance of a Tester for the `broadcast` test at the given `project_root` by
    /// configuring the `TestCommand` with script without an endpoint
    pub fn new_broadcast_without_endpoint(cmd: TestCommand, project_root: &Path) -> Self {
        let target_contract = project_root.join(BROADCAST_TEST_PATH).to_string_lossy().to_string();

        // copy the broadcast test
        let testdata = Self::testdata_path();
        fs::copy(
            testdata.join("default/cheats/Broadcast.t.sol"),
            project_root.join(BROADCAST_TEST_PATH),
        )
        .expect("Failed to initialize broadcast contract");

        Self::new(cmd, None, project_root, &target_contract)
    }

    /// Returns the path to the dir that contains testdata
    fn testdata_path() -> &'static Path {
        Path::new(TESTDATA)
    }

    /// Initialises the test contracts by copying them into the workspace
    fn copy_testdata(root: &Path) -> Result<()> {
        let testdata = Self::testdata_path();
        let from_dir = testdata.join("utils");
        let to_dir = root.join("utils");
        fs::create_dir_all(&to_dir)?;
        for entry in fs::read_dir(&from_dir)? {
            let file = &entry?.path();
            let name = file.file_name().unwrap();
            fs::copy(file, to_dir.join(name))?;
        }
        Ok(())
    }

    /// Passes the private keys at `keys_indexes` to the command and, when a
    /// provider is available, records each account's current nonce.
    pub async fn load_private_keys(&mut self, keys_indexes: &[u32]) -> &mut Self {
        for &index in keys_indexes {
            self.cmd.args(["--private-keys", &self.accounts_priv[index as usize]]);

            if let Some(provider) = &self.provider {
                let nonce = provider
                    .get_transaction_count(self.accounts_pub[index as usize])
                    .await
                    .unwrap();
                self.nonces.insert(index, nonce);
            }
        }
        self
    }

    /// Records the current nonce of each given address for later assertions.
    pub async fn load_addresses(&mut self, addresses: &[Address]) -> &mut Self {
        for &address in addresses {
            let nonce =
                self.provider.as_ref().unwrap().get_transaction_count(address).await.unwrap();
            self.address_nonces.insert(address, nonce);
        }
        self
    }

    /// Sets the account at `index` as the `--sender`.
    pub fn add_deployer(&mut self, index: u32) -> &mut Self {
        self.sender(self.accounts_pub[index as usize])
    }

    /// Adds given address as sender
    pub fn sender(&mut self, addr: Address) -> &mut Self {
        self.args(&["--sender", &addr.to_string()])
    }

    /// Selects the target contract (`--tc`) and function signature (`--sig`).
    pub fn add_sig(&mut self, contract_name: &str, sig: &str) -> &mut Self {
        self.args(&["--tc", contract_name, "--sig", sig])
    }

    /// Sets a custom CREATE2 deployer address.
    pub fn add_create2_deployer(&mut self, create2_deployer: Address) -> &mut Self {
        self.args(&["--create2-deployer", &create2_deployer.to_string()])
    }

    /// Adds the `--unlocked` flag
    pub fn unlocked(&mut self) -> &mut Self {
        self.arg("--unlocked")
    }

    /// Runs the script without broadcasting and checks the outcome.
    pub fn simulate(&mut self, expected: ScriptOutcome) -> &mut Self {
        self.run(expected)
    }

    /// Runs the script with `--broadcast` and checks the outcome.
    pub fn broadcast(&mut self, expected: ScriptOutcome) -> &mut Self {
        self.arg("--broadcast").run(expected)
    }

    /// Re-runs the script with `--resume` and checks the outcome.
    pub fn resume(&mut self, expected: ScriptOutcome) -> &mut Self {
        self.arg("--resume").run(expected)
    }

    /// `[(private_key_slot, expected increment)]`
    pub async fn assert_nonce_increment(&mut self, keys_indexes: &[(u32, u32)]) -> &mut Self {
        for &(private_key_slot, expected_increment) in keys_indexes {
            let addr = self.accounts_pub[private_key_slot as usize];
            let nonce = self.provider.as_ref().unwrap().get_transaction_count(addr).await.unwrap();
            // Compare against the nonce recorded by `load_private_keys`.
            let prev_nonce = self.nonces.get(&private_key_slot).unwrap();

            assert_eq!(
                nonce,
                (*prev_nonce + expected_increment as u64),
                "nonce not incremented correctly for {addr}: \
                {prev_nonce} + {expected_increment} != {nonce}"
            );
        }
        self
    }

    /// In Vec<(address, expected increment)>
    pub async fn assert_nonce_increment_addresses(
        &mut self,
        address_indexes: &[(Address, u32)],
    ) -> &mut Self {
        for (address, expected_increment) in address_indexes {
            let nonce =
                self.provider.as_ref().unwrap().get_transaction_count(*address).await.unwrap();
            // Compare against the nonce recorded by `load_addresses`.
            let prev_nonce = self.address_nonces.get(address).unwrap();
            assert_eq!(nonce, *prev_nonce + *expected_increment as u64);
        }
        self
    }

    /// Executes the command and asserts that `expected` appears in stdout or
    /// stderr, panicking with both full streams otherwise.
    pub fn run(&mut self, expected: ScriptOutcome) -> &mut Self {
        let out = self.cmd.execute();
        let (stdout, stderr) = (lossy_string(&out.stdout), lossy_string(&out.stderr));
        trace!(target: "tests", "STDOUT\n{stdout}\n\nSTDERR\n{stderr}");

        if !stdout.contains(expected.as_str()) && !stderr.contains(expected.as_str()) {
            panic!(
                "--STDOUT--\n{stdout}\n\n--STDERR--\n{stderr}\n\n--EXPECTED--\n{:?} not found in stdout or stderr",
                expected.as_str()
            );
        }

        self
    }

    /// Adds the `--slow` flag.
    pub fn slow(&mut self) -> &mut Self {
        self.arg("--slow")
    }

    /// Appends a single CLI argument.
    pub fn arg(&mut self, arg: &str) -> &mut Self {
        self.cmd.arg(arg);
        self
    }

    /// Appends multiple CLI arguments.
    pub fn args(&mut self, args: &[&str]) -> &mut Self {
        self.cmd.args(args);
        self
    }

    /// Resets the command to a fresh `forge script` invocation and clears all
    /// recorded nonces.
    pub fn clear(&mut self) {
        init_script_cmd(
            &mut self.cmd,
            &self.project_root,
            &self.target_contract,
            self.endpoint.as_deref(),
        );
        self.nonces.clear();
        self.address_nonces.clear();
    }
}
/// Various `forge` script results
#[derive(Debug)]
pub enum ScriptOutcome {
    OkNoEndpoint,
    OkSimulation,
    OkBroadcast,
    WarnSpecifyDeployer,
    MissingSender,
    MissingWallet,
    StaticCallNotAllowed,
    ScriptFailed,
    UnsupportedLibraries,
    ErrorSelectForkOnBroadcast,
    OkRun,
}

impl ScriptOutcome {
    /// The substring that must appear in the script's stdout or stderr for
    /// this outcome.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::OkNoEndpoint => "If you wish to simulate on-chain transactions pass a RPC URL.",
            Self::OkSimulation => "SIMULATION COMPLETE. To broadcast these",
            Self::OkBroadcast => "ONCHAIN EXECUTION COMPLETE & SUCCESSFUL",
            Self::OkRun => "Script ran successfully",
            Self::WarnSpecifyDeployer => {
                "Warning: You have more than one deployer who could predeploy libraries. Using `--sender` instead."
            }
            Self::MissingSender => {
                "You seem to be using Foundry's default sender. Be sure to set your own --sender"
            }
            Self::MissingWallet => "No associated wallet",
            Self::StaticCallNotAllowed => {
                "staticcall`s are not allowed after `broadcast`; use `startBroadcast` instead"
            }
            Self::ScriptFailed => "script failed: ",
            Self::UnsupportedLibraries => {
                "Multi chain deployment does not support library linking at the moment."
            }
            Self::ErrorSelectForkOnBroadcast => "cannot select forks during a broadcast",
        }
    }

    /// Whether this outcome represents a failed script run.
    pub fn is_err(&self) -> bool {
        // Everything that is not an "ok"/warning outcome is an error.
        !matches!(
            self,
            Self::OkNoEndpoint
                | Self::OkSimulation
                | Self::OkBroadcast
                | Self::WarnSpecifyDeployer
                | Self::OkRun
        )
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/ui_runner.rs | crates/test-utils/src/ui_runner.rs | use std::path::Path;
use ui_test::{
spanned::Spanned,
status_emitter::{Gha, StatusEmitter},
};
/// Test runner based on `ui_test`. Adapted from `https://github.com/paradigmxyz/solar/blob/main/tools/tester/src/lib.rs`.
pub fn run_tests<'a>(cmd: &str, cmd_path: &'a Path, testdata: &'a Path) -> eyre::Result<()> {
ui_test::color_eyre::install()?;
let mut args = ui_test::Args::test()?;
// Fast path for `--list`, invoked by `cargo-nextest`.
{
let mut dummy_config = ui_test::Config::dummy();
dummy_config.with_args(&args);
if ui_test::nextest::emulate(&mut vec![dummy_config]) {
return Ok(());
}
}
// Condense output if not explicitly requested.
let requested_pretty = || std::env::args().any(|x| x.contains("--format"));
if matches!(args.format, ui_test::Format::Pretty) && !requested_pretty() {
args.format = ui_test::Format::Terse;
}
let config = config(cmd, cmd_path, &args, testdata);
let text_emitter: Box<dyn StatusEmitter> = args.format.into();
let gha_emitter = Gha { name: "Foundry Lint UI".to_string(), group: true };
let status_emitter = (text_emitter, gha_emitter);
// run tests on all .sol files
ui_test::run_tests_generic(
vec![config],
move |path, _config| Some(path.extension().is_some_and(|ext| ext == "sol")),
per_file_config,
status_emitter,
)?;
Ok(())
}
/// Builds the `ui_test` configuration used to run the lint UI tests found in
/// `testdata` with the given command.
fn config<'a>(
    cmd: &str,
    cmd_path: &'a Path,
    args: &ui_test::Args,
    testdata: &'a Path,
) -> ui_test::Config {
    let root = testdata.parent().unwrap();
    assert!(
        testdata.exists(),
        "testdata directory does not exist: {};\n\
        you may need to initialize submodules: `git submodule update --init --checkout`",
        testdata.display()
    );

    let mut config = ui_test::Config {
        host: Some(get_host().to_string()),
        target: None,
        root_dir: testdata.into(),
        // The binary invocation under test: `<cmd_path> <cmd> --json --root <testdata>`.
        program: ui_test::CommandBuilder {
            program: cmd_path.into(),
            args: {
                let args = vec![cmd, "--json", "--root", testdata.to_str().expect("invalid root")];
                args.into_iter().map(Into::into).collect()
            },
            out_dir_flag: None,
            input_file_flag: None,
            envs: vec![("FOUNDRY_LINT_UI_TESTING".into(), Some("1".into()))],
            cfg_flag: None,
        },
        output_conflict_handling: ui_test::error_on_output_conflict,
        bless_command: Some("cargo test -p forge --test ui -- --bless".into()),
        out_dir: root.join("target").join("ui"),
        comment_start: "//",
        diagnostic_extractor: ui_test::diagnostics::rustc::rustc_diagnostics_extractor,
        ..ui_test::Config::dummy()
    };

    // Registers `ui_test` custom comment flags; the list is currently empty.
    macro_rules! register_custom_flags {
        ($($ty:ty),* $(,)?) => {
            $(
                config.custom_comments.insert(<$ty>::NAME, <$ty>::parse);
                if let Some(default) = <$ty>::DEFAULT {
                    config.comment_defaults.base().add_custom(<$ty>::NAME, default);
                }
            )*
        };
    }
    register_custom_flags![];

    config.comment_defaults.base().exit_status = None.into();
    config.comment_defaults.base().require_annotations = Spanned::dummy(true).into();
    config.comment_defaults.base().require_annotations_for_level =
        Spanned::dummy(ui_test::diagnostics::Level::Warn).into();

    // Byte-level normalizations applied to both stdout and stderr, so blessed
    // output is platform- and checkout-location-independent.
    let filters = [
        (ui_test::Match::PathBackslash, b"/".to_vec()),
        #[cfg(windows)]
        (ui_test::Match::Exact(vec![b'\r']), b"".to_vec()),
        #[cfg(windows)]
        (ui_test::Match::Exact(br"\\?\".to_vec()), b"".to_vec()),
        (root.into(), b"ROOT".to_vec()),
    ];
    config.comment_defaults.base().normalize_stderr.extend(filters.iter().cloned());
    config.comment_defaults.base().normalize_stdout.extend(filters);

    let filters: &[(&str, &str)] = &[
        // Erase line and column info.
        (r"\.(\w+):[0-9]+:[0-9]+(: [0-9]+:[0-9]+)?", ".$1:LL:CC"),
    ];
    for &(pattern, replacement) in filters {
        config.filter(pattern, replacement);
    }

    // Replace the crate version in stdout so blessed output survives bumps.
    let stdout_filters: &[(&str, &str)] =
        &[(&env!("CARGO_PKG_VERSION").replace(".", r"\."), "VERSION")];
    for &(pattern, replacement) in stdout_filters {
        config.stdout_filter(pattern, replacement);
    }
    let stderr_filters: &[(&str, &str)] = &[];
    for &(pattern, replacement) in stderr_filters {
        config.stderr_filter(pattern, replacement);
    }

    config.with_args(args);

    config
}
/// Per-file tweaks applied before each UI test file runs.
fn per_file_config(config: &mut ui_test::Config, file: &Spanned<Vec<u8>>) {
    // Non-UTF-8 files keep the global defaults untouched.
    let src = match std::str::from_utf8(&file.content) {
        Ok(s) => s,
        Err(_) => return,
    };

    assert_eq!(config.comment_start, "//");
    // A `//~` marker means the file declares expected diagnostics inline.
    let expects_diagnostics = src.contains("//~");
    let defaults = config.comment_defaults.base();
    defaults.require_annotations = Spanned::dummy(expects_diagnostics).into();
    // Files that expect an `ERROR:` diagnostic must also exit non-zero.
    let expected_exit = if expects_diagnostics && src.contains("ERROR:") { 1 } else { 0 };
    defaults.exit_status = Spanned::dummy(expected_exit).into();
}
/// Returns the rustc host triple, computed once and cached for the process.
fn get_host() -> &'static str {
    static CACHE: std::sync::OnceLock<String> = std::sync::OnceLock::new();
    CACHE.get_or_init(|| {
        // Ask rustc for the host triple via `ui_test`'s helper config.
        let mut config = ui_test::Config::dummy();
        config.program = ui_test::CommandBuilder::rustc();
        config.fill_host_and_target().unwrap();
        config.host.unwrap()
    })
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/test-utils/src/fd_lock.rs | crates/test-utils/src/fd_lock.rs | //! File locking utilities.
use crate::util::pretty_err;
use std::{
fs::{File, OpenOptions},
path::Path,
};
pub use fd_lock::*;
/// Creates a new lock file at the given path.
pub fn new_lock(lock_path: impl AsRef<Path>) -> RwLock<File> {
    // Monomorphized inner function keeps codegen for the generic wrapper small.
    fn new_lock(lock_path: &Path) -> RwLock<File> {
        // Open read+write without truncating: other processes may have already
        // written the lock token and its contents must survive re-opening.
        let lock_file = pretty_err(
            lock_path,
            OpenOptions::new().read(true).write(true).create(true).truncate(false).open(lock_path),
        );
        RwLock::new(lock_file)
    }
    new_lock(lock_path.as_ref())
}
/// Token written to a lock file to mark the guarded resource as initialized.
pub(crate) const LOCK_TOKEN: &[u8] = b"1";

/// Returns `true` iff the lock file at `lock_path` is readable and contains
/// exactly [`LOCK_TOKEN`].
pub(crate) fn lock_exists(lock_path: &Path) -> bool {
    match std::fs::read(lock_path) {
        Ok(contents) => contents == LOCK_TOKEN,
        // Missing or unreadable file means "not initialized".
        Err(_) => false,
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/sequence.rs | crates/script/src/sequence.rs | use crate::multi_sequence::MultiChainSequence;
use eyre::Result;
use forge_script_sequence::{ScriptSequence, TransactionWithMetadata};
use foundry_cli::utils::Git;
use foundry_common::fmt::UIfmt;
use foundry_compilers::ArtifactId;
use foundry_config::Config;
use std::{
fmt::{Error, Write},
path::Path,
};
/// Format transaction details for display
fn format_transaction(index: usize, tx: &TransactionWithMetadata) -> Result<String, Error> {
    let mut output = String::new();
    writeln!(output, "### Transaction {index} ###")?;
    writeln!(output, "{}", tx.tx().pretty())?;

    // Show contract name and address if available
    if !tx.opcode.is_any_create()
        && let (Some(name), Some(addr)) = (&tx.contract_name, &tx.contract_address)
    {
        writeln!(output, "contract: {name}({addr})")?;
    }

    // Show decoded function if available
    if let (Some(func), Some(args)) = (&tx.function, &tx.arguments) {
        if args.is_empty() {
            writeln!(output, "data (decoded): {func}()")?;
        } else {
            writeln!(output, "data (decoded): {func}(")?;
            // One argument per line, comma-separated except after the last.
            for (i, arg) in args.iter().enumerate() {
                writeln!(&mut output, " {}{}", arg, if i + 1 < args.len() { "," } else { "" })?;
            }
            writeln!(output, ")")?;
        }
    }

    // Trailing blank line separates consecutive transactions.
    writeln!(output)?;
    Ok(output)
}
/// Returns the commit hash of the project if it exists
pub fn get_commit_hash(root: &Path) -> Option<String> {
    // The boolean flag is forwarded to `commit_hash`; presumably it selects the
    // abbreviated form of the `HEAD` hash — verify against `Git::commit_hash`.
    Git::new(root).commit_hash(true, "HEAD").ok()
}
/// Either a single-chain or a multi-chain script sequence.
pub enum ScriptSequenceKind {
    /// A sequence targeting a single chain.
    Single(ScriptSequence),
    /// A sequence spanning multiple chains.
    Multi(MultiChainSequence),
}
impl ScriptSequenceKind {
    /// Persists the underlying sequence(s) to disk.
    pub fn save(&mut self, silent: bool, save_ts: bool) -> Result<()> {
        match self {
            Self::Single(sequence) => sequence.save(silent, save_ts),
            Self::Multi(sequence) => sequence.save(silent, save_ts),
        }
    }

    /// Returns all contained sequences as a slice, regardless of kind.
    pub fn sequences(&self) -> &[ScriptSequence] {
        match self {
            Self::Single(sequence) => std::slice::from_ref(sequence),
            Self::Multi(sequence) => &sequence.deployments,
        }
    }

    /// Mutable variant of [`Self::sequences`].
    pub fn sequences_mut(&mut self) -> &mut [ScriptSequence] {
        match self {
            Self::Single(sequence) => std::slice::from_mut(sequence),
            Self::Multi(sequence) => &mut sequence.deployments,
        }
    }
    /// Updates underlying sequence paths to not be under /dry-run directory.
    pub fn update_paths_to_broadcasted(
        &mut self,
        config: &Config,
        sig: &str,
        target: &ArtifactId,
    ) -> Result<()> {
        match self {
            Self::Single(sequence) => {
                sequence.paths =
                    Some(ScriptSequence::get_paths(config, sig, target, sequence.chain, false)?);
            }
            Self::Multi(sequence) => {
                (sequence.path, sequence.sensitive_path) =
                    MultiChainSequence::get_paths(config, sig, target, false)?;
            }
        };

        Ok(())
    }

    /// Prints every transaction of every non-empty sequence, grouped by chain.
    pub fn show_transactions(&self) -> Result<()> {
        for sequence in self.sequences() {
            if !sequence.transactions.is_empty() {
                sh_println!("\nChain {}\n", sequence.chain)?;

                for (i, tx) in sequence.transactions.iter().enumerate() {
                    sh_print!("{}", format_transaction(i + 1, tx)?)?;
                }
            }
        }
        Ok(())
    }
}
impl Drop for ScriptSequenceKind {
    // Persist the sequence (including timestamped copies) when it goes out of
    // scope; failures are logged rather than panicking inside `drop`.
    fn drop(&mut self) {
        if let Err(err) = self.save(false, true) {
            error!(?err, "could not save deployment sequence");
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/lib.rs | crates/script/src/lib.rs | //! # foundry-script
//!
//! Smart contract scripting.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[macro_use]
extern crate foundry_common;
#[macro_use]
extern crate tracing;
use crate::runner::ScriptRunner;
use alloy_json_abi::{Function, JsonAbi};
use alloy_primitives::{
Address, Bytes, Log, TxKind, U256, hex,
map::{AddressHashMap, HashMap},
};
use alloy_signer::Signer;
use broadcast::next_nonce;
use build::PreprocessedState;
use clap::{Parser, ValueHint};
use dialoguer::Confirm;
use eyre::{ContextCompat, Result};
use forge_script_sequence::{AdditionalContract, NestedValue};
use forge_verify::{RetryArgs, VerifierArgs};
use foundry_cli::{
opts::{BuildOpts, EvmArgs, GlobalArgs},
utils::LoadConfig,
};
use foundry_common::{
CONTRACT_MAX_SIZE, ContractsByArtifact, SELECTOR_LEN,
abi::{encode_function_args, get_func},
shell,
};
use foundry_compilers::ArtifactId;
use foundry_config::{
Config, figment,
figment::{
Metadata, Profile, Provider,
value::{Dict, Map},
},
};
use foundry_evm::{
backend::Backend,
core::Breakpoints,
executors::ExecutorBuilder,
inspectors::{
CheatsConfig,
cheatcodes::{BroadcastableTransactions, Wallets},
},
opts::EvmOpts,
traces::{TraceMode, Traces},
};
use foundry_wallets::MultiWalletOpts;
use serde::Serialize;
use std::path::PathBuf;
mod broadcast;
mod build;
mod execute;
mod multi_sequence;
mod progress;
mod providers;
mod receipts;
mod runner;
mod sequence;
mod simulate;
mod transaction;
mod verify;
// Loads project's figment and merges the build cli arguments into it
foundry_config::merge_impl_figment_convert!(ScriptArgs, build, evm);
/// CLI arguments for `forge script`.
// NOTE: the `///` doc comments on the fields below are rendered by clap as the
// CLI help text (runtime-visible) — edit them only when the CLI help should change.
#[derive(Clone, Debug, Default, Parser)]
pub struct ScriptArgs {
    // Include global options for users of this struct.
    #[command(flatten)]
    pub global: GlobalArgs,
    /// The contract you want to run. Either the file path or contract name.
    ///
    /// If multiple contracts exist in the same file you must specify the target contract with
    /// --target-contract.
    #[arg(value_hint = ValueHint::FilePath)]
    pub path: String,
    /// Arguments to pass to the script function.
    pub args: Vec<String>,
    /// The name of the contract you want to run.
    #[arg(long, visible_alias = "tc", value_name = "CONTRACT_NAME")]
    pub target_contract: Option<String>,
    /// The signature of the function you want to call in the contract, or raw calldata.
    #[arg(long, short, default_value = "run")]
    pub sig: String,
    // -- gas / transaction shape -------------------------------------------------
    /// Max priority fee per gas for EIP1559 transactions.
    #[arg(
        long,
        env = "ETH_PRIORITY_GAS_PRICE",
        value_parser = foundry_cli::utils::parse_ether_value,
        value_name = "PRICE"
    )]
    pub priority_gas_price: Option<U256>,
    /// Use legacy transactions instead of EIP1559 ones.
    ///
    /// This is auto-enabled for common networks without EIP1559.
    #[arg(long)]
    pub legacy: bool,
    // -- broadcast behavior ------------------------------------------------------
    /// Broadcasts the transactions.
    #[arg(long)]
    pub broadcast: bool,
    /// Batch size of transactions.
    ///
    /// This is ignored and set to 1 if batching is not available or `--slow` is enabled.
    #[arg(long, default_value = "100")]
    pub batch_size: usize,
    /// Skips on-chain simulation.
    #[arg(long)]
    pub skip_simulation: bool,
    /// Relative percentage to multiply gas estimates by.
    #[arg(long, short, default_value = "130")]
    pub gas_estimate_multiplier: u64,
    /// Send via `eth_sendTransaction` using the `--sender` argument as sender.
    #[arg(
        long,
        conflicts_with_all = &["private_key", "private_keys", "ledger", "trezor", "aws"],
    )]
    pub unlocked: bool,
    /// Resumes submitting transactions that failed or timed-out previously.
    ///
    /// It DOES NOT simulate the script again and it expects nonces to have remained the same.
    ///
    /// Example: If transaction N has a nonce of 22, then the account should have a nonce of 22,
    /// otherwise it fails.
    #[arg(long)]
    pub resume: bool,
    /// If present, --resume or --verify will be assumed to be a multi chain deployment.
    #[arg(long)]
    pub multi: bool,
    // -- debugging ---------------------------------------------------------------
    /// Open the script in the debugger.
    ///
    /// Takes precedence over broadcast.
    #[arg(long)]
    pub debug: bool,
    /// Dumps all debugger steps to file.
    #[arg(
        long,
        requires = "debug",
        value_hint = ValueHint::FilePath,
        value_name = "PATH"
    )]
    pub dump: Option<PathBuf>,
    /// Makes sure a transaction is sent,
    /// only after its previous one has been confirmed and succeeded.
    #[arg(long)]
    pub slow: bool,
    /// Disables interactive prompts that might appear when deploying big contracts.
    ///
    /// For more info on the contract size limit, see EIP-170: <https://eips.ethereum.org/EIPS/eip-170>
    #[arg(long)]
    pub non_interactive: bool,
    /// Disables the contract size limit during script execution.
    #[arg(long)]
    pub disable_code_size_limit: bool,
    /// Disables the labels in the traces.
    #[arg(long)]
    pub disable_labels: bool,
    // -- verification ------------------------------------------------------------
    /// The Etherscan (or equivalent) API key
    #[arg(long, env = "ETHERSCAN_API_KEY", value_name = "KEY")]
    pub etherscan_api_key: Option<String>,
    /// Verifies all the contracts found in the receipts of a script, if any.
    #[arg(long, requires = "broadcast")]
    pub verify: bool,
    /// Gas price for legacy transactions, or max fee per gas for EIP1559 transactions, either
    /// specified in wei, or as a string with a unit type.
    ///
    /// Examples: 1ether, 10gwei, 0.01ether
    #[arg(
        long,
        env = "ETH_GAS_PRICE",
        value_parser = foundry_cli::utils::parse_ether_value,
        value_name = "PRICE",
    )]
    pub with_gas_price: Option<U256>,
    /// Timeout to use for broadcasting transactions.
    #[arg(long, env = "ETH_TIMEOUT")]
    pub timeout: Option<u64>,
    // -- flattened option groups shared with other forge commands ----------------
    #[command(flatten)]
    pub build: BuildOpts,
    #[command(flatten)]
    pub wallets: MultiWalletOpts,
    #[command(flatten)]
    pub evm: EvmArgs,
    #[command(flatten)]
    pub verifier: VerifierArgs,
    #[command(flatten)]
    pub retry: RetryArgs,
}
impl ScriptArgs {
    /// Resolves wallets, config and EVM options into a [`PreprocessedState`] —
    /// the entry state of the script state machine.
    pub async fn preprocess(self) -> Result<PreprocessedState> {
        let script_wallets = Wallets::new(self.wallets.get_multi_wallet().await?, self.evm.sender);
        let (config, mut evm_opts) = self.load_config_and_evm_opts()?;
        // A single loaded private key implies the sender (see `maybe_load_private_key`).
        if let Some(sender) = self.maybe_load_private_key()? {
            evm_opts.sender = sender;
        }
        let script_config = ScriptConfig::new(config, evm_opts).await?;
        Ok(PreprocessedState { args: self, script_config, script_wallets })
    }
    /// Executes the script
    ///
    /// Drives the full pipeline: preprocess → compile → (resume | link/execute/
    /// simulate) → optional broadcast → optional verification. Returns early
    /// (with `Ok`) whenever there is nothing further to do.
    pub async fn run_script(self) -> Result<()> {
        trace!(target: "script", "executing script command");
        let state = self.preprocess().await?;
        // Grab this before `state` is consumed by `compile`.
        let create2_deployer = state.script_config.evm_opts.create2_deployer;
        let compiled = state.compile()?;
        // Move from `CompiledState` to `BundledState` either by resuming or executing and
        // simulating script.
        let bundled = if compiled.args.resume {
            compiled.resume().await?
        } else {
            // Drive state machine to point at which we have everything needed for simulation.
            let pre_simulation = compiled
                .link()
                .await?
                .prepare_execution()
                .await?
                .execute()
                .await?
                .prepare_simulation()
                .await?;
            // The debugger takes precedence over broadcast (see `--debug` help text).
            if pre_simulation.args.debug {
                return match pre_simulation.args.dump.clone() {
                    Some(path) => pre_simulation.dump_debugger(&path),
                    None => pre_simulation.run_debugger(),
                };
            }
            if shell::is_json() {
                pre_simulation.show_json().await?;
            } else {
                pre_simulation.show_traces().await?;
            }
            // Ensure that we have transactions to simulate/broadcast, otherwise exit early to avoid
            // hard error.
            if pre_simulation
                .execution_result
                .transactions
                .as_ref()
                .is_none_or(|txs| txs.is_empty())
            {
                if pre_simulation.args.broadcast {
                    sh_warn!("No transactions to broadcast.")?;
                }
                return Ok(());
            }
            // Check if there are any missing RPCs and exit early to avoid hard error.
            if pre_simulation.execution_artifacts.rpc_data.missing_rpc {
                if !shell::is_json() {
                    sh_println!("\nIf you wish to simulate on-chain transactions pass a RPC URL.")?;
                }
                return Ok(());
            }
            pre_simulation.args.check_contract_sizes(
                &pre_simulation.execution_result,
                &pre_simulation.build_data.known_contracts,
                create2_deployer,
            )?;
            pre_simulation.fill_metadata().await?.bundle().await?
        };
        // Exit early in case user didn't provide any broadcast/verify related flags.
        if !bundled.args.should_broadcast() {
            if !shell::is_json() {
                if shell::verbosity() >= 4 {
                    sh_println!("\n=== Transactions that will be broadcast ===\n")?;
                    bundled.sequence.show_transactions()?;
                }
                sh_println!(
                    "\nSIMULATION COMPLETE. To broadcast these transactions, add --broadcast and wallet configuration(s) to the previous command. See forge script --help for more."
                )?;
            }
            return Ok(());
        }
        // Exit early if something is wrong with verification options.
        if bundled.args.verify {
            bundled.verify_preflight_check()?;
        }
        // Wait for pending txes and broadcast others.
        let broadcasted = bundled.wait_for_pending().await?.broadcast().await?;
        if broadcasted.args.verify {
            broadcasted.verify().await?;
        }
        Ok(())
    }
/// In case the user has loaded *only* one private-key, we can assume that he's using it as the
/// `--sender`
fn maybe_load_private_key(&self) -> Result<Option<Address>> {
let maybe_sender = self
.wallets
.private_keys()?
.filter(|pks| pks.len() == 1)
.map(|pks| pks.first().unwrap().address());
Ok(maybe_sender)
}
/// Returns the Function and calldata based on the signature
///
/// If the `sig` is a valid human-readable function we find the corresponding function in the
/// `abi` If the `sig` is valid hex, we assume it's calldata and try to find the
/// corresponding function by matching the selector, first 4 bytes in the calldata.
///
/// Note: We assume that the `sig` is already stripped of its prefix, See [`ScriptArgs`]
fn get_method_and_calldata(&self, abi: &JsonAbi) -> Result<(Function, Bytes)> {
if let Ok(decoded) = hex::decode(&self.sig) {
let selector = &decoded[..SELECTOR_LEN];
let func =
abi.functions().find(|func| selector == &func.selector()[..]).ok_or_else(|| {
eyre::eyre!(
"Function selector `{}` not found in the ABI",
hex::encode(selector)
)
})?;
return Ok((func.clone(), decoded.into()));
}
let func = if self.sig.contains('(') {
let func = get_func(&self.sig)?;
abi.functions()
.find(|&abi_func| abi_func.selector() == func.selector())
.wrap_err(format!("Function `{}` is not implemented in your script.", self.sig))?
} else {
let matching_functions =
abi.functions().filter(|func| func.name == self.sig).collect::<Vec<_>>();
match matching_functions.len() {
0 => eyre::bail!("Function `{}` not found in the ABI", self.sig),
1 => matching_functions[0],
2.. => eyre::bail!(
"Multiple functions with the same name `{}` found in the ABI",
self.sig
),
}
};
let data = encode_function_args(func, &self.args)?;
Ok((func.clone(), data.into()))
}
/// Checks if the transaction is a deployment with either a size above the `CONTRACT_MAX_SIZE`
/// or specified `code_size_limit`.
///
/// If `self.broadcast` is enabled, it asks confirmation of the user. Otherwise, it just warns
/// the user.
fn check_contract_sizes(
&self,
result: &ScriptResult,
known_contracts: &ContractsByArtifact,
create2_deployer: Address,
) -> Result<()> {
// If disable-code-size-limit flag is enabled then skip the size check
if self.disable_code_size_limit {
return Ok(());
}
// (name, &init, &deployed)[]
let mut bytecodes: Vec<(String, &[u8], &[u8])> = vec![];
// From artifacts
for (artifact, contract) in known_contracts.iter() {
let Some(bytecode) = contract.bytecode() else { continue };
let Some(deployed_bytecode) = contract.deployed_bytecode() else { continue };
bytecodes.push((artifact.name.clone(), bytecode, deployed_bytecode));
}
// From traces
let create_nodes = result.traces.iter().flat_map(|(_, traces)| {
traces.nodes().iter().filter(|node| node.trace.kind.is_any_create())
});
let mut unknown_c = 0usize;
for node in create_nodes {
let init_code = &node.trace.data;
let deployed_code = &node.trace.output;
if !bytecodes.iter().any(|(_, b, _)| *b == init_code.as_ref()) {
bytecodes.push((format!("Unknown{unknown_c}"), init_code, deployed_code));
unknown_c += 1;
}
continue;
}
let mut prompt_user = false;
let max_size = match self.evm.env.code_size_limit {
Some(size) => size,
None => CONTRACT_MAX_SIZE,
};
for (data, to) in result.transactions.iter().flat_map(|txes| {
txes.iter().filter_map(|tx| {
tx.transaction
.input()
.filter(|data| data.len() > max_size)
.map(|data| (data, tx.transaction.to()))
})
}) {
let mut offset = 0;
// Find if it's a CREATE or CREATE2. Otherwise, skip transaction.
if let Some(TxKind::Call(to)) = to {
if to == create2_deployer {
// Size of the salt prefix.
offset = 32;
} else {
continue;
}
} else if let Some(TxKind::Create) = to {
// Pass
}
// Find artifact with a deployment code same as the data.
if let Some((name, _, deployed_code)) =
bytecodes.iter().find(|(_, init_code, _)| *init_code == &data[offset..])
{
let deployment_size = deployed_code.len();
if deployment_size > max_size {
prompt_user = self.should_broadcast();
sh_err!(
"`{name}` is above the contract size limit ({deployment_size} > {max_size})."
)?;
}
}
}
// Only prompt if we're broadcasting and we've not disabled interactivity.
if prompt_user
&& !self.non_interactive
&& !Confirm::new().with_prompt("Do you wish to continue?".to_string()).interact()?
{
eyre::bail!("User canceled the script.");
}
Ok(())
}
/// We only broadcast transactions if --broadcast, --resume, or --verify was passed.
fn should_broadcast(&self) -> bool {
self.broadcast || self.resume || self.verify
}
}
impl Provider for ScriptArgs {
    fn metadata(&self) -> Metadata {
        Metadata::named("Script Args Provider")
    }
    /// Forwards CLI values into the figment config under the selected profile.
    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
        let mut dict = Dict::default();
        // Only forward a non-blank Etherscan key.
        if let Some(key) = self.etherscan_api_key.as_deref().filter(|s| !s.trim().is_empty()) {
            dict.insert(
                "etherscan_api_key".to_string(),
                figment::value::Value::from(key.to_string()),
            );
        }
        if let Some(timeout) = self.timeout {
            dict.insert("transaction_timeout".to_string(), timeout.into());
        }
        Ok(Map::from([(Config::selected_profile(), dict)]))
    }
}
/// Outcome of a single script execution/simulation run.
#[derive(Default, Serialize, Clone)]
pub struct ScriptResult {
    /// Whether the run completed without reverting.
    pub success: bool,
    #[serde(rename = "raw_logs")]
    pub logs: Vec<Log>,
    /// Call/deployment traces collected during the run.
    pub traces: Traces,
    pub gas_used: u64,
    /// Address labels recorded during execution (e.g. via cheatcodes).
    pub labeled_addresses: AddressHashMap<String>,
    // Not serialized: transactions collected for later broadcast.
    #[serde(skip)]
    pub transactions: Option<BroadcastableTransactions>,
    /// Raw return data of the executed call.
    pub returned: Bytes,
    /// Deployed contract address, when the run was a CREATE simulation.
    pub address: Option<Address>,
    // Not serialized: debugger breakpoints collected by the cheatcode inspector.
    #[serde(skip)]
    pub breakpoints: Breakpoints,
}
impl ScriptResult {
    /// Collects every CREATE/CREATE2 observed in the traces, resolving the
    /// contract name from `known_contracts` by creation code when possible.
    pub fn get_created_contracts(
        &self,
        known_contracts: &ContractsByArtifact,
    ) -> Vec<AdditionalContract> {
        let mut created = Vec::new();
        for (_, traces) in self.traces.iter() {
            for node in traces.nodes() {
                if !node.trace.kind.is_any_create() {
                    continue;
                }
                let init_code = node.trace.data.clone();
                let contract_name = known_contracts
                    .find_by_creation_code(init_code.as_ref())
                    .map(|artifact| artifact.0.name.clone());
                created.push(AdditionalContract {
                    opcode: node.trace.kind,
                    address: node.trace.address,
                    contract_name,
                    init_code,
                });
            }
        }
        created
    }
}
/// Shape of the `--json` output: formatted logs plus decoded return values,
/// flattened alongside the raw [`ScriptResult`] fields.
#[derive(Serialize)]
struct JsonResult<'a> {
    logs: Vec<String>,
    returns: &'a HashMap<String, NestedValue>,
    #[serde(flatten)]
    result: &'a ScriptResult,
}
/// Shared configuration driving script execution and simulation.
#[derive(Clone, Debug)]
pub struct ScriptConfig {
    pub config: Config,
    pub evm_opts: EvmOpts,
    /// Starting nonce of the sender: fetched from the fork when forking,
    /// otherwise 1 (dapptools compatibility — see `ScriptConfig::new`).
    pub sender_nonce: u64,
    /// Maps a rpc url to a backend
    pub backends: HashMap<String, Backend>,
}
impl ScriptConfig {
    /// Builds the config, resolving the sender's starting nonce from the fork
    /// when a fork URL is configured.
    pub async fn new(config: Config, evm_opts: EvmOpts) -> Result<Self> {
        let sender_nonce = if let Some(fork_url) = evm_opts.fork_url.as_ref() {
            next_nonce(evm_opts.sender, fork_url, evm_opts.fork_block_number).await?
        } else {
            // dapptools compatibility
            1
        };
        Ok(Self { config, evm_opts, sender_nonce, backends: HashMap::default() })
    }
    /// Switches the sender and re-resolves its nonce (from the fork tip when forking).
    pub async fn update_sender(&mut self, sender: Address) -> Result<()> {
        self.sender_nonce = if let Some(fork_url) = self.evm_opts.fork_url.as_ref() {
            next_nonce(sender, fork_url, None).await?
        } else {
            // dapptools compatibility
            1
        };
        self.evm_opts.sender = sender;
        Ok(())
    }
    // Plain runner: no cheatcodes, no debug trace mode.
    async fn get_runner(&mut self) -> Result<ScriptRunner> {
        self._get_runner(None, false).await
    }
    // Runner with the cheatcode inspector enabled (used for actual script execution).
    async fn get_runner_with_cheatcodes(
        &mut self,
        known_contracts: ContractsByArtifact,
        script_wallets: Wallets,
        debug: bool,
        target: ArtifactId,
    ) -> Result<ScriptRunner> {
        self._get_runner(Some((known_contracts, script_wallets, target)), debug).await
    }
    /// Shared runner construction: picks/caches a backend per fork URL and
    /// assembles the executor with the requested inspector stack.
    async fn _get_runner(
        &mut self,
        cheats_data: Option<(ContractsByArtifact, Wallets, ArtifactId)>,
        debug: bool,
    ) -> Result<ScriptRunner> {
        trace!("preparing script runner");
        let env = self.evm_opts.evm_env().await?;
        // Reuse a cached backend per fork URL to avoid re-spawning forks.
        let db = if let Some(fork_url) = self.evm_opts.fork_url.as_ref() {
            match self.backends.get(fork_url) {
                Some(db) => db.clone(),
                None => {
                    let fork = self.evm_opts.get_fork(&self.config, env.clone());
                    let backend = Backend::spawn(fork)?;
                    self.backends.insert(fork_url.clone(), backend.clone());
                    backend
                }
            }
        } else {
            // It's only really `None`, when we don't pass any `--fork-url`. And if so, there is
            // no need to cache it, since there won't be any onchain simulation that we'd need
            // to cache the backend for.
            Backend::spawn(None)?
        };
        // We need to enable tracing to decode contract names: local or external.
        let mut builder = ExecutorBuilder::new()
            .inspectors(|stack| {
                stack
                    .trace_mode(if debug { TraceMode::Debug } else { TraceMode::Call })
                    .networks(self.evm_opts.networks)
                    .create2_deployer(self.evm_opts.create2_deployer)
            })
            .spec_id(self.config.evm_spec_id())
            .gas_limit(self.evm_opts.gas_limit())
            .legacy_assertions(self.config.legacy_assertions);
        if let Some((known_contracts, script_wallets, target)) = cheats_data {
            builder = builder.inspectors(|stack| {
                stack
                    .cheatcodes(
                        CheatsConfig::new(
                            &self.config,
                            self.evm_opts.clone(),
                            Some(known_contracts),
                            Some(target),
                        )
                        .into(),
                    )
                    .wallets(script_wallets)
                    .enable_isolation(self.evm_opts.isolate)
            });
        }
        Ok(ScriptRunner::new(builder.build(env, db), self.evm_opts.clone()))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use foundry_config::{NamedChain, UnresolvedEnvVarError};
    use std::fs;
    use tempfile::tempdir;
    // `--sig` accepts raw calldata and is stored verbatim.
    #[test]
    fn can_parse_sig() {
        let sig = "0x522bb704000000000000000000000000f39fd6e51aad88f6f4ce6ab8827279cfFFb92266";
        let args = ScriptArgs::parse_from(["foundry-cli", "Contract.sol", "--sig", sig]);
        assert_eq!(args.sig, sig);
    }
    // `--unlocked` must conflict with any private-key based signer option.
    #[test]
    fn can_parse_unlocked() {
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "Contract.sol",
            "--sender",
            "0x4e59b44847b379578588920ca78fbf26c0b4956c",
            "--unlocked",
        ]);
        assert!(args.unlocked);
        let key = U256::ZERO;
        let args = ScriptArgs::try_parse_from([
            "foundry-cli",
            "Contract.sol",
            "--sender",
            "0x4e59b44847b379578588920ca78fbf26c0b4956c",
            "--unlocked",
            "--private-key",
            &key.to_string(),
        ]);
        assert!(args.is_err());
    }
    // CLI-provided etherscan key ends up in the merged figment config.
    #[test]
    fn can_merge_script_config() {
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "Contract.sol",
            "--etherscan-api-key",
            "goerli",
        ]);
        let config = args.load_config().unwrap();
        assert_eq!(config.etherscan_api_key, Some("goerli".to_string()));
    }
    // `--disable-code-size-limit` makes `check_contract_sizes` a no-op.
    #[test]
    fn can_disable_code_size_limit() {
        let args =
            ScriptArgs::parse_from(["foundry-cli", "Contract.sol", "--disable-code-size-limit"]);
        assert!(args.disable_code_size_limit);
        let result = ScriptResult::default();
        let contracts = ContractsByArtifact::default();
        let create = Address::ZERO;
        assert!(args.check_contract_sizes(&result, &contracts, create).is_ok());
    }
    #[test]
    fn can_parse_verifier_url() {
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "script",
            "script/Test.s.sol:TestScript",
            "--fork-url",
            "http://localhost:8545",
            "--verifier-url",
            "http://localhost:3000/api/verify",
            "--etherscan-api-key",
            "blacksmith",
            "--broadcast",
            "--verify",
            "-vvvvv",
        ]);
        assert_eq!(
            args.verifier.verifier_url,
            Some("http://localhost:3000/api/verify".to_string())
        );
    }
    #[test]
    fn can_extract_code_size_limit() {
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "script",
            "script/Test.s.sol:TestScript",
            "--fork-url",
            "http://localhost:8545",
            "--broadcast",
            "--code-size-limit",
            "50000",
        ]);
        assert_eq!(args.evm.env.code_size_limit, Some(50000));
    }
    // A CLI key naming an `[etherscan]` alias resolves through the config table.
    #[test]
    fn can_extract_script_etherscan_key() {
        let temp = tempdir().unwrap();
        let root = temp.path();
        let config = r#"
[profile.default]
etherscan_api_key = "amoy"
[etherscan]
amoy = { key = "https://etherscan-amoy.com/" }
"#;
        let toml_file = root.join(Config::FILE_NAME);
        fs::write(toml_file, config).unwrap();
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "Contract.sol",
            "--etherscan-api-key",
            "amoy",
            "--root",
            root.as_os_str().to_str().unwrap(),
        ]);
        let config = args.load_config().unwrap();
        let amoy = config.get_etherscan_api_key(Some(NamedChain::PolygonAmoy.into()));
        assert_eq!(amoy, Some("https://etherscan-amoy.com/".to_string()));
    }
    // `--rpc-url <alias>` resolves through `[rpc_endpoints]`, with env
    // placeholders expanded lazily.
    #[test]
    fn can_extract_script_rpc_alias() {
        let temp = tempdir().unwrap();
        let root = temp.path();
        let config = r#"
[profile.default]
[rpc_endpoints]
polygonAmoy = "https://polygon-amoy.g.alchemy.com/v2/${_CAN_EXTRACT_RPC_ALIAS}"
"#;
        let toml_file = root.join(Config::FILE_NAME);
        fs::write(toml_file, config).unwrap();
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "DeployV1",
            "--rpc-url",
            "polygonAmoy",
            "--root",
            root.as_os_str().to_str().unwrap(),
        ]);
        // Unresolved placeholder must surface as `UnresolvedEnvVarError`.
        let err = args.load_config_and_evm_opts().unwrap_err();
        assert!(err.downcast::<UnresolvedEnvVarError>().is_ok());
        // SAFETY: mutates the process environment; the variable name is unique to
        // this test so concurrently running tests don't observe or clobber it.
        unsafe {
            std::env::set_var("_CAN_EXTRACT_RPC_ALIAS", "123456");
        }
        let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
        assert_eq!(config.eth_rpc_url, Some("polygonAmoy".to_string()));
        assert_eq!(
            evm_opts.fork_url,
            Some("https://polygon-amoy.g.alchemy.com/v2/123456".to_string())
        );
    }
    // RPC and etherscan aliases can resolve independently from the same config.
    #[test]
    fn can_extract_script_rpc_and_etherscan_alias() {
        let temp = tempdir().unwrap();
        let root = temp.path();
        let config = r#"
[profile.default]
[rpc_endpoints]
amoy = "https://polygon-amoy.g.alchemy.com/v2/${_EXTRACT_RPC_ALIAS}"
[etherscan]
amoy = { key = "${_ETHERSCAN_API_KEY}", chain = 80002, url = "https://amoy.polygonscan.com/" }
"#;
        let toml_file = root.join(Config::FILE_NAME);
        fs::write(toml_file, config).unwrap();
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "DeployV1",
            "--rpc-url",
            "amoy",
            "--etherscan-api-key",
            "amoy",
            "--root",
            root.as_os_str().to_str().unwrap(),
        ]);
        let err = args.load_config_and_evm_opts().unwrap_err();
        assert!(err.downcast::<UnresolvedEnvVarError>().is_ok());
        // SAFETY: env mutation with test-unique variable names (see above).
        unsafe {
            std::env::set_var("_EXTRACT_RPC_ALIAS", "123456");
        }
        // SAFETY: env mutation with test-unique variable names (see above).
        unsafe {
            std::env::set_var("_ETHERSCAN_API_KEY", "etherscan_api_key");
        }
        let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
        assert_eq!(config.eth_rpc_url, Some("amoy".to_string()));
        assert_eq!(
            evm_opts.fork_url,
            Some("https://polygon-amoy.g.alchemy.com/v2/123456".to_string())
        );
        let etherscan = config.get_etherscan_api_key(Some(80002u64.into()));
        assert_eq!(etherscan, Some("etherscan_api_key".to_string()));
        let etherscan = config.get_etherscan_api_key(None);
        assert_eq!(etherscan, Some("etherscan_api_key".to_string()));
    }
    // Same as above, but with an etherscan entry that only declares a key.
    #[test]
    fn can_extract_script_rpc_and_sole_etherscan_alias() {
        let temp = tempdir().unwrap();
        let root = temp.path();
        let config = r#"
[profile.default]
[rpc_endpoints]
amoy = "https://polygon-amoy.g.alchemy.com/v2/${_SOLE_EXTRACT_RPC_ALIAS}"
[etherscan]
amoy = { key = "${_SOLE_ETHERSCAN_API_KEY}" }
"#;
        let toml_file = root.join(Config::FILE_NAME);
        fs::write(toml_file, config).unwrap();
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "DeployV1",
            "--rpc-url",
            "amoy",
            "--root",
            root.as_os_str().to_str().unwrap(),
        ]);
        let err = args.load_config_and_evm_opts().unwrap_err();
        assert!(err.downcast::<UnresolvedEnvVarError>().is_ok());
        // SAFETY: env mutation with test-unique variable names (see above).
        unsafe {
            std::env::set_var("_SOLE_EXTRACT_RPC_ALIAS", "123456");
        }
        // SAFETY: env mutation with test-unique variable names (see above).
        unsafe {
            std::env::set_var("_SOLE_ETHERSCAN_API_KEY", "etherscan_api_key");
        }
        let (config, evm_opts) = args.load_config_and_evm_opts().unwrap();
        assert_eq!(
            evm_opts.fork_url,
            Some("https://polygon-amoy.g.alchemy.com/v2/123456".to_string())
        );
        let etherscan = config.get_etherscan_api_key(Some(80002u64.into()));
        assert_eq!(etherscan, Some("etherscan_api_key".to_string()));
        let etherscan = config.get_etherscan_api_key(None);
        assert_eq!(etherscan, Some("etherscan_api_key".to_string()));
    }
    // <https://github.com/foundry-rs/foundry/issues/5923>
    #[test]
    fn test_5923() {
        let args =
            ScriptArgs::parse_from(["foundry-cli", "DeployV1", "--priority-gas-price", "100"]);
        assert!(args.priority_gas_price.is_some());
    }
    // <https://github.com/foundry-rs/foundry/issues/5910>
    #[test]
    fn test_5910() {
        let args = ScriptArgs::parse_from([
            "foundry-cli",
            "--broadcast",
            "--with-gas-price",
            "0",
            "SolveTutorial",
        ]);
        assert!(args.with_gas_price.unwrap().is_zero());
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/runner.rs | crates/script/src/runner.rs | use super::{ScriptConfig, ScriptResult};
use crate::build::ScriptPredeployLibraries;
use alloy_eips::eip7702::SignedAuthorization;
use alloy_primitives::{Address, Bytes, TxKind, U256};
use alloy_rpc_types::TransactionRequest;
use eyre::Result;
use foundry_cheatcodes::BroadcastableTransaction;
use foundry_config::Config;
use foundry_evm::{
constants::CALLER,
executors::{DeployResult, EvmError, ExecutionErr, Executor, RawCallResult},
opts::EvmOpts,
revm::interpreter::{InstructionResult, return_ok},
traces::{TraceKind, Traces},
};
use std::collections::VecDeque;
/// Drives script execution
#[derive(Debug)]
pub struct ScriptRunner {
    /// Executor used to deploy, call and simulate the script's transactions.
    pub executor: Executor,
    /// EVM options (sender, fork url, initial balance, ...) this runner was built with.
    pub evm_opts: EvmOpts,
}
impl ScriptRunner {
    /// Creates a runner from a fully-configured executor and EVM options.
    pub fn new(executor: Executor, evm_opts: EvmOpts) -> Self {
        Self { executor, evm_opts }
    }
/// Deploys the libraries and broadcast contract. Calls setUp method if requested.
pub fn setup(
&mut self,
libraries: &ScriptPredeployLibraries,
code: Bytes,
setup: bool,
script_config: &ScriptConfig,
is_broadcast: bool,
) -> Result<(Address, ScriptResult)> {
trace!(target: "script", "executing setUP()");
if !is_broadcast {
if self.evm_opts.sender == Config::DEFAULT_SENDER {
// We max out their balance so that they can deploy and make calls.
self.executor.set_balance(self.evm_opts.sender, U256::MAX)?;
}
if script_config.evm_opts.fork_url.is_none() {
self.executor.deploy_create2_deployer()?;
}
}
let sender_nonce = script_config.sender_nonce;
self.executor.set_nonce(self.evm_opts.sender, sender_nonce)?;
// We max out their balance so that they can deploy and make calls.
self.executor.set_balance(CALLER, U256::MAX)?;
let mut library_transactions = VecDeque::new();
let mut traces = Traces::default();
// Deploy libraries
match libraries {
ScriptPredeployLibraries::Default(libraries) => libraries.iter().for_each(|code| {
let result = self
.executor
.deploy(self.evm_opts.sender, code.clone(), U256::ZERO, None)
.expect("couldn't deploy library")
.raw;
if let Some(deploy_traces) = result.traces {
traces.push((TraceKind::Deployment, deploy_traces));
}
library_transactions.push_back(BroadcastableTransaction {
rpc: self.evm_opts.fork_url.clone(),
transaction: TransactionRequest {
from: Some(self.evm_opts.sender),
input: code.clone().into(),
nonce: Some(sender_nonce + library_transactions.len() as u64),
..Default::default()
}
.into(),
})
}),
ScriptPredeployLibraries::Create2(libraries, salt) => {
let create2_deployer = self.executor.create2_deployer();
for library in libraries {
let address = create2_deployer.create2_from_code(salt, library.as_ref());
// Skip if already deployed
if !self.executor.is_empty_code(address)? {
continue;
}
let calldata = [salt.as_ref(), library.as_ref()].concat();
let result = self
.executor
.transact_raw(
self.evm_opts.sender,
create2_deployer,
calldata.clone().into(),
U256::from(0),
)
.expect("couldn't deploy library");
if let Some(deploy_traces) = result.traces {
traces.push((TraceKind::Deployment, deploy_traces));
}
library_transactions.push_back(BroadcastableTransaction {
rpc: self.evm_opts.fork_url.clone(),
transaction: TransactionRequest {
from: Some(self.evm_opts.sender),
input: calldata.into(),
nonce: Some(sender_nonce + library_transactions.len() as u64),
to: Some(TxKind::Call(create2_deployer)),
..Default::default()
}
.into(),
});
}
// Sender nonce is not incremented when performing CALLs. We need to manually
// increase it.
self.executor.set_nonce(
self.evm_opts.sender,
sender_nonce + library_transactions.len() as u64,
)?;
}
};
let address = CALLER.create(self.executor.get_nonce(CALLER)?);
// Set the contracts initial balance before deployment, so it is available during the
// construction
self.executor.set_balance(address, self.evm_opts.initial_balance)?;
// HACK: if the current sender is the default script sender (which is a default value), we
// set its nonce to a very large value before deploying the script contract. This
// ensures that the nonce increase during this CREATE does not affect deployment
// addresses of contracts that are deployed in the script, Otherwise, we'd have a
// nonce mismatch during script execution and onchain simulation, potentially
// resulting in weird errors like <https://github.com/foundry-rs/foundry/issues/8960>.
let prev_sender_nonce = self.executor.get_nonce(self.evm_opts.sender)?;
if self.evm_opts.sender == CALLER {
self.executor.set_nonce(self.evm_opts.sender, u64::MAX / 2)?;
}
// Deploy an instance of the contract
let DeployResult {
address,
raw: RawCallResult { mut logs, traces: constructor_traces, .. },
} = self
.executor
.deploy(CALLER, code, U256::ZERO, None)
.map_err(|err| eyre::eyre!("Failed to deploy script:\n{}", err))?;
if self.evm_opts.sender == CALLER {
self.executor.set_nonce(self.evm_opts.sender, prev_sender_nonce)?;
}
// set script address to be used by execution inspector
if script_config.config.script_execution_protection {
self.executor.set_script_execution(address);
}
traces.extend(constructor_traces.map(|traces| (TraceKind::Deployment, traces)));
// Optionally call the `setUp` function
let (success, gas_used, labeled_addresses, transactions) = if !setup {
self.executor.backend_mut().set_test_contract(address);
(true, 0, Default::default(), Some(library_transactions))
} else {
match self.executor.setup(Some(self.evm_opts.sender), address, None) {
Ok(RawCallResult {
reverted,
traces: setup_traces,
labels,
logs: setup_logs,
gas_used,
transactions: setup_transactions,
..
}) => {
traces.extend(setup_traces.map(|traces| (TraceKind::Setup, traces)));
logs.extend_from_slice(&setup_logs);
if let Some(txs) = setup_transactions {
library_transactions.extend(txs);
}
(!reverted, gas_used, labels, Some(library_transactions))
}
Err(EvmError::Execution(err)) => {
let RawCallResult {
reverted,
traces: setup_traces,
labels,
logs: setup_logs,
gas_used,
transactions,
..
} = err.raw;
traces.extend(setup_traces.map(|traces| (TraceKind::Setup, traces)));
logs.extend_from_slice(&setup_logs);
if let Some(txs) = transactions {
library_transactions.extend(txs);
}
(!reverted, gas_used, labels, Some(library_transactions))
}
Err(e) => return Err(e.into()),
}
};
Ok((
address,
ScriptResult {
returned: Bytes::new(),
success,
gas_used,
labeled_addresses,
transactions,
logs,
traces,
address: None,
..Default::default()
},
))
}
    /// Executes the method that will collect all broadcastable transactions.
    pub fn script(&mut self, address: Address, calldata: Bytes) -> Result<ScriptResult> {
        // `commit: false` — state changes are not persisted here (see [`Self::call`]).
        self.call(self.evm_opts.sender, address, calldata, U256::ZERO, None, false)
    }
    /// Runs a broadcastable transaction locally and persists its state.
    pub fn simulate(
        &mut self,
        from: Address,
        to: Option<Address>,
        calldata: Option<Bytes>,
        value: Option<U256>,
        authorization_list: Option<Vec<SignedAuthorization>>,
    ) -> Result<ScriptResult> {
        if let Some(to) = to {
            // Plain call, committed via `call` (`commit = true`).
            self.call(
                from,
                to,
                calldata.unwrap_or_default(),
                value.unwrap_or(U256::ZERO),
                authorization_list,
                true,
            )
        } else {
            // No `to` address: a contract deployment (CREATE).
            let res = self.executor.deploy(
                from,
                calldata.expect("No data for create transaction"),
                value.unwrap_or(U256::ZERO),
                None,
            );
            let (address, RawCallResult { gas_used, logs, traces, .. }) = match res {
                Ok(DeployResult { address, raw }) => (address, raw),
                Err(EvmError::Execution(err)) => {
                    // Reverted deployment: report the reason, then continue with the
                    // zero address so `success` below resolves to `false`.
                    let ExecutionErr { raw, reason } = *err;
                    sh_err!("Failed with `{reason}`:\n")?;
                    (Address::ZERO, raw)
                }
                Err(e) => eyre::bail!("Failed deploying contract: {e:?}"),
            };
            Ok(ScriptResult {
                returned: Bytes::new(),
                success: address != Address::ZERO,
                gas_used,
                logs,
                // Manually adjust gas for the trace to add back the stipend/real used gas
                traces: traces
                    .map(|traces| vec![(TraceKind::Execution, traces)])
                    .unwrap_or_default(),
                address: Some(address),
                ..Default::default()
            })
        }
    }
    /// Executes the call
    ///
    /// This will commit the changes if `commit` is true.
    ///
    /// This will return _estimated_ gas instead of the precise gas the call would consume, so it
    /// can be used as `gas_limit`.
    fn call(
        &mut self,
        from: Address,
        to: Address,
        calldata: Bytes,
        value: U256,
        authorization_list: Option<Vec<SignedAuthorization>>,
        commit: bool,
    ) -> Result<ScriptResult> {
        // First execution is always non-committing (`call_raw*`), so pure
        // collection runs leave the backend state untouched.
        let mut res = if let Some(authorization_list) = &authorization_list {
            self.executor.call_raw_with_authorization(
                from,
                to,
                calldata.clone(),
                value,
                authorization_list.clone(),
            )?
        } else {
            self.executor.call_raw(from, to, calldata.clone(), value)?
        };
        let mut gas_used = res.gas_used;
        // We should only need to calculate realistic gas costs when preparing to broadcast
        // something. This happens during the onchain simulation stage, where we commit each
        // collected transactions.
        //
        // Otherwise don't re-execute, or some usecases might be broken: https://github.com/foundry-rs/foundry/issues/3921
        if commit {
            gas_used = self.search_optimal_gas_usage(&res, from, to, &calldata, value)?;
            // Re-execute with state commit (`transact_raw*`) so the change persists.
            res = if let Some(authorization_list) = authorization_list {
                self.executor.transact_raw_with_authorization(
                    from,
                    to,
                    calldata,
                    value,
                    authorization_list,
                )?
            } else {
                self.executor.transact_raw(from, to, calldata, value)?
            }
        }
        // `res` is only partially moved by this pattern; its `cheatcodes` field
        // stays accessible on the next line.
        let RawCallResult { result, reverted, logs, traces, labels, transactions, .. } = res;
        let breakpoints = res.cheatcodes.map(|cheats| cheats.breakpoints).unwrap_or_default();
        Ok(ScriptResult {
            returned: result,
            success: !reverted,
            gas_used,
            logs,
            traces: traces
                .map(|traces| {
                    // Manually adjust gas for the trace to add back the stipend/real used gas
                    vec![(TraceKind::Execution, traces)]
                })
                .unwrap_or_default(),
            labeled_addresses: labels,
            transactions,
            address: None,
            breakpoints,
        })
    }
/// The executor will return the _exact_ gas value this transaction consumed, setting this value
/// as gas limit will result in `OutOfGas` so to come up with a better estimate we search over a
/// possible range we pick a higher gas limit 3x of a succeeded call should be safe.
///
/// This might result in executing the same script multiple times. Depending on the user's goal,
/// it might be problematic when using `ffi`.
fn search_optimal_gas_usage(
&mut self,
res: &RawCallResult,
from: Address,
to: Address,
calldata: &Bytes,
value: U256,
) -> Result<u64> {
let mut gas_used = res.gas_used;
if matches!(res.exit_reason, Some(return_ok!())) {
// Store the current gas limit and reset it later.
let init_gas_limit = self.executor.env().tx.gas_limit;
let mut highest_gas_limit = gas_used * 3;
let mut lowest_gas_limit = gas_used;
let mut last_highest_gas_limit = highest_gas_limit;
while (highest_gas_limit - lowest_gas_limit) > 1 {
let mid_gas_limit = (highest_gas_limit + lowest_gas_limit) / 2;
self.executor.env_mut().tx.gas_limit = mid_gas_limit;
let res = self.executor.call_raw(from, to, calldata.0.clone().into(), value)?;
match res.exit_reason {
Some(InstructionResult::Revert)
| Some(InstructionResult::OutOfGas)
| Some(InstructionResult::OutOfFunds) => {
lowest_gas_limit = mid_gas_limit;
}
_ => {
highest_gas_limit = mid_gas_limit;
// if last two successful estimations only vary by 10%, we consider this to
// sufficiently accurate
const ACCURACY: u64 = 10;
if (last_highest_gas_limit - highest_gas_limit) * ACCURACY
/ last_highest_gas_limit
< 1
{
// update the gas
gas_used = highest_gas_limit;
break;
}
last_highest_gas_limit = highest_gas_limit;
}
}
}
// Reset gas limit in the executor.
self.executor.env_mut().tx.gas_limit = init_gas_limit;
}
Ok(gas_used)
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/execute.rs | crates/script/src/execute.rs | use super::{JsonResult, NestedValue, ScriptResult, runner::ScriptRunner};
use crate::{
ScriptArgs, ScriptConfig,
build::{CompiledState, LinkedBuildData},
simulate::PreSimulationState,
};
use alloy_dyn_abi::FunctionExt;
use alloy_json_abi::{Function, InternalType, JsonAbi};
use alloy_primitives::{
Address, Bytes,
map::{HashMap, HashSet},
};
use alloy_provider::Provider;
use alloy_rpc_types::TransactionInput;
use eyre::{OptionExt, Result};
use foundry_cheatcodes::Wallets;
use foundry_cli::utils::{ensure_clean_constructor, needs_setup};
use foundry_common::{
ContractsByArtifact,
fmt::{format_token, format_token_raw},
provider::get_http_provider,
};
use foundry_config::NamedChain;
use foundry_debugger::Debugger;
use foundry_evm::{
decode::decode_console_logs,
inspectors::cheatcodes::BroadcastableTransactions,
traces::{
CallTraceDecoder, CallTraceDecoderBuilder, TraceKind, decode_trace_arena,
identifier::{SignaturesIdentifier, TraceIdentifiers},
render_trace_arena,
},
};
use futures::future::join_all;
use itertools::Itertools;
use std::path::Path;
use yansi::Paint;
/// State after linking, contains the linked build data along with library addresses and optional
/// array of libraries that need to be predeployed.
pub struct LinkedState {
pub args: ScriptArgs,
pub script_config: ScriptConfig,
pub script_wallets: Wallets,
pub build_data: LinkedBuildData,
}
/// Container for data we need for execution which can only be obtained after linking stage.
#[derive(Debug)]
pub struct ExecutionData {
/// Function to call.
pub func: Function,
/// Calldata to pass to the target contract.
pub calldata: Bytes,
/// Bytecode of the target contract.
pub bytecode: Bytes,
/// ABI of the target contract.
pub abi: JsonAbi,
}
impl LinkedState {
/// Given linked and compiled artifacts, prepares data we need for execution.
/// This includes the function to call and the calldata to pass to it.
pub async fn prepare_execution(self) -> Result<PreExecutionState> {
let Self { args, script_config, script_wallets, build_data } = self;
let target_contract = build_data.get_target_contract()?;
let bytecode = target_contract.bytecode().ok_or_eyre("target contract has no bytecode")?;
let (func, calldata) = args.get_method_and_calldata(&target_contract.abi)?;
ensure_clean_constructor(&target_contract.abi)?;
Ok(PreExecutionState {
args,
script_config,
script_wallets,
execution_data: ExecutionData {
func,
calldata,
bytecode: bytecode.clone(),
abi: target_contract.abi.clone(),
},
build_data,
})
}
}
/// Same as [LinkedState], but also contains [ExecutionData].
#[derive(Debug)]
pub struct PreExecutionState {
pub args: ScriptArgs,
pub script_config: ScriptConfig,
pub script_wallets: Wallets,
pub build_data: LinkedBuildData,
pub execution_data: ExecutionData,
}
impl PreExecutionState {
/// Executes the script and returns the state after execution.
/// Might require executing script twice in cases when we determine sender from execution.
pub async fn execute(mut self) -> Result<ExecutedState> {
let mut runner = self
.script_config
.get_runner_with_cheatcodes(
self.build_data.known_contracts.clone(),
self.script_wallets.clone(),
self.args.debug,
self.build_data.build_data.target.clone(),
)
.await?;
let result = self.execute_with_runner(&mut runner).await?;
// If we have a new sender from execution, we need to use it to deploy libraries and relink
// contracts.
if let Some(new_sender) = self.maybe_new_sender(result.transactions.as_ref())? {
self.script_config.update_sender(new_sender).await?;
// Rollback to rerun linking with the new sender.
let state = CompiledState {
args: self.args,
script_config: self.script_config,
script_wallets: self.script_wallets,
build_data: self.build_data.build_data,
};
return Box::pin(state.link().await?.prepare_execution().await?.execute()).await;
}
Ok(ExecutedState {
args: self.args,
script_config: self.script_config,
script_wallets: self.script_wallets,
build_data: self.build_data,
execution_data: self.execution_data,
execution_result: result,
})
}
/// Executes the script using the provided runner and returns the [ScriptResult].
pub async fn execute_with_runner(&self, runner: &mut ScriptRunner) -> Result<ScriptResult> {
let (address, mut setup_result) = runner.setup(
&self.build_data.predeploy_libraries,
self.execution_data.bytecode.clone(),
needs_setup(&self.execution_data.abi),
&self.script_config,
self.args.broadcast,
)?;
if setup_result.success {
let script_result = runner.script(address, self.execution_data.calldata.clone())?;
setup_result.success &= script_result.success;
setup_result.gas_used = script_result.gas_used;
setup_result.logs.extend(script_result.logs);
setup_result.traces.extend(script_result.traces);
setup_result.labeled_addresses.extend(script_result.labeled_addresses);
setup_result.returned = script_result.returned;
setup_result.breakpoints = script_result.breakpoints;
match (&mut setup_result.transactions, script_result.transactions) {
(Some(txs), Some(new_txs)) => {
txs.extend(new_txs);
}
(None, Some(new_txs)) => {
setup_result.transactions = Some(new_txs);
}
_ => {}
}
}
Ok(setup_result)
}
/// It finds the deployer from the running script and uses it to predeploy libraries.
///
/// If there are multiple candidate addresses, it skips everything and lets `--sender` deploy
/// them instead.
fn maybe_new_sender(
&self,
transactions: Option<&BroadcastableTransactions>,
) -> Result<Option<Address>> {
let mut new_sender = None;
if let Some(txs) = transactions {
// If the user passed a `--sender` don't check anything.
if self.build_data.predeploy_libraries.libraries_count() > 0
&& self.args.evm.sender.is_none()
{
for tx in txs {
if tx.transaction.to().is_none() {
let sender = tx.transaction.from().expect("no sender");
if let Some(ns) = new_sender {
if sender != ns {
sh_warn!(
"You have more than one deployer who could predeploy libraries. Using `--sender` instead."
)?;
return Ok(None);
}
} else if sender != self.script_config.evm_opts.sender {
new_sender = Some(sender);
}
}
}
}
}
Ok(new_sender)
}
}
/// Container for information about RPC-endpoints used during script execution.
pub struct RpcData {
/// Unique list of rpc urls present.
pub total_rpcs: HashSet<String>,
/// If true, one of the transactions did not have a rpc.
pub missing_rpc: bool,
}
impl RpcData {
/// Iterates over script transactions and collects RPC urls.
fn from_transactions(txs: &BroadcastableTransactions) -> Self {
let missing_rpc = txs.iter().any(|tx| tx.rpc.is_none());
let total_rpcs =
txs.iter().filter_map(|tx| tx.rpc.as_ref().cloned()).collect::<HashSet<_>>();
Self { total_rpcs, missing_rpc }
}
/// Returns true if script might be multi-chain.
/// Returns false positive in case when missing rpc is the same as the only rpc present.
pub fn is_multi_chain(&self) -> bool {
self.total_rpcs.len() > 1 || (self.missing_rpc && !self.total_rpcs.is_empty())
}
/// Checks if all RPCs support EIP-3855. Prints a warning if not.
async fn check_shanghai_support(&self) -> Result<()> {
let chain_ids = self.total_rpcs.iter().map(|rpc| async move {
let provider = get_http_provider(rpc);
let id = provider.get_chain_id().await.ok()?;
NamedChain::try_from(id).ok()
});
let chains = join_all(chain_ids).await;
let iter = chains.iter().flatten().map(|c| (c.supports_shanghai(), c));
if iter.clone().any(|(s, _)| !s) {
let msg = format!(
"\
EIP-3855 is not supported in one or more of the RPCs used.
Unsupported Chain IDs: {}.
Contracts deployed with a Solidity version equal or higher than 0.8.20 might not work properly.
For more information, please see https://eips.ethereum.org/EIPS/eip-3855",
iter.filter(|(supported, _)| !supported)
.map(|(_, chain)| *chain as u64)
.format(", ")
);
sh_warn!("{msg}")?;
}
Ok(())
}
}
/// Container for data being collected after execution.
pub struct ExecutionArtifacts {
/// Trace decoder used to decode traces.
pub decoder: CallTraceDecoder,
/// Return values from the execution result.
pub returns: HashMap<String, NestedValue>,
/// Information about RPC endpoints used during script execution.
pub rpc_data: RpcData,
}
/// State after the script has been executed.
pub struct ExecutedState {
pub args: ScriptArgs,
pub script_config: ScriptConfig,
pub script_wallets: Wallets,
pub build_data: LinkedBuildData,
pub execution_data: ExecutionData,
pub execution_result: ScriptResult,
}
impl ExecutedState {
/// Collects the data we need for simulation and various post-execution tasks.
pub async fn prepare_simulation(self) -> Result<PreSimulationState> {
let returns = self.get_returns()?;
let decoder = self.build_trace_decoder(&self.build_data.known_contracts).await?;
let mut txs = self.execution_result.transactions.clone().unwrap_or_default();
// Ensure that unsigned transactions have both `data` and `input` populated to avoid
// issues with eth_estimateGas and eth_sendTransaction requests.
for tx in &mut txs {
if let Some(req) = tx.transaction.as_unsigned_mut() {
req.input =
TransactionInput::maybe_both(std::mem::take(&mut req.input).into_input());
}
}
let rpc_data = RpcData::from_transactions(&txs);
if rpc_data.is_multi_chain() {
sh_warn!("Multi chain deployment is still under development. Use with caution.")?;
if !self.build_data.libraries.is_empty() {
eyre::bail!(
"Multi chain deployment does not support library linking at the moment."
)
}
}
rpc_data.check_shanghai_support().await?;
Ok(PreSimulationState {
args: self.args,
script_config: self.script_config,
script_wallets: self.script_wallets,
build_data: self.build_data,
execution_data: self.execution_data,
execution_result: self.execution_result,
execution_artifacts: ExecutionArtifacts { decoder, returns, rpc_data },
})
}
/// Builds [CallTraceDecoder] from the execution result and known contracts.
async fn build_trace_decoder(
&self,
known_contracts: &ContractsByArtifact,
) -> Result<CallTraceDecoder> {
let mut decoder = CallTraceDecoderBuilder::new()
.with_labels(self.execution_result.labeled_addresses.clone())
.with_verbosity(self.script_config.evm_opts.verbosity)
.with_known_contracts(known_contracts)
.with_signature_identifier(SignaturesIdentifier::from_config(
&self.script_config.config,
)?)
.with_label_disabled(self.args.disable_labels)
.build();
let mut identifier = TraceIdentifiers::new().with_local(known_contracts).with_external(
&self.script_config.config,
self.script_config.evm_opts.get_remote_chain_id().await,
)?;
for (_, trace) in &self.execution_result.traces {
decoder.identify(trace, &mut identifier);
}
Ok(decoder)
}
/// Collects the return values from the execution result.
fn get_returns(&self) -> Result<HashMap<String, NestedValue>> {
let mut returns = HashMap::default();
let returned = &self.execution_result.returned;
let func = &self.execution_data.func;
match func.abi_decode_output(returned) {
Ok(decoded) => {
for (index, (token, output)) in decoded.iter().zip(&func.outputs).enumerate() {
let internal_type =
output.internal_type.clone().unwrap_or(InternalType::Other {
contract: None,
ty: "unknown".to_string(),
});
let label = if !output.name.is_empty() {
output.name.to_string()
} else {
index.to_string()
};
returns.insert(
label,
NestedValue {
internal_type: internal_type.to_string(),
value: format_token_raw(token),
},
);
}
}
Err(_) => {
sh_err!("Failed to decode return value: {:x?}", returned)?;
}
}
Ok(returns)
}
}
impl PreSimulationState {
pub async fn show_json(&self) -> Result<()> {
let mut result = self.execution_result.clone();
for (_, trace) in &mut result.traces {
decode_trace_arena(trace, &self.execution_artifacts.decoder).await;
}
let json_result = JsonResult {
logs: decode_console_logs(&result.logs),
returns: &self.execution_artifacts.returns,
result: &result,
};
let json = serde_json::to_string(&json_result)?;
sh_println!("{json}")?;
if !self.execution_result.success {
return Err(eyre::eyre!(
"script failed: {}",
&self.execution_artifacts.decoder.revert_decoder.decode(&result.returned[..], None)
));
}
Ok(())
}
pub async fn show_traces(&self) -> Result<()> {
let verbosity = self.script_config.evm_opts.verbosity;
let func = &self.execution_data.func;
let result = &self.execution_result;
let decoder = &self.execution_artifacts.decoder;
if !result.success || verbosity > 3 {
if result.traces.is_empty() {
warn!(verbosity, "no traces");
}
sh_println!("Traces:")?;
for (kind, trace) in &result.traces {
let should_include = match kind {
TraceKind::Setup => verbosity >= 5,
TraceKind::Execution => verbosity > 3,
_ => false,
} || !result.success;
if should_include {
let mut trace = trace.clone();
decode_trace_arena(&mut trace, decoder).await;
sh_println!("{}", render_trace_arena(&trace))?;
}
}
sh_println!()?;
}
if result.success {
sh_println!("{}", "Script ran successfully.".green())?;
}
if self.script_config.evm_opts.fork_url.is_none() {
sh_println!("Gas used: {}", result.gas_used)?;
}
if result.success && !result.returned.is_empty() {
sh_println!("\n== Return ==")?;
match func.abi_decode_output(&result.returned) {
Ok(decoded) => {
for (index, (token, output)) in decoded.iter().zip(&func.outputs).enumerate() {
let internal_type =
output.internal_type.clone().unwrap_or(InternalType::Other {
contract: None,
ty: "unknown".to_string(),
});
let label = if !output.name.is_empty() {
output.name.to_string()
} else {
index.to_string()
};
sh_println!(
"{label}: {internal_type} {value}",
label = label.trim_end(),
value = format_token(token)
)?;
}
}
Err(_) => {
sh_err!("{:x?}", (&result.returned))?;
}
}
}
let console_logs = decode_console_logs(&result.logs);
if !console_logs.is_empty() {
sh_println!("\n== Logs ==")?;
for log in console_logs {
sh_println!(" {log}")?;
}
}
if !result.success {
return Err(eyre::eyre!(
"script failed: {}",
&self.execution_artifacts.decoder.revert_decoder.decode(&result.returned[..], None)
));
}
Ok(())
}
pub fn run_debugger(self) -> Result<()> {
self.create_debugger().try_run_tui()?;
Ok(())
}
pub fn dump_debugger(self, path: &Path) -> Result<()> {
self.create_debugger().dump_to_file(path)?;
Ok(())
}
fn create_debugger(self) -> Debugger {
Debugger::builder()
.traces(
self.execution_result
.traces
.into_iter()
.filter(|(t, _)| t.is_execution())
.collect(),
)
.decoder(&self.execution_artifacts.decoder)
.sources(self.build_data.sources)
.breakpoints(self.execution_result.breakpoints)
.build()
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/broadcast.rs | crates/script/src/broadcast.rs | use std::{cmp::Ordering, sync::Arc, time::Duration};
use alloy_chains::{Chain, NamedChain};
use alloy_consensus::TxEnvelope;
use alloy_eips::{BlockId, eip2718::Encodable2718};
use alloy_network::{AnyNetwork, EthereumWallet, TransactionBuilder};
use alloy_primitives::{
Address, TxHash,
map::{AddressHashMap, AddressHashSet},
utils::format_units,
};
use alloy_provider::{Provider, utils::Eip1559Estimation};
use alloy_rpc_types::TransactionRequest;
use alloy_serde::WithOtherFields;
use eyre::{Context, Result, bail};
use forge_verify::provider::VerificationProviderType;
use foundry_cheatcodes::Wallets;
use foundry_cli::utils::{has_batch_support, has_different_gas_calc};
use foundry_common::{
TransactionMaybeSigned,
provider::{RetryProvider, get_http_provider, try_get_http_provider},
shell,
};
use foundry_config::Config;
use futures::{FutureExt, StreamExt, future::join_all, stream::FuturesUnordered};
use itertools::Itertools;
use crate::{
ScriptArgs, ScriptConfig, build::LinkedBuildData, progress::ScriptProgress,
sequence::ScriptSequenceKind, verify::BroadcastedState,
};
pub async fn estimate_gas<P: Provider<AnyNetwork>>(
tx: &mut WithOtherFields<TransactionRequest>,
provider: &P,
estimate_multiplier: u64,
) -> Result<()> {
// if already set, some RPC endpoints might simply return the gas value that is already
// set in the request and omit the estimate altogether, so we remove it here
tx.gas = None;
tx.set_gas_limit(
provider.estimate_gas(tx.clone()).await.wrap_err("Failed to estimate gas for tx")?
* estimate_multiplier
/ 100,
);
Ok(())
}
pub async fn next_nonce(
caller: Address,
provider_url: &str,
block_number: Option<u64>,
) -> eyre::Result<u64> {
let provider = try_get_http_provider(provider_url)
.wrap_err_with(|| format!("bad fork_url provider: {provider_url}"))?;
let block_id = block_number.map_or(BlockId::latest(), BlockId::number);
Ok(provider.get_transaction_count(caller).block_id(block_id).await?)
}
/// Represents how to send a single transaction.
#[derive(Clone)]
pub enum SendTransactionKind<'a> {
Unlocked(WithOtherFields<TransactionRequest>),
Raw(WithOtherFields<TransactionRequest>, &'a EthereumWallet),
Signed(TxEnvelope),
}
impl<'a> SendTransactionKind<'a> {
/// Prepares the transaction for broadcasting by synchronizing nonce and estimating gas.
///
/// This method performs two key operations:
/// 1. Nonce synchronization: Waits for the provider's nonce to catch up to the expected
/// transaction nonce when doing sequential broadcast
/// 2. Gas estimation: Re-estimates gas right before broadcasting for chains that require it
pub async fn prepare(
&mut self,
provider: &RetryProvider,
sequential_broadcast: bool,
is_fixed_gas_limit: bool,
estimate_via_rpc: bool,
estimate_multiplier: u64,
) -> Result<()> {
if let Self::Raw(tx, _) | Self::Unlocked(tx) = self {
if sequential_broadcast {
let from = tx.from.expect("no sender");
let tx_nonce = tx.nonce.expect("no nonce");
for attempt in 0..5 {
let nonce = provider.get_transaction_count(from).await?;
match nonce.cmp(&tx_nonce) {
Ordering::Greater => {
bail!(
"EOA nonce changed unexpectedly while sending transactions. Expected {tx_nonce} got {nonce} from provider."
)
}
Ordering::Less => {
if attempt == 4 {
bail!(
"After 5 attempts, provider nonce ({nonce}) is still behind expected nonce ({tx_nonce})."
)
}
warn!(
"Expected nonce ({tx_nonce}) is ahead of provider nonce ({nonce}). Retrying in 1 second..."
);
tokio::time::sleep(std::time::Duration::from_millis(1000)).await;
}
Ordering::Equal => {
// Nonces are equal, we can proceed.
break;
}
}
}
}
// Chains which use `eth_estimateGas` are being sent sequentially and require their
// gas to be re-estimated right before broadcasting.
if !is_fixed_gas_limit && estimate_via_rpc {
estimate_gas(tx, provider, estimate_multiplier).await?;
}
}
Ok(())
}
/// Sends the transaction to the network.
///
/// Depending on the transaction kind, this will either:
/// - Submit via `eth_sendTransaction` for unlocked accounts
/// - Sign and submit via `eth_sendRawTransaction` for raw transactions
/// - Submit pre-signed transaction via `eth_sendRawTransaction`
pub async fn send(self, provider: Arc<RetryProvider>) -> Result<TxHash> {
let pending = match self {
Self::Unlocked(tx) => {
debug!("sending transaction from unlocked account {:?}", tx);
// Submit the transaction
provider.send_transaction(tx).await?
}
Self::Raw(tx, signer) => {
debug!("sending transaction: {:?}", tx);
let signed = tx.build(signer).await?;
// Submit the raw transaction
provider.send_raw_transaction(signed.encoded_2718().as_ref()).await?
}
Self::Signed(tx) => {
debug!("sending transaction: {:?}", tx);
provider.send_raw_transaction(tx.encoded_2718().as_ref()).await?
}
};
Ok(*pending.tx_hash())
}
/// Prepares and sends the transaction in one operation.
///
/// This is a convenience method that combines [`prepare`](Self::prepare) and
/// [`send`](Self::send) into a single call.
pub async fn prepare_and_send(
mut self,
provider: Arc<RetryProvider>,
sequential_broadcast: bool,
is_fixed_gas_limit: bool,
estimate_via_rpc: bool,
estimate_multiplier: u64,
) -> Result<TxHash> {
self.prepare(
&provider,
sequential_broadcast,
is_fixed_gas_limit,
estimate_via_rpc,
estimate_multiplier,
)
.await?;
self.send(provider).await
}
}
/// Represents how to send _all_ transactions
pub enum SendTransactionsKind {
/// Send via `eth_sendTransaction` and rely on the `from` address being unlocked.
Unlocked(AddressHashSet),
/// Send a signed transaction via `eth_sendRawTransaction`
Raw(AddressHashMap<EthereumWallet>),
}
impl SendTransactionsKind {
/// Returns the [`SendTransactionKind`] for the given address
///
/// Returns an error if no matching signer is found or the address is not unlocked
pub fn for_sender(
&self,
addr: &Address,
tx: WithOtherFields<TransactionRequest>,
) -> Result<SendTransactionKind<'_>> {
match self {
Self::Unlocked(unlocked) => {
if !unlocked.contains(addr) {
bail!("Sender address {:?} is not unlocked", addr)
}
Ok(SendTransactionKind::Unlocked(tx))
}
Self::Raw(wallets) => {
if let Some(wallet) = wallets.get(addr) {
Ok(SendTransactionKind::Raw(tx, wallet))
} else {
bail!("No matching signer for {:?} found", addr)
}
}
}
}
}
/// State after we have bundled all
/// [`TransactionWithMetadata`](forge_script_sequence::TransactionWithMetadata) objects into a
/// single [`ScriptSequenceKind`] object containing one or more script sequences.
pub struct BundledState {
pub args: ScriptArgs,
pub script_config: ScriptConfig,
pub script_wallets: Wallets,
pub build_data: LinkedBuildData,
pub sequence: ScriptSequenceKind,
}
impl BundledState {
pub async fn wait_for_pending(mut self) -> Result<Self> {
let progress = ScriptProgress::default();
let progress_ref = &progress;
let futs = self
.sequence
.sequences_mut()
.iter_mut()
.enumerate()
.map(|(sequence_idx, sequence)| async move {
let rpc_url = sequence.rpc_url();
let provider = Arc::new(get_http_provider(rpc_url));
progress_ref
.wait_for_pending(
sequence_idx,
sequence,
&provider,
self.script_config.config.transaction_timeout,
)
.await
})
.collect::<Vec<_>>();
let errors = join_all(futs).await.into_iter().filter_map(Result::err).collect::<Vec<_>>();
self.sequence.save(true, false)?;
if !errors.is_empty() {
return Err(eyre::eyre!("{}", errors.iter().format("\n")));
}
Ok(self)
}
/// Broadcasts transactions from all sequences.
pub async fn broadcast(mut self) -> Result<BroadcastedState> {
let required_addresses = self
.sequence
.sequences()
.iter()
.flat_map(|sequence| {
sequence
.transactions()
.filter(|tx| tx.is_unsigned())
.map(|tx| tx.from().expect("missing from"))
})
.collect::<AddressHashSet>();
if required_addresses.contains(&Config::DEFAULT_SENDER) {
eyre::bail!(
"You seem to be using Foundry's default sender. Be sure to set your own --sender."
);
}
let send_kind = if self.args.unlocked {
SendTransactionsKind::Unlocked(required_addresses.clone())
} else {
let signers = self.script_wallets.into_multi_wallet().into_signers()?;
let mut missing_addresses = Vec::new();
for addr in &required_addresses {
if !signers.contains_key(addr) {
missing_addresses.push(addr);
}
}
if !missing_addresses.is_empty() {
eyre::bail!(
"No associated wallet for addresses: {:?}. Unlocked wallets: {:?}",
missing_addresses,
signers.keys().collect::<Vec<_>>()
);
}
let signers = signers
.into_iter()
.map(|(addr, signer)| (addr, EthereumWallet::new(signer)))
.collect();
SendTransactionsKind::Raw(signers)
};
let progress = ScriptProgress::default();
for i in 0..self.sequence.sequences().len() {
let mut sequence = self.sequence.sequences_mut().get_mut(i).unwrap();
let provider = Arc::new(try_get_http_provider(sequence.rpc_url())?);
let already_broadcasted = sequence.receipts.len();
let seq_progress = progress.get_sequence_progress(i, sequence);
if already_broadcasted < sequence.transactions.len() {
let is_legacy = Chain::from(sequence.chain).is_legacy() || self.args.legacy;
// Make a one-time gas price estimation
let (gas_price, eip1559_fees) = match (
is_legacy,
self.args.with_gas_price,
self.args.priority_gas_price,
) {
(true, Some(gas_price), _) => (Some(gas_price.to()), None),
(true, None, _) => (Some(provider.get_gas_price().await?), None),
(false, Some(max_fee_per_gas), Some(max_priority_fee_per_gas)) => (
None,
Some(Eip1559Estimation {
max_fee_per_gas: max_fee_per_gas.to(),
max_priority_fee_per_gas: max_priority_fee_per_gas.to(),
}),
),
(false, _, _) => {
let mut fees = provider.estimate_eip1559_fees().await.wrap_err("Failed to estimate EIP1559 fees. This chain might not support EIP1559, try adding --legacy to your command.")?;
if let Some(gas_price) = self.args.with_gas_price {
fees.max_fee_per_gas = gas_price.to();
}
if let Some(priority_gas_price) = self.args.priority_gas_price {
fees.max_priority_fee_per_gas = priority_gas_price.to();
}
(None, Some(fees))
}
};
// Iterate through transactions, matching the `from` field with the associated
// wallet. Then send the transaction. Panics if we find a unknown `from`
let transactions = sequence
.transactions
.iter()
.skip(already_broadcasted)
.map(|tx_with_metadata| {
let is_fixed_gas_limit = tx_with_metadata.is_fixed_gas_limit;
let kind = match tx_with_metadata.tx().clone() {
TransactionMaybeSigned::Signed { tx, .. } => {
SendTransactionKind::Signed(tx)
}
TransactionMaybeSigned::Unsigned(mut tx) => {
let from = tx.from.expect("No sender for onchain transaction!");
tx.set_chain_id(sequence.chain);
// Set TxKind::Create explicitly to satisfy `check_reqd_fields` in
// alloy
if tx.to.is_none() {
tx.set_create();
}
if let Some(gas_price) = gas_price {
tx.set_gas_price(gas_price);
} else {
let eip1559_fees = eip1559_fees.expect("was set above");
tx.set_max_priority_fee_per_gas(
eip1559_fees.max_priority_fee_per_gas,
);
tx.set_max_fee_per_gas(eip1559_fees.max_fee_per_gas);
}
send_kind.for_sender(&from, tx)?
}
};
Ok((kind, is_fixed_gas_limit))
})
.collect::<Result<Vec<_>>>()?;
let estimate_via_rpc =
has_different_gas_calc(sequence.chain) || self.args.skip_simulation;
// We only wait for a transaction receipt before sending the next transaction, if
// there is more than one signer. There would be no way of assuring
// their order otherwise.
// Or if the chain does not support batched transactions (eg. Arbitrum).
// Or if we need to invoke eth_estimateGas before sending transactions.
let sequential_broadcast = estimate_via_rpc
|| self.args.slow
|| required_addresses.len() != 1
|| !has_batch_support(sequence.chain);
// We send transactions and wait for receipts in batches.
let batch_size = if sequential_broadcast { 1 } else { self.args.batch_size };
let mut index = already_broadcasted;
for (batch_number, batch) in transactions.chunks(batch_size).enumerate() {
seq_progress.inner.write().set_status(&format!(
"Sending transactions [{} - {}]",
batch_number * batch_size,
batch_number * batch_size + std::cmp::min(batch_size, batch.len()) - 1
));
if !batch.is_empty() {
let pending_transactions =
batch.iter().map(|(kind, is_fixed_gas_limit)| {
let provider = provider.clone();
async move {
let res = kind
.clone()
.prepare_and_send(
provider,
sequential_broadcast,
*is_fixed_gas_limit,
estimate_via_rpc,
self.args.gas_estimate_multiplier,
)
.await;
(res, kind, 0, None)
}
.boxed()
});
let mut buffer = pending_transactions.collect::<FuturesUnordered<_>>();
'send: while let Some((res, kind, attempt, original_res)) =
buffer.next().await
{
if res.is_err() && attempt <= 3 {
// Try to resubmit the transaction
let provider = provider.clone();
let progress = seq_progress.inner.clone();
buffer.push(Box::pin(async move {
debug!(err=?res, ?attempt, "retrying transaction ");
let attempt = attempt + 1;
progress.write().set_status(&format!(
"retrying transaction {res:?} (attempt {attempt})"
));
tokio::time::sleep(Duration::from_millis(1000 * attempt)).await;
let r = kind.clone().send(provider).await;
(r, kind, attempt, original_res.or(Some(res)))
}));
continue 'send;
}
// Preserve the original error if any
let tx_hash = res.wrap_err_with(|| {
if let Some(original_res) = original_res {
format!(
"Failed to send transaction after {attempt} attempts {original_res:?}"
)
} else {
"Failed to send transaction".to_string()
}
})?;
sequence.add_pending(index, tx_hash);
// Checkpoint save
self.sequence.save(true, false)?;
sequence = self.sequence.sequences_mut().get_mut(i).unwrap();
seq_progress.inner.write().tx_sent(tx_hash);
index += 1;
}
// Checkpoint save
self.sequence.save(true, false)?;
sequence = self.sequence.sequences_mut().get_mut(i).unwrap();
progress
.wait_for_pending(
i,
sequence,
&provider,
self.script_config.config.transaction_timeout,
)
.await?
}
// Checkpoint save
self.sequence.save(true, false)?;
sequence = self.sequence.sequences_mut().get_mut(i).unwrap();
}
}
let (total_gas, total_gas_price, total_paid) =
sequence.receipts.iter().fold((0, 0, 0), |acc, receipt| {
let gas_used = receipt.gas_used;
let gas_price = receipt.effective_gas_price as u64;
(acc.0 + gas_used, acc.1 + gas_price, acc.2 + gas_used * gas_price)
});
let paid = format_units(total_paid, 18).unwrap_or_else(|_| "N/A".to_string());
let avg_gas_price = format_units(total_gas_price / sequence.receipts.len() as u64, 9)
.unwrap_or_else(|_| "N/A".to_string());
let token_symbol = NamedChain::try_from(sequence.chain)
.unwrap_or_default()
.native_currency_symbol()
.unwrap_or("ETH");
seq_progress.inner.write().set_status(&format!(
"Total Paid: {} {} ({} gas * avg {} gwei)\n",
paid.trim_end_matches('0'),
token_symbol,
total_gas,
avg_gas_price.trim_end_matches('0').trim_end_matches('.')
));
seq_progress.inner.write().finish();
}
if !shell::is_json() {
sh_println!("\n\n==========================")?;
sh_println!("\nONCHAIN EXECUTION COMPLETE & SUCCESSFUL.")?;
}
Ok(BroadcastedState {
args: self.args,
script_config: self.script_config,
build_data: self.build_data,
sequence: self.sequence,
})
}
/// Ensures that an Etherscan API key is configured for every chain in the sequence
/// before broadcasting, so verification will not fail after funds were spent.
pub fn verify_preflight_check(&self) -> Result<()> {
    // Only the Etherscan provider requires an API key; other verifiers need no preflight.
    if self.args.verifier.verifier != VerificationProviderType::Etherscan {
        return Ok(());
    }
    for sequence in self.sequence.sequences() {
        let key = self
            .script_config
            .config
            .get_etherscan_api_key(Some(sequence.chain.into()));
        if key.is_none() {
            eyre::bail!("Missing etherscan key for chain {}", sequence.chain);
        }
    }
    Ok(())
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/build.rs | crates/script/src/build.rs | use crate::{
ScriptArgs, ScriptConfig, broadcast::BundledState, execute::LinkedState,
multi_sequence::MultiChainSequence, sequence::ScriptSequenceKind,
};
use alloy_primitives::{B256, Bytes};
use alloy_provider::Provider;
use eyre::{OptionExt, Result};
use forge_script_sequence::ScriptSequence;
use foundry_cheatcodes::Wallets;
use foundry_common::{
ContractData, ContractsByArtifact, compile::ProjectCompiler, provider::try_get_http_provider,
};
use foundry_compilers::{
ArtifactId, ProjectCompileOutput,
artifacts::{BytecodeObject, Libraries},
compilers::{Language, multi::MultiCompilerLanguage},
info::ContractInfo,
utils::source_files_iter,
};
use foundry_evm::traces::debug::ContractSources;
use foundry_linking::Linker;
use std::{path::PathBuf, str::FromStr, sync::Arc};
/// Container for the compiled contracts.
#[derive(Debug)]
pub struct BuildData {
    /// Root of the project.
    pub project_root: PathBuf,
    /// The compiler output.
    pub output: ProjectCompileOutput,
    /// ID of the target contract artifact within `output`.
    pub target: ArtifactId,
}
impl BuildData {
    /// Returns a [Linker] over all artifact ids in the compiler output, rooted at the project.
    pub fn get_linker(&self) -> Linker<'_> {
        Linker::new(self.project_root.clone(), self.output.artifact_ids().collect())
    }

    /// Links contracts. Uses CREATE2 linking when possible, otherwise falls back to
    /// default linking with sender nonce and address.
    pub async fn link(self, script_config: &ScriptConfig) -> Result<LinkedBuildData> {
        let create2_deployer = script_config.evm_opts.create2_deployer;
        // CREATE2 linking is only usable if the deployer contract actually has code on the
        // target chain, so probe it when a fork URL is available.
        let can_use_create2 = if let Some(fork_url) = &script_config.evm_opts.fork_url {
            let provider = try_get_http_provider(fork_url)?;
            let deployer_code = provider.get_code_at(create2_deployer).await?;
            !deployer_code.is_empty()
        } else {
            // If --fork-url is not provided, we are just simulating the script.
            true
        };
        let known_libraries = script_config.config.libraries_with_remappings()?;
        // Attempt CREATE2 linking first; any error is swallowed via `.ok()` so we can fall
        // back to nonce-based linking below.
        let maybe_create2_link_output = can_use_create2
            .then(|| {
                self.get_linker()
                    .link_with_create2(
                        known_libraries.clone(),
                        create2_deployer,
                        script_config.config.create2_library_salt,
                        &self.target,
                    )
                    .ok()
            })
            .flatten();
        let (libraries, predeploy_libs) = if let Some(output) = maybe_create2_link_output {
            (
                output.libraries,
                ScriptPredeployLibraries::Create2(
                    output.libs_to_deploy,
                    script_config.config.create2_library_salt,
                ),
            )
        } else {
            // Fallback: derive library addresses from the sender address and its nonce.
            let output = self.get_linker().link_with_nonce_or_address(
                known_libraries,
                script_config.evm_opts.sender,
                script_config.sender_nonce,
                [&self.target],
            )?;
            (output.libraries, ScriptPredeployLibraries::Default(output.libs_to_deploy))
        };
        LinkedBuildData::new(libraries, predeploy_libs, self)
    }

    /// Links the build data with the given libraries. Expects supplied libraries set being enough
    /// to fully link target contract.
    pub fn link_with_libraries(self, libraries: Libraries) -> Result<LinkedBuildData> {
        LinkedBuildData::new(libraries, ScriptPredeployLibraries::Default(Vec::new()), self)
    }
}
/// Libraries that must be deployed before the script's own transactions can run.
#[derive(Debug)]
pub enum ScriptPredeployLibraries {
    /// Libraries deployed via plain CREATE transactions from the sender.
    Default(Vec<Bytes>),
    /// Libraries deployed through the CREATE2 deployer with the given salt.
    Create2(Vec<Bytes>, B256),
}
impl ScriptPredeployLibraries {
    /// Number of library deployments required before the script itself executes.
    pub fn libraries_count(&self) -> usize {
        // Both variants carry the list of deploy bytecodes in their first field,
        // so an irrefutable or-pattern extracts it uniformly.
        let (Self::Default(libs) | Self::Create2(libs, _)) = self;
        libs.len()
    }
}
/// Container for the linked contracts and their dependencies.
#[derive(Debug)]
pub struct LinkedBuildData {
    /// Original build data, might be used to relink this object with different libraries.
    pub build_data: BuildData,
    /// Known fully linked contracts.
    pub known_contracts: ContractsByArtifact,
    /// Libraries used to link the contracts.
    pub libraries: Libraries,
    /// Libraries that need to be deployed by sender before script execution.
    pub predeploy_libraries: ScriptPredeployLibraries,
    /// Source files of the contracts. Used by debugger.
    pub sources: ContractSources,
}
impl LinkedBuildData {
    /// Assembles post-link build data: debugger sources and the set of fully
    /// linked artifacts for the supplied libraries.
    pub fn new(
        libraries: Libraries,
        predeploy_libraries: ScriptPredeployLibraries,
        build_data: BuildData,
    ) -> Result<Self> {
        let sources = ContractSources::from_project_output(
            &build_data.output,
            &build_data.project_root,
            Some(&libraries),
        )?;
        let linked_artifacts = build_data.get_linker().get_linked_artifacts(&libraries)?;
        let known_contracts = ContractsByArtifact::new(linked_artifacts);
        Ok(Self { build_data, known_contracts, libraries, predeploy_libraries, sources })
    }

    /// Fetches target bytecode from linked contracts.
    pub fn get_target_contract(&self) -> Result<&ContractData> {
        let target = &self.build_data.target;
        self.known_contracts.get(target).ok_or_eyre("target not found in linked artifacts")
    }
}
/// First state basically containing only inputs of the user.
pub struct PreprocessedState {
    /// CLI arguments for the script run.
    pub args: ScriptArgs,
    /// Resolved script/EVM configuration.
    pub script_config: ScriptConfig,
    /// Wallets available for signing transactions.
    pub script_wallets: Wallets,
}
impl PreprocessedState {
    /// Parses user input and compiles the contracts depending on script target.
    /// After compilation, finds exact [ArtifactId] of the target contract.
    pub fn compile(self) -> Result<CompiledState> {
        let Self { args, script_config, script_wallets } = self;
        let project = script_config.config.project()?;
        let mut target_name = args.target_contract.clone();

        // If we've received correct path, use it as target_path
        // Otherwise, parse input as <path>:<name> and use the path from the contract info, if
        // present.
        let target_path = if let Ok(path) = dunce::canonicalize(&args.path) {
            path
        } else {
            let contract = ContractInfo::from_str(&args.path)?;
            target_name = Some(contract.name.clone());
            if let Some(path) = contract.path {
                dunce::canonicalize(path)?
            } else {
                project.find_contract_path(contract.name.as_str())?
            }
        };

        // Compile all project sources plus the target, which may live outside the sources dir.
        #[expect(clippy::redundant_clone)]
        let sources_to_compile = source_files_iter(
            project.paths.sources.as_path(),
            MultiCompilerLanguage::FILE_EXTENSIONS,
        )
        .chain([target_path.to_path_buf()]);

        let output = ProjectCompiler::new().files(sources_to_compile).compile(&project)?;

        let mut target_id: Option<ArtifactId> = None;

        // Find target artifact id by name and path in compilation artifacts.
        for (id, contract) in output.artifact_ids().filter(|(id, _)| id.source == target_path) {
            if let Some(name) = &target_name {
                if id.name != *name {
                    continue;
                }
            } else if contract.abi.as_ref().is_none_or(|abi| abi.is_empty())
                || contract.bytecode.as_ref().is_none_or(|b| match &b.object {
                    BytecodeObject::Bytecode(b) => b.is_empty(),
                    BytecodeObject::Unlinked(_) => false,
                })
            {
                // Ignore contracts with empty abi or linked bytecode of length 0 which are
                // interfaces/abstract contracts/libraries.
                continue;
            }

            if let Some(target) = target_id {
                // We might have multiple artifacts for the same contract but with different
                // solc versions. Their names will have form of {name}.0.X.Y, so we are
                // stripping versions off before comparing them.
                let target_name = target.name.split('.').next().unwrap();
                let id_name = id.name.split('.').next().unwrap();
                if target_name != id_name {
                    eyre::bail!(
                        "Multiple contracts in the target path. Please specify the contract name with `--tc ContractName`"
                    )
                }
            }
            // Keep the latest match; multiple solc versions of the same contract are allowed.
            target_id = Some(id);
        }

        let target = target_id.ok_or_eyre("Could not find target contract")?;

        Ok(CompiledState {
            args,
            script_config,
            script_wallets,
            build_data: BuildData { output, target, project_root: project.root().to_path_buf() },
        })
    }
}
/// State after we have determined and compiled target contract to be executed.
pub struct CompiledState {
    /// CLI arguments for the script run.
    pub args: ScriptArgs,
    /// Resolved script/EVM configuration.
    pub script_config: ScriptConfig,
    /// Wallets available for signing transactions.
    pub script_wallets: Wallets,
    /// Compiler output together with the resolved target artifact id.
    pub build_data: BuildData,
}
impl CompiledState {
    /// Uses provided sender address to compute library addresses and link contracts with them.
    pub async fn link(self) -> Result<LinkedState> {
        let Self { args, script_config, script_wallets, build_data } = self;
        let build_data = build_data.link(&script_config).await?;
        Ok(LinkedState { args, script_config, script_wallets, build_data })
    }

    /// Tries loading the resumed state from the cache files, skipping simulation stage.
    pub async fn resume(self) -> Result<BundledState> {
        // Multi-chain sequences are stored without a chain id; single-chain sequences are
        // keyed by the chain id of the fork RPC.
        let chain = if self.args.multi {
            None
        } else {
            let fork_url = self.script_config.evm_opts.fork_url.clone().ok_or_eyre("Missing --fork-url field, if you were trying to broadcast a multi-chain sequence, please use --multi flag")?;
            let provider = Arc::new(try_get_http_provider(fork_url)?);
            Some(provider.get_chain_id().await?)
        };

        let sequence = match self.try_load_sequence(chain, false) {
            Ok(sequence) => sequence,
            Err(_) => {
                // If the script was simulated, but there was no attempt to broadcast yet,
                // try to read the script sequence from the `dry-run/` folder
                let mut sequence = self.try_load_sequence(chain, true)?;

                // If sequence was in /dry-run, Update its paths so it is not saved into /dry-run
                // this time as we are about to broadcast it.
                sequence.update_paths_to_broadcasted(
                    &self.script_config.config,
                    &self.args.sig,
                    &self.build_data.target,
                )?;

                sequence.save(true, true)?;
                sequence
            }
        };

        let (args, build_data, script_wallets, script_config) = if !self.args.unlocked {
            // Senders of every transaction that does not yet have a receipt.
            let mut froms = sequence.sequences().iter().flat_map(|s| {
                s.transactions
                    .iter()
                    .skip(s.receipts.len())
                    .map(|t| t.transaction.from().expect("from is missing in script artifact"))
            });

            let available_signers = self
                .script_wallets
                .signers()
                .map_err(|e| eyre::eyre!("Failed to get available signers: {}", e))?;

            if !froms.all(|from| available_signers.contains(&from)) {
                // IF we are missing required signers, execute script as we might need to collect
                // private keys from the execution.
                let executed = self.link().await?.prepare_execution().await?.execute().await?;
                (
                    executed.args,
                    executed.build_data.build_data,
                    executed.script_wallets,
                    executed.script_config,
                )
            } else {
                (self.args, self.build_data, self.script_wallets, self.script_config)
            }
        } else {
            (self.args, self.build_data, self.script_wallets, self.script_config)
        };

        // Collect libraries from sequence and link contracts with them.
        let libraries = match sequence {
            ScriptSequenceKind::Single(ref seq) => Libraries::parse(&seq.libraries)?,
            // Library linking is not supported for multi-chain sequences
            ScriptSequenceKind::Multi(_) => Libraries::default(),
        };

        let linked_build_data = build_data.link_with_libraries(libraries)?;

        Ok(BundledState {
            args,
            script_config,
            script_wallets,
            build_data: linked_build_data,
            sequence,
        })
    }

    /// Loads a cached [ScriptSequenceKind]: single-chain when `chain` is `Some`, multi-chain
    /// otherwise. `dry_run` selects the `dry-run/` folder written by simulation-only runs.
    fn try_load_sequence(&self, chain: Option<u64>, dry_run: bool) -> Result<ScriptSequenceKind> {
        if let Some(chain) = chain {
            let sequence = ScriptSequence::load(
                &self.script_config.config,
                &self.args.sig,
                &self.build_data.target,
                chain,
                dry_run,
            )?;
            Ok(ScriptSequenceKind::Single(sequence))
        } else {
            let sequence = MultiChainSequence::load(
                &self.script_config.config,
                &self.args.sig,
                &self.build_data.target,
                dry_run,
            )?;
            Ok(ScriptSequenceKind::Multi(sequence))
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/verify.rs | crates/script/src/verify.rs | use crate::{
ScriptArgs, ScriptConfig,
build::LinkedBuildData,
sequence::{ScriptSequenceKind, get_commit_hash},
};
use alloy_primitives::{Address, hex};
use eyre::{Result, eyre};
use forge_script_sequence::{AdditionalContract, ScriptSequence};
use forge_verify::{RetryArgs, VerifierArgs, VerifyArgs, provider::VerificationProviderType};
use foundry_cli::opts::{EtherscanOpts, ProjectPathOpts};
use foundry_common::ContractsByArtifact;
use foundry_compilers::{Project, artifacts::EvmVersion, info::ContractInfo};
use foundry_config::{Chain, Config};
use semver::Version;
/// State after we have broadcasted the script.
/// It is assumed that at this point [BroadcastedState::sequence] contains receipts for all
/// broadcasted transactions.
pub struct BroadcastedState {
    /// CLI arguments for the script run.
    pub args: ScriptArgs,
    /// Resolved script/EVM configuration.
    pub script_config: ScriptConfig,
    /// Linked build artifacts, used to match deployed bytecode during verification.
    pub build_data: LinkedBuildData,
    /// The broadcasted sequence(s), including receipts.
    pub sequence: ScriptSequenceKind,
}
impl BroadcastedState {
    /// Attempts on-chain verification of every contract recorded in the broadcast sequences.
    pub async fn verify(self) -> Result<()> {
        let Self { args, script_config, build_data, mut sequence, .. } = self;
        let config = &script_config.config;

        let bundle = VerifyBundle::new(
            &config.project()?,
            config,
            build_data.known_contracts,
            args.retry,
            args.verifier,
        );

        for seq in sequence.sequences_mut() {
            verify_contracts(seq, config, bundle.clone()).await?;
        }

        Ok(())
    }
}
/// Data struct to help `ScriptSequence` verify contracts on `etherscan`.
#[derive(Clone)]
pub struct VerifyBundle {
    /// Optimizer runs, recorded only when the optimizer is enabled.
    pub num_of_optimizations: Option<usize>,
    /// Fully linked artifacts used to match deployed bytecode.
    pub known_contracts: ContractsByArtifact,
    /// Project path settings forwarded to the verifier.
    pub project_paths: ProjectPathOpts,
    /// Per-chain etherscan settings; filled via [VerifyBundle::set_chain].
    pub etherscan: EtherscanOpts,
    /// Retry policy for verification requests.
    pub retry: RetryArgs,
    /// Verification provider selection.
    pub verifier: VerifierArgs,
    /// Whether the project compiles via IR.
    pub via_ir: bool,
}
impl VerifyBundle {
    /// Builds a [VerifyBundle] from project and config state.
    ///
    /// Optimizer runs are only recorded when the optimizer is enabled; etherscan settings
    /// start empty and are populated per-chain via [Self::set_chain].
    pub fn new(
        project: &Project,
        config: &Config,
        known_contracts: ContractsByArtifact,
        retry: RetryArgs,
        verifier: VerifierArgs,
    ) -> Self {
        let num_of_optimizations =
            if config.optimizer == Some(true) { config.optimizer_runs } else { None };

        let config_path = config.get_config_path();

        let project_paths = ProjectPathOpts {
            root: Some(project.paths.root.clone()),
            contracts: Some(project.paths.sources.clone()),
            remappings: project.paths.remappings.clone(),
            remappings_env: None,
            cache_path: Some(project.paths.cache.clone()),
            lib_paths: project.paths.libraries.clone(),
            hardhat: config.profile == Config::HARDHAT_PROFILE,
            config_path: if config_path.exists() { Some(config_path) } else { None },
        };

        let via_ir = config.via_ir;

        Self {
            num_of_optimizations,
            known_contracts,
            etherscan: Default::default(),
            project_paths,
            retry,
            verifier,
            via_ir,
        }
    }

    /// Configures the chain and sets the etherscan key, if available
    pub fn set_chain(&mut self, config: &Config, chain: Chain) {
        // If dealing with multiple chains, we need to be able to change in between the config
        // chain_id.
        self.etherscan.key = config.get_etherscan_api_key(Some(chain));
        self.etherscan.chain = Some(chain);
    }

    /// Given a `VerifyBundle` and contract details, it tries to generate a valid `VerifyArgs` to
    /// use against the `contract_address`.
    ///
    /// Matches the creation `data` against each known artifact's bytecode; `create2_offset`
    /// is 32 for CREATE2 deployments (salt prefix) and 0 otherwise. Returns `None` when no
    /// artifact bytecode prefixes `data`.
    pub fn get_verify_args(
        &self,
        contract_address: Address,
        create2_offset: usize,
        data: &[u8],
        libraries: &[String],
        evm_version: EvmVersion,
    ) -> Option<VerifyArgs> {
        for (artifact, contract) in self.known_contracts.iter() {
            let Some(bytecode) = contract.bytecode() else { continue };
            // If it's a CREATE2, the tx.data comes with a 32-byte salt in the beginning
            // of the transaction
            if data.split_at(create2_offset).1.starts_with(bytecode) {
                // Everything after the bytecode is the ABI-encoded constructor arguments.
                let constructor_args = data.split_at(create2_offset + bytecode.len()).1.to_vec();

                if artifact.source.extension().is_some_and(|e| e.to_str() == Some("vy")) {
                    // NOTE(review): this only warns — verification is still attempted below.
                    // Confirm whether Vyper artifacts should actually be skipped here.
                    warn!("Skipping verification of Vyper contract: {}", artifact.name);
                }

                // Strip artifact profile from contract name when creating contract info.
                let contract = ContractInfo {
                    path: Some(artifact.source.to_string_lossy().to_string()),
                    name: artifact
                        .name
                        .strip_suffix(&format!(".{}", &artifact.profile))
                        // The fallback is a cheap borrow, so no lazy closure is needed
                        // (clippy::unnecessary_lazy_evaluations).
                        .unwrap_or(&artifact.name)
                        .to_string(),
                };

                // We strip the build metadata information, since it can lead to
                // etherscan not identifying it correctly. eg:
                // `v0.8.10+commit.fc410830.Linux.gcc` != `v0.8.10+commit.fc410830`
                let version = Version::new(
                    artifact.version.major,
                    artifact.version.minor,
                    artifact.version.patch,
                );

                let verify = VerifyArgs {
                    address: contract_address,
                    contract: Some(contract),
                    compiler_version: Some(version.to_string()),
                    constructor_args: Some(hex::encode(constructor_args)),
                    constructor_args_path: None,
                    no_auto_detect: false,
                    use_solc: None,
                    num_of_optimizations: self.num_of_optimizations,
                    etherscan: self.etherscan.clone(),
                    rpc: Default::default(),
                    flatten: false,
                    force: false,
                    skip_is_verified_check: true,
                    watch: true,
                    retry: self.retry,
                    libraries: libraries.to_vec(),
                    root: None,
                    verifier: self.verifier.clone(),
                    via_ir: self.via_ir,
                    evm_version: Some(evm_version),
                    show_standard_json_input: false,
                    guess_constructor_args: false,
                    compilation_profile: Some(artifact.profile.to_string()),
                    language: None,
                    creation_transaction_hash: None,
                };

                return Some(verify);
            }
        }
        None
    }
}
/// Given the broadcast log, it matches transactions with receipts, and tries to verify any
/// created contract on etherscan.
async fn verify_contracts(
    sequence: &mut ScriptSequence,
    config: &Config,
    mut verify: VerifyBundle,
) -> Result<()> {
    trace!(target: "script", "verifying {} contracts [{}]", verify.known_contracts.len(), sequence.chain);

    verify.set_chain(config, sequence.chain.into());

    // Proceed only if we have an etherscan key, or the provider does not need one.
    if verify.etherscan.has_key() || verify.verifier.verifier != VerificationProviderType::Etherscan
    {
        trace!(target: "script", "prepare future verifications");

        let mut future_verifications = Vec::with_capacity(sequence.receipts.len());
        let mut unverifiable_contracts = vec![];

        // Make sure the receipts have the right order first.
        sequence.sort_receipts();

        for (receipt, tx) in sequence.receipts.iter_mut().zip(sequence.transactions.iter()) {
            // create2 hash offset
            let mut offset = 0;

            if tx.is_create2() {
                receipt.contract_address = tx.contract_address;
                offset = 32;
            }

            // Verify contract created directly from the transaction
            if let (Some(address), Some(data)) = (receipt.contract_address, tx.tx().input()) {
                match verify.get_verify_args(
                    address,
                    offset,
                    data,
                    &sequence.libraries,
                    config.evm_version,
                ) {
                    Some(verify) => future_verifications.push(verify.run()),
                    None => unverifiable_contracts.push(address),
                };
            }

            // Verify potential contracts created during the transaction execution
            for AdditionalContract { address, init_code, .. } in &tx.additional_contracts {
                match verify.get_verify_args(
                    *address,
                    0,
                    init_code.as_ref(),
                    &sequence.libraries,
                    config.evm_version,
                ) {
                    Some(verify) => future_verifications.push(verify.run()),
                    None => unverifiable_contracts.push(*address),
                };
            }
        }

        trace!(target: "script", "collected {} verification jobs and {} unverifiable contracts", future_verifications.len(), unverifiable_contracts.len());

        check_unverified(sequence, unverifiable_contracts, verify);

        let num_verifications = future_verifications.len();
        let mut num_of_successful_verifications = 0;
        sh_println!("##\nStart verification for ({num_verifications}) contracts")?;
        // Await verification jobs one by one, tallying successes without aborting on the
        // first failure so every contract gets an attempt.
        for verification in future_verifications {
            match verification.await {
                Ok(_) => {
                    num_of_successful_verifications += 1;
                }
                Err(err) => {
                    sh_err!("Failed to verify contract: {err:#}")?;
                }
            }
        }

        if num_of_successful_verifications < num_verifications {
            return Err(eyre!(
                "Not all ({num_of_successful_verifications} / {num_verifications}) contracts were verified!"
            ));
        }

        sh_println!("All ({num_verifications}) contracts were verified!")?;
    }

    Ok(())
}
/// Warns about contracts whose bytecode matched no known artifact, and flags a commit
/// mismatch between the broadcast and the current checkout.
fn check_unverified(
    sequence: &ScriptSequence,
    unverifiable_contracts: Vec<Address>,
    verify: VerifyBundle,
) {
    // Nothing to report.
    if unverifiable_contracts.is_empty() {
        return;
    }

    let _ = sh_warn!(
        "We haven't found any matching bytecode for the following contracts: {:?}.\n\n\
        This may occur when resuming a verification, but the underlying source code or compiler version has changed.\n\
        Run `forge clean` to make sure builds are in sync with project files, then try again. Alternatively, use `forge verify-contract` to verify contracts that are already deployed.",
        unverifiable_contracts
    );

    // Without a recorded commit there is nothing further to compare.
    let Some(commit) = &sequence.commit else { return };

    let current_commit = verify
        .project_paths
        .root
        .map(|root| get_commit_hash(&root).unwrap_or_default())
        .unwrap_or_default();

    if &current_commit != commit {
        let _ = sh_warn!(
            "Script was broadcasted on commit `{commit}`, but we are at `{current_commit}`."
        );
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/progress.rs | crates/script/src/progress.rs | use crate::receipts::{PendingReceiptError, TxStatus, check_tx_status, format_receipt};
use alloy_chains::Chain;
use alloy_primitives::{
B256,
map::{B256HashMap, HashMap},
};
use eyre::Result;
use forge_script_sequence::ScriptSequence;
use foundry_cli::utils::init_progress;
use foundry_common::{provider::RetryProvider, shell};
use futures::StreamExt;
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use parking_lot::RwLock;
use std::{fmt::Write, sync::Arc, time::Duration};
use yansi::Paint;
/// State of [ProgressBar]s displayed for the given [ScriptSequence].
#[derive(Debug)]
pub struct SequenceProgressState {
    /// The top spinner with content of the format "Sequence #{id} on {network} | {status}".
    top_spinner: ProgressBar,
    /// Progress bar with the count of transactions.
    txs: ProgressBar,
    /// Progress bar with the count of confirmed transactions.
    receipts: ProgressBar,
    /// Standalone spinners for pending transactions.
    tx_spinners: B256HashMap<ProgressBar>,
    /// Copy of the main [MultiProgress] instance.
    multi: MultiProgress,
}
impl SequenceProgressState {
    /// Creates the progress UI for a single sequence; all bars are hidden in quiet/JSON mode.
    pub fn new(sequence_idx: usize, sequence: &ScriptSequence, multi: MultiProgress) -> Self {
        let mut state = if shell::is_quiet() || shell::is_json() {
            let top_spinner = ProgressBar::hidden();
            let txs = ProgressBar::hidden();
            let receipts = ProgressBar::hidden();

            Self { top_spinner, txs, receipts, tx_spinners: Default::default(), multi }
        } else {
            let mut template = "{spinner:.green}".to_string();
            write!(template, " Sequence #{} on {}", sequence_idx + 1, Chain::from(sequence.chain))
                .unwrap();
            template.push_str("{msg}");

            let top_spinner = ProgressBar::new_spinner().with_style(
                ProgressStyle::with_template(&template).unwrap().tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈✅"),
            );
            let top_spinner = multi.add(top_spinner);

            let txs = multi.insert_after(
                &top_spinner,
                init_progress(sequence.transactions.len() as u64, "txes").with_prefix(" "),
            );

            let receipts = multi.insert_after(
                &txs,
                init_progress(sequence.transactions.len() as u64, "receipts").with_prefix(" "),
            );

            top_spinner.enable_steady_tick(Duration::from_millis(100));
            txs.enable_steady_tick(Duration::from_millis(1000));
            receipts.enable_steady_tick(Duration::from_millis(1000));

            // Account for transactions already confirmed in a previous (resumed) run.
            txs.set_position(sequence.receipts.len() as u64);
            receipts.set_position(sequence.receipts.len() as u64);

            Self { top_spinner, txs, receipts, tx_spinners: Default::default(), multi }
        };

        // Re-display spinners for transactions that were already pending when resuming.
        for tx_hash in &sequence.pending {
            state.tx_sent(*tx_hash);
        }

        state
    }

    /// Called when a new transaction is sent. Displays a spinner with a hash of the transaction and
    /// advances the sent transactions progress bar.
    pub fn tx_sent(&mut self, tx_hash: B256) {
        // Avoid showing more than 10 spinners.
        if self.tx_spinners.len() < 10 {
            let spinner = if shell::is_quiet() || shell::is_json() {
                ProgressBar::hidden()
            } else {
                let spinner = ProgressBar::new_spinner()
                    .with_style(
                        ProgressStyle::with_template(" {spinner:.green} {msg}")
                            .unwrap()
                            .tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈"),
                    )
                    .with_message(format!("{} {}", "[Pending]".yellow(), tx_hash));

                let spinner = self.multi.insert_before(&self.txs, spinner);
                spinner.enable_steady_tick(Duration::from_millis(100));

                spinner
            };
            self.tx_spinners.insert(tx_hash, spinner);
        }
        self.txs.inc(1);
    }

    /// Removes the pending transaction spinner and advances confirmed transactions progress bar.
    pub fn finish_tx_spinner(&mut self, tx_hash: B256) {
        if let Some(spinner) = self.tx_spinners.remove(&tx_hash) {
            spinner.finish_and_clear();
        }
        self.receipts.inc(1);
    }

    /// Same as finish_tx_spinner but also prints a message to stdout above all other progress bars.
    pub fn finish_tx_spinner_with_msg(&mut self, tx_hash: B256, msg: &str) -> std::io::Result<()> {
        self.finish_tx_spinner(tx_hash);

        if !(shell::is_quiet() || shell::is_json()) {
            self.multi.println(msg)?;
        }
        Ok(())
    }

    /// Sets status for the current sequence progress.
    pub fn set_status(&mut self, status: &str) {
        self.top_spinner.set_message(format!(" | {status}"));
    }

    /// Hides transactions and receipts progress bar, leaving only top line with the latest set
    /// status.
    pub fn finish(&self) {
        self.top_spinner.finish();
        self.txs.finish_and_clear();
        self.receipts.finish_and_clear();
    }
}
/// Cloneable wrapper around [SequenceProgressState].
#[derive(Debug, Clone)]
pub struct SequenceProgress {
    /// Shared, lock-protected progress state.
    pub inner: Arc<RwLock<SequenceProgressState>>,
}
impl SequenceProgress {
    /// Wraps a freshly-built [SequenceProgressState] in a shared, thread-safe handle.
    pub fn new(sequence_idx: usize, sequence: &ScriptSequence, multi: MultiProgress) -> Self {
        let state = SequenceProgressState::new(sequence_idx, sequence, multi);
        Self { inner: Arc::new(RwLock::new(state)) }
    }
}
/// Container for multiple [SequenceProgress] instances keyed by sequence index.
#[derive(Debug, Clone, Default)]
pub struct ScriptProgress {
    /// Lazily-populated map of sequence index -> per-sequence progress UI.
    state: Arc<RwLock<HashMap<usize, SequenceProgress>>>,
    /// Shared multi-bar terminal renderer.
    multi: MultiProgress,
}
impl ScriptProgress {
    /// Returns a [SequenceProgress] instance for the given sequence index. If it doesn't exist,
    /// creates one.
    pub fn get_sequence_progress(
        &self,
        sequence_idx: usize,
        sequence: &ScriptSequence,
    ) -> SequenceProgress {
        if let Some(progress) = self.state.read().get(&sequence_idx) {
            return progress.clone();
        }
        // NOTE(review): the read lock is released before the write-insert below, so two
        // concurrent callers could each build a progress UI for the same index (last insert
        // wins) — confirm this is only ever called from a single task.
        let progress = SequenceProgress::new(sequence_idx, sequence, self.multi.clone());
        self.state.write().insert(sequence_idx, progress.clone());
        progress
    }

    /// Traverses a set of pending transactions and either finds receipts, or clears
    /// them from the deployment sequence.
    ///
    /// For each `tx_hash`, we check if it has confirmed. If it has
    /// confirmed, we push the receipt (if successful) or push an error (if
    /// revert). If the transaction has not confirmed, but can be found in the
    /// node's mempool, we wait for its receipt to be available. If the transaction
    /// has not confirmed, and cannot be found in the mempool, we remove it from
    /// the `deploy_sequence.pending` vector so that it will be rebroadcast in
    /// later steps.
    pub async fn wait_for_pending(
        &self,
        sequence_idx: usize,
        deployment_sequence: &mut ScriptSequence,
        provider: &RetryProvider,
        timeout: u64,
    ) -> Result<()> {
        if deployment_sequence.pending.is_empty() {
            return Ok(());
        }

        let count = deployment_sequence.pending.len();
        let seq_progress = self.get_sequence_progress(sequence_idx, deployment_sequence);

        seq_progress.inner.write().set_status("Waiting for pending transactions");

        trace!("Checking status of {count} pending transactions");

        // Poll receipt status, at most 10 transactions concurrently.
        let futs = deployment_sequence
            .pending
            .clone()
            .into_iter()
            .map(|tx| check_tx_status(provider, tx, timeout));
        let mut tasks = futures::stream::iter(futs).buffer_unordered(10);

        let mut errors: Vec<String> = vec![];
        // Set when a tx was dropped/stuck and removed from `pending` for later re-broadcast.
        let mut discarded_transactions = false;

        while let Some((tx_hash, result)) = tasks.next().await {
            match result {
                Err(err) => {
                    // Check if this is a retry error for pending receipts
                    if err.downcast_ref::<PendingReceiptError>().is_some() {
                        // We've already retried several times with sleep, but the receipt is still
                        // pending
                        discarded_transactions = true;
                        deployment_sequence.remove_pending(tx_hash);
                        seq_progress
                            .inner
                            .write()
                            .finish_tx_spinner_with_msg(tx_hash, &err.to_string())?;
                    } else {
                        errors.push(format!(
                            "Failure on receiving a receipt for {tx_hash:?}:\n{err}"
                        ));
                        seq_progress.inner.write().finish_tx_spinner(tx_hash);
                    }
                }
                Ok(TxStatus::Dropped) => {
                    // We want to remove it from pending so it will be re-broadcast.
                    deployment_sequence.remove_pending(tx_hash);
                    discarded_transactions = true;
                    let msg = format!(
                        "Transaction {tx_hash:?} dropped from the mempool. It will be retried when using --resume."
                    );
                    seq_progress.inner.write().finish_tx_spinner_with_msg(tx_hash, &msg)?;
                }
                Ok(TxStatus::Success(receipt)) => {
                    trace!(tx_hash=?tx_hash, "received tx receipt");

                    let msg = format_receipt(deployment_sequence.chain.into(), &receipt);
                    seq_progress.inner.write().finish_tx_spinner_with_msg(tx_hash, &msg)?;

                    deployment_sequence.remove_pending(receipt.transaction_hash);
                    deployment_sequence.add_receipt(receipt);
                }
                Ok(TxStatus::Revert(receipt)) => {
                    // consider:
                    // if this is not removed from pending, then the script becomes
                    // un-resumable. Is this desirable on reverts?
                    warn!(tx_hash=?tx_hash, "Transaction Failure");
                    deployment_sequence.remove_pending(receipt.transaction_hash);

                    let msg = format_receipt(deployment_sequence.chain.into(), &receipt);
                    seq_progress.inner.write().finish_tx_spinner_with_msg(tx_hash, &msg)?;

                    errors.push(format!("Transaction Failure: {:?}", receipt.transaction_hash));
                }
            }
        }

        // print any errors
        if !errors.is_empty() {
            let mut error_msg = errors.join("\n");
            // Add information about using --resume if necessary
            if !deployment_sequence.pending.is_empty() || discarded_transactions {
                error_msg += r#"
Add `--resume` to your command to try and continue broadcasting the transactions. This will attempt to resend transactions that were discarded by the RPC."#;
            }
            eyre::bail!(error_msg);
        } else if discarded_transactions {
            // If we have discarded transactions but no errors, still inform the user
            sh_warn!(
                "Some transactions were discarded by the RPC node. Use `--resume` to retry these transactions."
            )?;
        }

        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/simulate.rs | crates/script/src/simulate.rs | use super::{
multi_sequence::MultiChainSequence, providers::ProvidersManager, runner::ScriptRunner,
sequence::ScriptSequenceKind, transaction::ScriptTransactionBuilder,
};
use crate::{
ScriptArgs, ScriptConfig, ScriptResult,
broadcast::{BundledState, estimate_gas},
build::LinkedBuildData,
execute::{ExecutionArtifacts, ExecutionData},
sequence::get_commit_hash,
};
use alloy_chains::NamedChain;
use alloy_network::TransactionBuilder;
use alloy_primitives::{Address, TxKind, U256, map::HashMap, utils::format_units};
use dialoguer::Confirm;
use eyre::{Context, Result};
use forge_script_sequence::{ScriptSequence, TransactionWithMetadata};
use foundry_cheatcodes::Wallets;
use foundry_cli::utils::{has_different_gas_calc, now};
use foundry_common::{ContractData, shell};
use foundry_evm::traces::{decode_trace_arena, render_trace_arena};
use futures::future::{join_all, try_join_all};
use parking_lot::RwLock;
use std::{
collections::{BTreeMap, VecDeque},
mem,
sync::Arc,
};
/// Same as [ExecutedState](crate::execute::ExecutedState), but also contains [ExecutionArtifacts]
/// which are obtained from [ScriptResult].
///
/// Can be either converted directly to [BundledState] or driven to it through
/// [FilledTransactionsState].
pub struct PreSimulationState {
    /// CLI arguments for the script run.
    pub args: ScriptArgs,
    /// Resolved script/EVM configuration.
    pub script_config: ScriptConfig,
    /// Wallets available for signing transactions.
    pub script_wallets: Wallets,
    /// Linked build artifacts.
    pub build_data: LinkedBuildData,
    /// Prepared execution inputs for the target script (see [ExecutionData]).
    pub execution_data: ExecutionData,
    /// Raw result of the local script execution.
    pub execution_result: ScriptResult,
    /// Artifacts (e.g. trace decoder) derived from the execution result.
    pub execution_artifacts: ExecutionArtifacts,
}
impl PreSimulationState {
/// If simulation is enabled, simulates transactions against fork and fills gas estimation and
/// metadata. Otherwise, metadata (e.g. additional contracts, created contract names) is
/// left empty.
///
/// Both modes will panic if any of the transactions have None for the `rpc` field.
pub async fn fill_metadata(self) -> Result<FilledTransactionsState> {
let address_to_abi = self.build_address_to_abi_map();
let mut transactions = self
.execution_result
.transactions
.clone()
.unwrap_or_default()
.into_iter()
.map(|tx| {
let rpc = tx.rpc.expect("missing broadcastable tx rpc url");
let sender = tx.transaction.from().expect("all transactions should have a sender");
let nonce = tx.transaction.nonce().expect("all transactions should have a sender");
let to = tx.transaction.to();
let mut builder = ScriptTransactionBuilder::new(tx.transaction, rpc);
if let Some(TxKind::Call(_)) = to {
builder.set_call(
&address_to_abi,
&self.execution_artifacts.decoder,
self.script_config.evm_opts.create2_deployer,
)?;
} else {
builder.set_create(false, sender.create(nonce), &address_to_abi)?;
}
Ok(builder.build())
})
.collect::<Result<VecDeque<_>>>()?;
if self.args.skip_simulation {
sh_println!("\nSKIPPING ON CHAIN SIMULATION.")?;
} else {
transactions = self.simulate_and_fill(transactions).await?;
}
Ok(FilledTransactionsState {
args: self.args,
script_config: self.script_config,
script_wallets: self.script_wallets,
build_data: self.build_data,
execution_artifacts: self.execution_artifacts,
transactions,
})
}
/// Builds separate runners and environments for each RPC used in script and executes all
/// transactions in those environments.
///
/// Collects gas usage and metadata for each transaction.
pub async fn simulate_and_fill(
&self,
transactions: VecDeque<TransactionWithMetadata>,
) -> Result<VecDeque<TransactionWithMetadata>> {
trace!(target: "script", "executing onchain simulation");
let runners = Arc::new(
self.build_runners()
.await?
.into_iter()
.map(|(rpc, runner)| (rpc, Arc::new(RwLock::new(runner))))
.collect::<HashMap<_, _>>(),
);
let mut final_txs = VecDeque::new();
// Executes all transactions from the different forks concurrently.
let futs = transactions
.into_iter()
.map(|mut transaction| async {
let mut runner = runners.get(&transaction.rpc).expect("invalid rpc url").write();
let tx = transaction.tx_mut();
let to = if let Some(TxKind::Call(to)) = tx.to() { Some(to) } else { None };
let result = runner
.simulate(
tx.from()
.expect("transaction doesn't have a `from` address at execution time"),
to,
tx.input().cloned(),
tx.value(),
tx.authorization_list(),
)
.wrap_err("Internal EVM error during simulation")?;
if !result.success {
return Ok((None, false, result.traces));
}
// Simulate mining the transaction if the user passes `--slow`.
if self.args.slow {
runner.executor.env_mut().evm_env.block_env.number += U256::from(1);
}
let is_noop_tx = if let Some(to) = to {
runner.executor.is_empty_code(to)? && tx.value().unwrap_or_default().is_zero()
} else {
false
};
let transaction = ScriptTransactionBuilder::from(transaction)
.with_execution_result(
&result,
self.args.gas_estimate_multiplier,
&self.build_data,
)
.build();
eyre::Ok((Some(transaction), is_noop_tx, result.traces))
})
.collect::<Vec<_>>();
if !shell::is_json() && self.script_config.evm_opts.verbosity > 3 {
sh_println!("==========================")?;
sh_println!("Simulated On-chain Traces:\n")?;
}
let mut abort = false;
for res in join_all(futs).await {
let (tx, is_noop_tx, mut traces) = res?;
// Transaction will be `None`, if execution didn't pass.
if tx.is_none() || self.script_config.evm_opts.verbosity > 3 {
for (_, trace) in &mut traces {
decode_trace_arena(trace, &self.execution_artifacts.decoder).await;
sh_println!("{}", render_trace_arena(trace))?;
}
}
if let Some(tx) = tx {
if is_noop_tx {
let to = tx.contract_address.unwrap();
sh_warn!(
"Script contains a transaction to {to} which does not contain any code."
)?;
// Only prompt if we're broadcasting and we've not disabled interactivity.
if self.args.should_broadcast()
&& !self.args.non_interactive
&& !Confirm::new()
.with_prompt("Do you wish to continue?".to_string())
.interact()?
{
eyre::bail!("User canceled the script.");
}
}
final_txs.push_back(tx);
} else {
abort = true;
}
}
if abort {
eyre::bail!("Simulated execution failed.")
}
Ok(final_txs)
}
/// Build mapping from contract address to its ABI, code and contract name.
fn build_address_to_abi_map(&self) -> BTreeMap<Address, &ContractData> {
self.execution_artifacts
.decoder
.contracts
.iter()
.filter_map(move |(addr, contract_id)| {
if let Ok(Some((_, data))) =
self.build_data.known_contracts.find_by_name_or_identifier(contract_id)
{
return Some((*addr, data));
}
None
})
.collect()
}
/// Build [ScriptRunner] forking given RPC for each RPC used in the script.
async fn build_runners(&self) -> Result<Vec<(String, ScriptRunner)>> {
let rpcs = self.execution_artifacts.rpc_data.total_rpcs.clone();
if !shell::is_json() {
let n = rpcs.len();
let s = if n != 1 { "s" } else { "" };
sh_println!("\n## Setting up {n} EVM{s}.")?;
}
let futs = rpcs.into_iter().map(|rpc| async move {
let mut script_config = self.script_config.clone();
script_config.evm_opts.fork_url = Some(rpc.clone());
let runner = script_config.get_runner().await?;
Ok((rpc, runner))
});
try_join_all(futs).await
}
}
/// At this point we have converted transactions collected during script execution to
/// [TransactionWithMetadata] objects which contain additional metadata needed for broadcasting and
/// verification.
pub struct FilledTransactionsState {
pub args: ScriptArgs,
pub script_config: ScriptConfig,
pub script_wallets: Wallets,
pub build_data: LinkedBuildData,
pub execution_artifacts: ExecutionArtifacts,
pub transactions: VecDeque<TransactionWithMetadata>,
}
impl FilledTransactionsState {
/// Bundles all transactions of the [`TransactionWithMetadata`] type in a list of
/// [`ScriptSequence`]. List length will be higher than 1, if we're dealing with a multi
/// chain deployment.
///
/// Each transaction will be added with the correct transaction type and gas estimation.
pub async fn bundle(mut self) -> Result<BundledState> {
let is_multi_deployment = self.execution_artifacts.rpc_data.total_rpcs.len() > 1;
if is_multi_deployment && !self.build_data.libraries.is_empty() {
eyre::bail!("Multi-chain deployment is not supported with libraries.");
}
let mut total_gas_per_rpc: HashMap<String, u128> = HashMap::default();
// Batches sequence of transactions from different rpcs.
let mut new_sequence = VecDeque::new();
let mut manager = ProvidersManager::default();
let mut sequences = vec![];
// Peeking is used to check if the next rpc url is different. If so, it creates a
// [`ScriptSequence`] from all the collected transactions up to this point.
let mut txes_iter = mem::take(&mut self.transactions).into_iter().peekable();
while let Some(mut tx) = txes_iter.next() {
let tx_rpc = tx.rpc.to_owned();
let provider_info = manager.get_or_init_provider(&tx.rpc, self.args.legacy).await?;
if let Some(tx) = tx.tx_mut().as_unsigned_mut() {
// Handles chain specific requirements for unsigned transactions.
tx.set_chain_id(provider_info.chain);
}
if !self.args.skip_simulation {
let tx = tx.tx_mut();
if has_different_gas_calc(provider_info.chain) {
// only estimate gas for unsigned transactions
if let Some(tx) = tx.as_unsigned_mut() {
trace!("estimating with different gas calculation");
let gas = tx.gas.expect("gas is set by simulation.");
// We are trying to show the user an estimation of the total gas usage.
//
// However, some transactions might depend on previous ones. For
// example, tx1 might deploy a contract that tx2 uses. That
// will result in the following `estimate_gas` call to fail,
// since tx1 hasn't been broadcasted yet.
//
// Not exiting here will not be a problem when actually broadcasting,
// because for chains where `has_different_gas_calc`
// returns true, we await each transaction before
// broadcasting the next one.
if let Err(err) = estimate_gas(
tx,
&provider_info.provider,
self.args.gas_estimate_multiplier,
)
.await
{
trace!("gas estimation failed: {err}");
// Restore gas value, since `estimate_gas` will remove it.
tx.set_gas_limit(gas);
}
}
}
let total_gas = total_gas_per_rpc.entry(tx_rpc.clone()).or_insert(0);
*total_gas += tx.gas().expect("gas is set");
}
new_sequence.push_back(tx);
// We only create a [`ScriptSequence`] object when we collect all the rpc related
// transactions.
if let Some(next_tx) = txes_iter.peek()
&& next_tx.rpc == tx_rpc
{
continue;
}
let sequence =
self.create_sequence(is_multi_deployment, provider_info.chain, new_sequence)?;
sequences.push(sequence);
new_sequence = VecDeque::new();
}
if !self.args.skip_simulation {
// Present gas information on a per RPC basis.
for (rpc, total_gas) in total_gas_per_rpc {
let provider_info = manager.get(&rpc).expect("provider is set.");
// Get the native token symbol for the chain using NamedChain
let token_symbol = NamedChain::try_from(provider_info.chain)
.unwrap_or_default()
.native_currency_symbol()
.unwrap_or("ETH");
// We don't store it in the transactions, since we want the most updated value.
// Right before broadcasting.
let per_gas = if let Some(gas_price) = self.args.with_gas_price {
gas_price.to()
} else {
provider_info.gas_price()?
};
let estimated_gas_price_raw = format_units(per_gas, 9)
.unwrap_or_else(|_| "[Could not calculate]".to_string());
let estimated_gas_price =
estimated_gas_price_raw.trim_end_matches('0').trim_end_matches('.');
let estimated_amount_raw = format_units(total_gas.saturating_mul(per_gas), 18)
.unwrap_or_else(|_| "[Could not calculate]".to_string());
let estimated_amount = estimated_amount_raw.trim_end_matches('0');
if !shell::is_json() {
sh_println!("\n==========================")?;
sh_println!("\nChain {}", provider_info.chain)?;
sh_println!("\nEstimated gas price: {} gwei", estimated_gas_price)?;
sh_println!("\nEstimated total gas used for script: {total_gas}")?;
sh_println!("\nEstimated amount required: {estimated_amount} {token_symbol}")?;
sh_println!("\n==========================")?;
} else {
sh_println!(
"{}",
serde_json::json!({
"chain": provider_info.chain,
"estimated_gas_price": estimated_gas_price,
"estimated_total_gas_used": total_gas,
"estimated_amount_required": estimated_amount,
"token_symbol": token_symbol,
})
)?;
}
}
}
let sequence = if sequences.len() == 1 {
ScriptSequenceKind::Single(sequences.pop().expect("empty sequences"))
} else {
ScriptSequenceKind::Multi(MultiChainSequence::new(
sequences,
&self.args.sig,
&self.build_data.build_data.target,
&self.script_config.config,
!self.args.broadcast,
)?)
};
Ok(BundledState {
args: self.args,
script_config: self.script_config,
script_wallets: self.script_wallets,
build_data: self.build_data,
sequence,
})
}
/// Creates a [ScriptSequence] object from the given transactions.
fn create_sequence(
&self,
multi: bool,
chain: u64,
transactions: VecDeque<TransactionWithMetadata>,
) -> Result<ScriptSequence> {
// Paths are set to None for multi-chain sequences parts, because they don't need to be
// saved to a separate file.
let paths = if multi {
None
} else {
Some(ScriptSequence::get_paths(
&self.script_config.config,
&self.args.sig,
&self.build_data.build_data.target,
chain,
!self.args.broadcast,
)?)
};
let commit = get_commit_hash(&self.script_config.config.root);
let libraries = self
.build_data
.libraries
.libs
.iter()
.flat_map(|(file, libs)| {
libs.iter()
.map(|(name, address)| format!("{}:{name}:{address}", file.to_string_lossy()))
})
.collect();
let sequence = ScriptSequence {
transactions,
returns: self.execution_artifacts.returns.clone(),
receipts: vec![],
pending: vec![],
paths,
timestamp: now().as_millis(),
libraries,
chain,
commit,
};
Ok(sequence)
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/receipts.rs | crates/script/src/receipts.rs | use alloy_chains::{Chain, NamedChain};
use alloy_network::AnyTransactionReceipt;
use alloy_primitives::{TxHash, U256, utils::format_units};
use alloy_provider::{PendingTransactionBuilder, PendingTransactionError, Provider, WatchTxError};
use eyre::{Result, eyre};
use foundry_common::{provider::RetryProvider, retry, retry::RetryError, shell};
use std::time::Duration;
/// Marker error type for pending receipts
#[derive(Debug, thiserror::Error)]
#[error(
"Received a pending receipt for {tx_hash}, but transaction is still known to the node, retrying"
)]
pub struct PendingReceiptError {
pub tx_hash: TxHash,
}
/// Convenience enum for internal signalling of transaction status
pub enum TxStatus {
Dropped,
Success(AnyTransactionReceipt),
Revert(AnyTransactionReceipt),
}
impl From<AnyTransactionReceipt> for TxStatus {
fn from(receipt: AnyTransactionReceipt) -> Self {
if !receipt.inner.inner.inner.receipt.status.coerce_status() {
Self::Revert(receipt)
} else {
Self::Success(receipt)
}
}
}
/// Checks the status of a txhash by first polling for a receipt, then for
/// mempool inclusion. Returns the tx hash, and a status
pub async fn check_tx_status(
provider: &RetryProvider,
hash: TxHash,
timeout: u64,
) -> (TxHash, Result<TxStatus, eyre::Report>) {
let result = retry::Retry::new_no_delay(3)
.run_async_until_break(|| async {
match PendingTransactionBuilder::new(provider.clone(), hash)
.with_timeout(Some(Duration::from_secs(timeout)))
.get_receipt()
.await
{
Ok(receipt) => {
// Check if the receipt is pending (missing block information)
let is_pending = receipt.block_number.is_none()
|| receipt.block_hash.is_none()
|| receipt.transaction_index.is_none();
if !is_pending {
return Ok(receipt.into());
}
// Receipt is pending, try to sleep and retry a few times
match provider.get_transaction_by_hash(hash).await {
Ok(_) => {
// Sleep for a short time to allow the transaction to be mined
tokio::time::sleep(Duration::from_millis(500)).await;
// Transaction is still known to the node, retry
Err(RetryError::Retry(PendingReceiptError { tx_hash: hash }.into()))
}
Err(_) => {
// Transaction is not known to the node, mark it as dropped
Ok(TxStatus::Dropped)
}
}
}
Err(e) => match provider.get_transaction_by_hash(hash).await {
Ok(_) => match e {
PendingTransactionError::TxWatcher(WatchTxError::Timeout) => {
Err(RetryError::Continue(eyre!(
"tx is still known to the node, waiting for receipt"
)))
}
_ => Err(RetryError::Retry(e.into())),
},
Err(_) => Ok(TxStatus::Dropped),
},
}
})
.await;
(hash, result)
}
/// Prints parts of the receipt to stdout
pub fn format_receipt(chain: Chain, receipt: &AnyTransactionReceipt) -> String {
let gas_used = receipt.gas_used;
let gas_price = receipt.effective_gas_price;
let block_number = receipt.block_number.unwrap_or_default();
let success = receipt.inner.inner.inner.receipt.status.coerce_status();
if shell::is_json() {
let _ = sh_println!(
"{}",
serde_json::json!({
"chain": chain,
"status": if success {
"success"
} else {
"failed"
},
"tx_hash": receipt.transaction_hash,
"contract_address": receipt.contract_address.map(|addr| addr.to_string()),
"block_number": block_number,
"gas_used": gas_used,
"gas_price": gas_price,
})
);
String::new()
} else {
format!(
"\n##### {chain}\n{status} Hash: {tx_hash:?}{contract_address}\nBlock: {block_number}\n{gas}\n\n",
status = if success { "✅ [Success]" } else { "❌ [Failed]" },
tx_hash = receipt.transaction_hash,
contract_address = if let Some(addr) = &receipt.contract_address {
format!("\nContract Address: {}", addr.to_checksum(None))
} else {
String::new()
},
gas = if gas_price == 0 {
format!("Gas Used: {gas_used}")
} else {
let paid = format_units((gas_used as u128).saturating_mul(gas_price), 18)
.unwrap_or_else(|_| "N/A".into());
let gas_price =
format_units(U256::from(gas_price), 9).unwrap_or_else(|_| "N/A".into());
let token_symbol = NamedChain::try_from(chain)
.unwrap_or_default()
.native_currency_symbol()
.unwrap_or("ETH");
format!(
"Paid: {} {} ({gas_used} gas * {} gwei)",
paid.trim_end_matches('0'),
token_symbol,
gas_price.trim_end_matches('0').trim_end_matches('.')
)
},
)
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/providers.rs | crates/script/src/providers.rs | use alloy_primitives::map::{HashMap, hash_map::Entry};
use alloy_provider::{Provider, utils::Eip1559Estimation};
use eyre::{Result, WrapErr};
use foundry_common::provider::{RetryProvider, get_http_provider};
use foundry_config::Chain;
use std::{ops::Deref, sync::Arc};
/// Contains a map of RPC urls to single instances of [`ProviderInfo`].
#[derive(Default)]
pub struct ProvidersManager {
pub inner: HashMap<String, ProviderInfo>,
}
impl ProvidersManager {
/// Get or initialize the RPC provider.
pub async fn get_or_init_provider(
&mut self,
rpc: &str,
is_legacy: bool,
) -> Result<&ProviderInfo> {
Ok(match self.inner.entry(rpc.to_string()) {
Entry::Occupied(entry) => entry.into_mut(),
Entry::Vacant(entry) => {
let info = ProviderInfo::new(rpc, is_legacy).await?;
entry.insert(info)
}
})
}
}
impl Deref for ProvidersManager {
type Target = HashMap<String, ProviderInfo>;
fn deref(&self) -> &Self::Target {
&self.inner
}
}
/// Holds related metadata to each provider RPC.
#[derive(Debug)]
pub struct ProviderInfo {
pub provider: Arc<RetryProvider>,
pub chain: u64,
pub gas_price: GasPrice,
}
/// Represents the outcome of a gas price request
#[derive(Debug)]
pub enum GasPrice {
Legacy(Result<u128>),
EIP1559(Result<Eip1559Estimation>),
}
impl ProviderInfo {
pub async fn new(rpc: &str, mut is_legacy: bool) -> Result<Self> {
let provider = Arc::new(get_http_provider(rpc));
let chain = provider.get_chain_id().await?;
if let Some(chain) = Chain::from(chain).named() {
is_legacy |= chain.is_legacy();
};
let gas_price = if is_legacy {
GasPrice::Legacy(
provider.get_gas_price().await.wrap_err("Failed to get legacy gas price"),
)
} else {
GasPrice::EIP1559(
provider.estimate_eip1559_fees().await.wrap_err("Failed to get EIP-1559 fees"),
)
};
Ok(Self { provider, chain, gas_price })
}
/// Returns the gas price to use
pub fn gas_price(&self) -> Result<u128> {
let res = match &self.gas_price {
GasPrice::Legacy(res) => res.as_ref(),
GasPrice::EIP1559(res) => res.as_ref().map(|res| &res.max_fee_per_gas),
};
match res {
Ok(val) => Ok(*val),
Err(err) => Err(eyre::eyre!("{}", err)),
}
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/transaction.rs | crates/script/src/transaction.rs | use super::ScriptResult;
use crate::build::LinkedBuildData;
use alloy_dyn_abi::JsonAbiExt;
use alloy_primitives::{Address, B256, TxKind, hex};
use eyre::Result;
use forge_script_sequence::TransactionWithMetadata;
use foundry_common::{ContractData, SELECTOR_LEN, TransactionMaybeSigned, fmt::format_token_raw};
use foundry_evm::traces::CallTraceDecoder;
use itertools::Itertools;
use revm_inspectors::tracing::types::CallKind;
use std::collections::BTreeMap;
#[derive(Debug)]
pub struct ScriptTransactionBuilder {
transaction: TransactionWithMetadata,
}
impl ScriptTransactionBuilder {
pub fn new(transaction: TransactionMaybeSigned, rpc: String) -> Self {
let mut transaction = TransactionWithMetadata::from_tx_request(transaction);
transaction.rpc = rpc;
// If tx.gas is already set that means it was specified in script
transaction.is_fixed_gas_limit = transaction.tx().gas().is_some();
Self { transaction }
}
/// Populate the transaction as CALL tx
pub fn set_call(
&mut self,
local_contracts: &BTreeMap<Address, &ContractData>,
decoder: &CallTraceDecoder,
create2_deployer: Address,
) -> Result<()> {
if let Some(TxKind::Call(to)) = self.transaction.transaction.to() {
if to == create2_deployer {
if let Some(input) = self.transaction.transaction.input() {
let (salt, init_code) = input.split_at(32);
self.set_create(
true,
create2_deployer.create2_from_code(B256::from_slice(salt), init_code),
local_contracts,
)?;
}
} else {
self.transaction.opcode = CallKind::Call;
self.transaction.contract_address = Some(to);
let Some(data) = self.transaction.transaction.input() else { return Ok(()) };
if data.len() < SELECTOR_LEN {
return Ok(());
}
let (selector, data) = data.split_at(SELECTOR_LEN);
let function = if let Some(info) = local_contracts.get(&to) {
// This CALL is made to a local contract.
self.transaction.contract_name = Some(info.name.clone());
info.abi.functions().find(|function| function.selector() == selector)
} else {
// This CALL is made to an external contract; try to decode it from the given
// decoder.
decoder.functions.get(selector).and_then(|v| v.first())
};
if let Some(function) = function {
self.transaction.function = Some(function.signature());
let values = function.abi_decode_input(data).inspect_err(|_| {
error!(
contract=?self.transaction.contract_name,
signature=?function,
data=hex::encode(data),
"Failed to decode function arguments",
);
})?;
self.transaction.arguments =
Some(values.iter().map(format_token_raw).collect());
}
}
}
Ok(())
}
/// Populate the transaction as CREATE tx
///
/// If this is a CREATE2 transaction this attempt to decode the arguments from the CREATE2
/// deployer's function
pub fn set_create(
&mut self,
is_create2: bool,
address: Address,
contracts: &BTreeMap<Address, &ContractData>,
) -> Result<()> {
if is_create2 {
self.transaction.opcode = CallKind::Create2;
} else {
self.transaction.opcode = CallKind::Create;
}
let info = contracts.get(&address);
self.transaction.contract_name = info.map(|info| info.name.clone());
self.transaction.contract_address = Some(address);
let Some(data) = self.transaction.transaction.input() else { return Ok(()) };
let Some(info) = info else { return Ok(()) };
let Some(bytecode) = info.bytecode() else { return Ok(()) };
// `create2` transactions are prefixed by a 32 byte salt.
let creation_code = if is_create2 {
if data.len() < 32 {
return Ok(());
}
&data[32..]
} else {
data
};
// The constructor args start after bytecode.
let contains_constructor_args = creation_code.len() > bytecode.len();
if !contains_constructor_args {
return Ok(());
}
let constructor_args = &creation_code[bytecode.len()..];
let Some(constructor) = info.abi.constructor() else { return Ok(()) };
let values = constructor.abi_decode_input(constructor_args).inspect_err(|_| {
error!(
contract=?self.transaction.contract_name,
signature=%format!("constructor({})", constructor.inputs.iter().map(|p| &p.ty).format(",")),
is_create2,
constructor_args=%hex::encode(constructor_args),
"Failed to decode constructor arguments",
);
debug!(full_data=%hex::encode(data), bytecode=%hex::encode(creation_code));
})?;
self.transaction.arguments = Some(values.iter().map(format_token_raw).collect());
Ok(())
}
/// Populates additional data from the transaction execution result.
pub fn with_execution_result(
mut self,
result: &ScriptResult,
gas_estimate_multiplier: u64,
linked_build_data: &LinkedBuildData,
) -> Self {
let mut created_contracts =
result.get_created_contracts(&linked_build_data.known_contracts);
// Add the additional contracts created in this transaction, so we can verify them later.
created_contracts.retain(|contract| {
// Filter out the contract that was created by the transaction itself.
self.transaction.contract_address != Some(contract.address)
});
self.transaction.additional_contracts = created_contracts;
if !self.transaction.is_fixed_gas_limit
&& let Some(unsigned) = self.transaction.transaction.as_unsigned_mut()
{
// We inflate the gas used by the user specified percentage
unsigned.gas = Some(result.gas_used * gas_estimate_multiplier / 100);
}
self
}
pub fn build(self) -> TransactionWithMetadata {
self.transaction
}
}
impl From<TransactionWithMetadata> for ScriptTransactionBuilder {
fn from(transaction: TransactionWithMetadata) -> Self {
Self { transaction }
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script/src/multi_sequence.rs | crates/script/src/multi_sequence.rs | use eyre::{ContextCompat, Result, WrapErr};
use forge_script_sequence::{
DRY_RUN_DIR, ScriptSequence, SensitiveScriptSequence, now, sig_to_file_name,
};
use foundry_common::{fs, shell};
use foundry_compilers::ArtifactId;
use foundry_config::Config;
use serde::{Deserialize, Serialize};
use std::{
io::{BufWriter, Write},
path::PathBuf,
};
/// Holds the sequences of multiple chain deployments.
#[derive(Clone, Default, Serialize, Deserialize)]
pub struct MultiChainSequence {
pub deployments: Vec<ScriptSequence>,
#[serde(skip)]
pub path: PathBuf,
#[serde(skip)]
pub sensitive_path: PathBuf,
pub timestamp: u128,
}
/// Sensitive values from script sequences.
#[derive(Clone, Default, Serialize, Deserialize)]
pub struct SensitiveMultiChainSequence {
pub deployments: Vec<SensitiveScriptSequence>,
}
impl SensitiveMultiChainSequence {
fn from_multi_sequence(sequence: &MultiChainSequence) -> Self {
Self {
deployments: sequence.deployments.iter().map(SensitiveScriptSequence::from).collect(),
}
}
}
impl MultiChainSequence {
pub fn new(
deployments: Vec<ScriptSequence>,
sig: &str,
target: &ArtifactId,
config: &Config,
dry_run: bool,
) -> Result<Self> {
let (path, sensitive_path) = Self::get_paths(config, sig, target, dry_run)?;
Ok(Self { deployments, path, sensitive_path, timestamp: now().as_millis() })
}
/// Gets paths in the formats
/// ./broadcast/multi/contract_filename[-timestamp]/sig.json and
/// ./cache/multi/contract_filename[-timestamp]/sig.json
pub fn get_paths(
config: &Config,
sig: &str,
target: &ArtifactId,
dry_run: bool,
) -> Result<(PathBuf, PathBuf)> {
let mut broadcast = config.broadcast.to_path_buf();
let mut cache = config.cache_path.to_path_buf();
let mut common = PathBuf::new();
common.push("multi");
if dry_run {
common.push(DRY_RUN_DIR);
}
let target_fname = target
.source
.file_name()
.wrap_err_with(|| format!("No filename for {:?}", target.source))?
.to_string_lossy();
common.push(format!("{target_fname}-latest"));
broadcast.push(common.clone());
cache.push(common);
fs::create_dir_all(&broadcast)?;
fs::create_dir_all(&cache)?;
let filename = format!("{}.json", sig_to_file_name(sig));
broadcast.push(filename.clone());
cache.push(filename);
Ok((broadcast, cache))
}
/// Loads the sequences for the multi chain deployment.
pub fn load(config: &Config, sig: &str, target: &ArtifactId, dry_run: bool) -> Result<Self> {
let (path, sensitive_path) = Self::get_paths(config, sig, target, dry_run)?;
let mut sequence: Self = foundry_compilers::utils::read_json_file(&path)
.wrap_err("Multi-chain deployment not found.")?;
let sensitive_sequence: SensitiveMultiChainSequence =
foundry_compilers::utils::read_json_file(&sensitive_path)
.wrap_err("Multi-chain deployment sensitive details not found.")?;
sequence.deployments.iter_mut().enumerate().for_each(|(i, sequence)| {
sequence.fill_sensitive(&sensitive_sequence.deployments[i]);
});
sequence.path = path;
sequence.sensitive_path = sensitive_path;
Ok(sequence)
}
/// Saves the transactions as file if it's a standalone deployment.
pub fn save(&mut self, silent: bool, save_ts: bool) -> Result<()> {
self.deployments.iter_mut().for_each(|sequence| sequence.sort_receipts());
self.timestamp = now().as_millis();
let sensitive_sequence = SensitiveMultiChainSequence::from_multi_sequence(&*self);
// broadcast writes
//../Contract-latest/run.json
let mut writer = BufWriter::new(fs::create_file(&self.path)?);
serde_json::to_writer_pretty(&mut writer, &self)?;
writer.flush()?;
if save_ts {
//../Contract-[timestamp]/run.json
let path = self.path.to_string_lossy();
let file = PathBuf::from(&path.replace("-latest", &format!("-{}", self.timestamp)));
fs::create_dir_all(file.parent().unwrap())?;
fs::copy(&self.path, &file)?;
}
// cache writes
//../Contract-latest/run.json
let mut writer = BufWriter::new(fs::create_file(&self.sensitive_path)?);
serde_json::to_writer_pretty(&mut writer, &sensitive_sequence)?;
writer.flush()?;
if save_ts {
//../Contract-[timestamp]/run.json
let path = self.sensitive_path.to_string_lossy();
let file = PathBuf::from(&path.replace("-latest", &format!("-{}", self.timestamp)));
fs::create_dir_all(file.parent().unwrap())?;
fs::copy(&self.sensitive_path, &file)?;
}
if !silent {
if shell::is_json() {
sh_println!(
"{}",
serde_json::json!({
"status": "success",
"transactions": self.path.display().to_string(),
"sensitive": self.sensitive_path.display().to_string(),
})
)?;
} else {
sh_println!("\nTransactions saved to: {}\n", self.path.display())?;
sh_println!("Sensitive details saved to: {}\n", self.sensitive_path.display())?;
}
}
Ok(())
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/sol-macro-gen/src/lib.rs | crates/sol-macro-gen/src/lib.rs | //! This crate contains the logic for Rust bindings generating from Solidity contracts
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
pub mod sol_macro_gen;
pub use sol_macro_gen::*;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/sol-macro-gen/src/sol_macro_gen.rs | crates/sol-macro-gen/src/sol_macro_gen.rs | //! SolMacroGen and MultiSolMacroGen
//!
//! This type encapsulates the logic for expansion of a Rust TokenStream from Solidity tokens. It
//! uses the `expand` method from `alloy_sol_macro_expander` underneath.
//!
//! It holds info such as `path` to the ABI file, `name` of the file and the rust binding being
//! generated, and lastly the `expansion` itself, i.e the Rust binding for the provided ABI.
//!
//! It contains methods to read the json abi, generate rust bindings from the abi and ultimately
//! write the bindings to a crate or modules.
use alloy_sol_macro_expander::expand::expand;
use alloy_sol_macro_input::{SolInput, SolInputKind};
use eyre::{Context, OptionExt, Result};
use foundry_common::fs;
use proc_macro2::{Span, TokenStream};
use std::{
fmt::Write,
path::{Path, PathBuf},
str::FromStr,
};
use heck::ToSnakeCase;
pub struct SolMacroGen {
pub path: PathBuf,
pub name: String,
pub expansion: Option<TokenStream>,
}
impl SolMacroGen {
pub fn new(path: PathBuf, name: String) -> Self {
Self { path, name, expansion: None }
}
pub fn get_sol_input(&self) -> Result<SolInput> {
let path = self.path.to_string_lossy().into_owned();
let name = proc_macro2::Ident::new(&self.name, Span::call_site());
let tokens = quote::quote! {
#[sol(ignore_unlinked)]
#name,
#path
};
let sol_input: SolInput = syn::parse2(tokens).wrap_err("failed to parse input")?;
Ok(sol_input)
}
}
pub struct MultiSolMacroGen {
pub instances: Vec<SolMacroGen>,
}
impl MultiSolMacroGen {
pub fn new(instances: Vec<SolMacroGen>) -> Self {
Self { instances }
}
pub fn populate_expansion(&mut self, bindings_path: &Path) -> Result<()> {
for instance in &mut self.instances {
let path = bindings_path.join(format!("{}.rs", instance.name.to_snake_case()));
let expansion = fs::read_to_string(path).wrap_err("Failed to read file")?;
let tokens = TokenStream::from_str(&expansion)
.map_err(|e| eyre::eyre!("Failed to parse TokenStream: {e}"))?;
instance.expansion = Some(tokens);
}
Ok(())
}
pub fn generate_bindings(&mut self, all_derives: bool) -> Result<()> {
for instance in &mut self.instances {
Self::generate_binding(instance, all_derives).wrap_err_with(|| {
format!(
"failed to generate bindings for {}:{}",
instance.path.display(),
instance.name
)
})?;
}
Ok(())
}
    /// Expands a single instance's `sol!` input and caches the result on it.
    ///
    /// Attaches the `#[sol(...)]` attribute configuring RPC bindings and (when
    /// `all_derives` is set) serde derives before expanding.
    fn generate_binding(instance: &mut SolMacroGen, all_derives: bool) -> Result<()> {
        let input = instance.get_sol_input()?.normalize_json()?;
        let SolInput { attrs: _, path: _, kind } = input;
        let tokens = match kind {
            SolInputKind::Sol(mut file) => {
                let sol_attr: syn::Attribute = if all_derives {
                    syn::parse_quote! {
                        #[sol(rpc, alloy_sol_types = alloy::sol_types, alloy_contract =
                        alloy::contract, all_derives = true, extra_derives(serde::Serialize,
                        serde::Deserialize))] }
                } else {
                    syn::parse_quote! {
                        #[sol(rpc, alloy_sol_types = alloy::sol_types, alloy_contract =
                        alloy::contract)] }
                };
                file.attrs.push(sol_attr);
                expand(file).wrap_err("failed to expand")?
            }
            // `normalize_json()` above converts JSON inputs into the `Sol`
            // variant, so other kinds should not reach this point.
            _ => unreachable!(),
        };
        instance.expansion = Some(tokens);
        Ok(())
    }
    /// Writes the expanded bindings out as a standalone crate at `bindings_path`.
    ///
    /// Generates a `Cargo.toml` (name/version, optional description and SPDX
    /// license, plus the `alloy` dependency) and `src/lib.rs`; contracts go
    /// either into `lib.rs` itself (`single_file`) or one module per contract.
    #[allow(clippy::too_many_arguments)]
    pub fn write_to_crate(
        &mut self,
        name: &str,
        version: &str,
        description: &str,
        license: &str,
        bindings_path: &Path,
        single_file: bool,
        alloy_version: Option<String>,
        alloy_rev: Option<String>,
        all_derives: bool,
    ) -> Result<()> {
        self.generate_bindings(all_derives)?;
        let src = bindings_path.join("src");
        fs::create_dir_all(&src)?;
        // Write Cargo.toml
        let cargo_toml_path = bindings_path.join("Cargo.toml");
        let mut toml_contents = format!(
            r#"[package]
name = "{name}"
version = "{version}"
edition = "2021"
"#
        );
        if !description.is_empty() {
            toml_contents.push_str(&format!("description = \"{description}\"\n"));
        }
        if !license.is_empty() {
            // Comma-separated license aliases are normalized individually and
            // joined with the SPDX `OR` operator.
            let formatted_licenses: Vec<String> =
                license.split(',').map(Self::parse_license_alias).collect();
            let formatted_license = formatted_licenses.join(" OR ");
            toml_contents.push_str(&format!("license = \"{formatted_license}\"\n"));
        }
        toml_contents.push_str("\n[dependencies]\n");
        let alloy_dep = Self::get_alloy_dep(alloy_version, alloy_rev);
        write!(toml_contents, "{alloy_dep}")?;
        // serde is only required when the extra Serialize/Deserialize derives
        // were emitted into the bindings.
        if all_derives {
            let serde_dep = r#"serde = { version = "1.0", features = ["derive"] }"#;
            write!(toml_contents, "\n{serde_dep}")?;
        }
        fs::write(cargo_toml_path, toml_contents).wrap_err("Failed to write Cargo.toml")?;
        let mut lib_contents = String::new();
        write!(
            &mut lib_contents,
            r#"#![allow(unused_imports, unused_attributes, clippy::all, rustdoc::all)]
//! This module contains the sol! generated bindings for solidity contracts.
//! This is autogenerated code.
//! Do not manually edit these files.
//! These files may be overwritten by the codegen system at any time.
"#
        )?;
        // Write src
        let parse_error = |name: &str| {
            format!("failed to parse generated tokens as an AST for {name};\nthis is likely a bug")
        };
        for instance in &self.instances {
            // `generate_bindings` above guarantees the expansion is populated.
            let contents = instance.expansion.as_ref().unwrap();
            let name = instance.name.to_snake_case();
            let path = src.join(format!("{name}.rs"));
            let file = syn::parse2(contents.clone())
                .wrap_err_with(|| parse_error(&format!("{}:{}", path.display(), name)))?;
            let contents = prettyplease::unparse(&file);
            if single_file {
                write!(&mut lib_contents, "{contents}")?;
            } else {
                fs::write(path, contents).wrap_err("failed to write to file")?;
                write_mod_name(&mut lib_contents, &name)?;
            }
        }
        // Re-parse and pretty-print the assembled lib.rs before writing it out.
        let lib_path = src.join("lib.rs");
        let lib_file = syn::parse_file(&lib_contents).wrap_err_with(|| parse_error("lib.rs"))?;
        let lib_contents = prettyplease::unparse(&lib_file);
        fs::write(lib_path, lib_contents).wrap_err("Failed to write lib.rs")?;
        Ok(())
    }
/// Attempts to detect the appropriate license.
pub fn parse_license_alias(license: &str) -> String {
match license.trim().to_lowercase().as_str() {
"mit" => "MIT".to_string(),
"apache" | "apache2" | "apache20" | "apache2.0" => "Apache-2.0".to_string(),
"gpl" | "gpl3" => "GPL-3.0".to_string(),
"lgpl" | "lgpl3" => "LGPL-3.0".to_string(),
"agpl" | "agpl3" => "AGPL-3.0".to_string(),
"bsd" | "bsd3" => "BSD-3-Clause".to_string(),
"bsd2" => "BSD-2-Clause".to_string(),
"mpl" | "mpl2" => "MPL-2.0".to_string(),
"isc" => "ISC".to_string(),
"unlicense" => "Unlicense".to_string(),
_ => license.trim().to_string(),
}
}
    /// Writes the expanded bindings out as a module (`mod.rs` plus optional
    /// per-contract files) at `bindings_path`, without a crate manifest.
    pub fn write_to_module(
        &mut self,
        bindings_path: &Path,
        single_file: bool,
        all_derives: bool,
    ) -> Result<()> {
        self.generate_bindings(all_derives)?;
        fs::create_dir_all(bindings_path)?;
        let mut mod_contents =
            r#"#![allow(unused_imports, unused_attributes, clippy::all, rustdoc::all)]
//! This module contains the sol! generated bindings for solidity contracts.
//! This is autogenerated code.
//! Do not manually edit these files.
//! These files may be overwritten by the codegen system at any time.
"#
        .to_string();
        for instance in &self.instances {
            let name = instance.name.to_snake_case();
            if !single_file {
                // Module
                // One file per contract, declared from `mod.rs` and
                // pretty-printed through syn + prettyplease.
                write_mod_name(&mut mod_contents, &name)?;
                let mut contents = String::new();
                write!(contents, "{}", instance.expansion.as_ref().unwrap())?;
                let file = syn::parse_file(&contents)?;
                let contents = prettyplease::unparse(&file);
                fs::write(bindings_path.join(format!("{name}.rs")), contents)
                    .wrap_err("Failed to write file")?;
            } else {
                // Single File
                // All expansions concatenated into `mod.rs` itself.
                let mut contents = String::new();
                write!(contents, "{}\n\n", instance.expansion.as_ref().unwrap())?;
                write!(mod_contents, "{contents}")?;
            }
        }
        // Normalize the assembled mod.rs before writing it out.
        let mod_path = bindings_path.join("mod.rs");
        let mod_file = syn::parse_file(&mod_contents)?;
        let mod_contents = prettyplease::unparse(&mod_file);
        fs::write(mod_path, mod_contents).wrap_err("Failed to write mod.rs")?;
        Ok(())
    }
    /// Checks that the generated bindings are up to date with the latest version of
    /// `sol!`.
    ///
    /// Returns `Ok(())` if the generated bindings are up to date, otherwise it returns
    /// `Err(_)`.
    #[expect(clippy::too_many_arguments)]
    pub fn check_consistency(
        &self,
        name: &str,
        version: &str,
        crate_path: &Path,
        single_file: bool,
        check_cargo_toml: bool,
        is_mod: bool,
        alloy_version: Option<String>,
        alloy_rev: Option<String>,
    ) -> Result<()> {
        if check_cargo_toml && !is_mod {
            self.check_cargo_toml(name, version, crate_path, alloy_version, alloy_rev)?;
        }
        // Rebuild the expected `lib.rs`/`mod.rs` header while walking the instances.
        let mut super_contents = String::new();
        write!(
            &mut super_contents,
            r#"#![allow(unused_imports, unused_attributes, clippy::all, rustdoc::all)]
//! This module contains the sol! generated bindings for solidity contracts.
//! This is autogenerated code.
//! Do not manually edit these files.
//! These files may be overwritten by the codegen system at any time.
"#
        )?;
        // NOTE: single-file layouts are only checked via Cargo.toml above.
        if !single_file {
            for instance in &self.instances {
                let name = instance.name.to_snake_case();
                // Module layout keeps files next to `mod.rs`; crate layout puts
                // them under `src/`.
                let path = if is_mod {
                    crate_path.join(format!("{name}.rs"))
                } else {
                    crate_path.join(format!("src/{name}.rs"))
                };
                let tokens = instance
                    .expansion
                    .as_ref()
                    .ok_or_eyre(format!("TokenStream for {path:?} does not exist"))?
                    .to_string();
                self.check_file_contents(&path, &tokens)?;
                write_mod_name(&mut super_contents, &name)?;
            }
            let super_path =
                if is_mod { crate_path.join("mod.rs") } else { crate_path.join("src/lib.rs") };
            self.check_file_contents(&super_path, &super_contents)?;
        }
        Ok(())
    }
fn check_file_contents(&self, file_path: &Path, expected_contents: &str) -> Result<()> {
eyre::ensure!(file_path.is_file(), "{} is not a file", file_path.display());
let file_contents = &fs::read_to_string(file_path).wrap_err("Failed to read file")?;
// Format both
let file_contents = syn::parse_file(file_contents)?;
let formatted_file = prettyplease::unparse(&file_contents);
let expected_contents = syn::parse_file(expected_contents)?;
let formatted_exp = prettyplease::unparse(&expected_contents);
eyre::ensure!(
formatted_file == formatted_exp,
"File contents do not match expected contents for {file_path:?}"
);
Ok(())
}
fn check_cargo_toml(
&self,
name: &str,
version: &str,
crate_path: &Path,
alloy_version: Option<String>,
alloy_rev: Option<String>,
) -> Result<()> {
eyre::ensure!(crate_path.is_dir(), "Crate path must be a directory");
let cargo_toml_path = crate_path.join("Cargo.toml");
eyre::ensure!(cargo_toml_path.is_file(), "Cargo.toml must exist");
let cargo_toml_contents =
fs::read_to_string(cargo_toml_path).wrap_err("Failed to read Cargo.toml")?;
let name_check = format!("name = \"{name}\"");
let version_check = format!("version = \"{version}\"");
let alloy_dep_check = Self::get_alloy_dep(alloy_version, alloy_rev);
let toml_consistent = cargo_toml_contents.contains(&name_check)
&& cargo_toml_contents.contains(&version_check)
&& cargo_toml_contents.contains(&alloy_dep_check);
eyre::ensure!(
toml_consistent,
r#"The contents of Cargo.toml do not match the expected output of the latest `sol!` version.
This indicates that the existing bindings are outdated and need to be generated again."#
);
Ok(())
}
/// Returns the `alloy` dependency string for the Cargo.toml file.
/// If `alloy_version` is provided, it will use that version from crates.io.
/// If `alloy_rev` is provided, it will use that revision from the GitHub repository.
fn get_alloy_dep(alloy_version: Option<String>, alloy_rev: Option<String>) -> String {
if let Some(alloy_version) = alloy_version {
format!(
r#"alloy = {{ version = "{alloy_version}", features = ["sol-types", "contract"] }}"#,
)
} else if let Some(alloy_rev) = alloy_rev {
format!(
r#"alloy = {{ git = "https://github.com/alloy-rs/alloy", rev = "{alloy_rev}", features = ["sol-types", "contract"] }}"#,
)
} else {
r#"alloy = { version = "1.0", features = ["sol-types", "contract"] }"#.to_string()
}
}
}
/// Appends a `pub mod` declaration for `name` to `contents`.
///
/// Falls back to a raw identifier (`r#…`) when `name` is not a valid bare
/// identifier (e.g. it is a Rust keyword such as `type`).
fn write_mod_name(contents: &mut String, name: &str) -> Result<()> {
    // NOTE: this previously parsed the whole `pub mod {name};` string as a
    // `syn::Ident`, which can never succeed (an `Ident` is a single token),
    // so the raw-identifier branch was taken unconditionally. Parsing just
    // the name restores the intended keyword check.
    if syn::parse_str::<syn::Ident>(name).is_ok() {
        write!(contents, "pub mod {name};")?;
    } else {
        write!(contents, "pub mod r#{name};")?;
    }
    Ok(())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/fmt/src/lib.rs | crates/fmt/src/lib.rs | #![doc = include_str!("../README.md")]
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
/// Global formatter debug logging, enabled at build time via the `FMT_DEBUG` env var.
/// (The redundant `false ||` toggle was removed; set `FMT_DEBUG` to enable.)
const DEBUG: bool = option_env!("FMT_DEBUG").is_some();
/// Indentation-tracking debug logging; currently tied to the same `FMT_DEBUG`
/// build-time env var as [`DEBUG`].
const DEBUG_INDENT: bool = option_env!("FMT_DEBUG").is_some();
use foundry_common::comments::{
Comment, Comments,
inline_config::{InlineConfig, InlineConfigItem},
};
mod state;
mod pp;
use solar::{
parse::{
ast::{SourceUnit, Span},
interface::{Session, diagnostics::EmittedDiagnostics, source_map::SourceFile},
},
sema::{Compiler, Gcx, Source},
};
use std::{path::Path, sync::Arc};
pub use foundry_config::fmt::*;
/// The result of the formatter.
pub type FormatterResult = DiagnosticsResult<String, EmittedDiagnostics>;
/// Outcome of an operation that may produce a value, diagnostics, or both.
#[derive(Debug)]
pub enum DiagnosticsResult<T, E> {
    /// Everything went well.
    Ok(T),
    /// No errors encountered, but warnings or other non-error diagnostics were emitted.
    OkWithDiagnostics(T, E),
    /// Errors encountered, but a result was produced anyway.
    ErrRecovered(T, E),
    /// Fatal errors encountered.
    Err(E),
}
impl<T, E> DiagnosticsResult<T, E> {
    /// Converts the formatter result into a standard result.
    ///
    /// This ignores any non-error diagnostics if `Ok`, and any valid result if `Err`.
    pub fn into_result(self) -> Result<T, E> {
        match self {
            Self::Ok(value) => Ok(value),
            Self::OkWithDiagnostics(value, _) => Ok(value),
            Self::ErrRecovered(_, diag) => Err(diag),
            Self::Err(diag) => Err(diag),
        }
    }

    /// Returns the result, even if it was produced with errors.
    pub fn into_ok(self) -> Result<T, E> {
        match self {
            Self::Err(diag) => Err(diag),
            Self::Ok(value) | Self::OkWithDiagnostics(value, _) | Self::ErrRecovered(value, _) => {
                Ok(value)
            }
        }
    }

    /// Returns any result produced.
    pub fn ok_ref(&self) -> Option<&T> {
        match self {
            Self::Err(_) => None,
            Self::Ok(value) | Self::OkWithDiagnostics(value, _) | Self::ErrRecovered(value, _) => {
                Some(value)
            }
        }
    }

    /// Returns any diagnostics emitted.
    pub fn err_ref(&self) -> Option<&E> {
        match self {
            Self::Ok(_) => None,
            Self::OkWithDiagnostics(_, diag) | Self::ErrRecovered(_, diag) | Self::Err(diag) => {
                Some(diag)
            }
        }
    }

    /// Returns `true` if the result is `Ok`.
    pub fn is_ok(&self) -> bool {
        matches!(self, Self::Ok(_) | Self::OkWithDiagnostics(..))
    }

    /// Returns `true` if the result is `Err`.
    pub fn is_err(&self) -> bool {
        !self.is_ok()
    }
}
/// Formats the Solidity file at `path`, loading it through the session's source map.
pub fn format_file(
    path: &Path,
    config: Arc<FormatterConfig>,
    compiler: &mut Compiler,
) -> FormatterResult {
    format_inner(config, compiler, &|sess| {
        sess.source_map().load_file(path).map_err(|e| sess.dcx.err(e.to_string()).emit())
    })
}
/// Formats an in-memory Solidity source string.
///
/// `path` is only used to name the virtual source file (falling back to
/// `Stdin` when absent); nothing is read from or written to disk.
pub fn format_source(
    source: &str,
    path: Option<&Path>,
    config: Arc<FormatterConfig>,
    compiler: &mut Compiler,
) -> FormatterResult {
    format_inner(config, compiler, &|sess| {
        let name = match path {
            Some(path) => solar::parse::interface::source_map::FileName::Real(path.to_path_buf()),
            None => solar::parse::interface::source_map::FileName::Stdin,
        };
        sess.source_map()
            .new_source_file(name, source)
            .map_err(|e| sess.dcx.err(e.to_string()).emit())
    })
}
/// Format a string input with the default compiler.
///
/// Convenience wrapper around [`format_source`] that builds a fresh
/// buffer-emitting session/compiler.
pub fn format(source: &str, config: FormatterConfig) -> FormatterResult {
    let mut compiler = Compiler::new(
        solar::interface::Session::builder().with_buffer_emitter(Default::default()).build(),
    );
    format_source(source, None, Arc::new(config), &mut compiler)
}
/// Runs the formatter twice and asserts idempotency: the second pass over the
/// first pass's output must reproduce it exactly, otherwise this panics.
fn format_inner(
    config: Arc<FormatterConfig>,
    compiler: &mut Compiler,
    mk_file: &(dyn Fn(&Session) -> solar::parse::interface::Result<Arc<SourceFile>> + Sync + Send),
) -> FormatterResult {
    // First pass formatting
    let first_result = format_once(config.clone(), compiler, mk_file);
    // If first pass was not successful, return the result
    if first_result.is_err() {
        return first_result;
    }
    let Some(first_formatted) = first_result.ok_ref() else { return first_result };
    // Second pass formatting
    let second_result = format_once(config, compiler, &|sess| {
        // Need a new name since we can't overwrite the original file.
        let prev_sf = mk_file(sess)?;
        let new_name = match &prev_sf.name {
            solar::interface::source_map::FileName::Real(path) => {
                path.with_extension("again.sol").into()
            }
            solar::interface::source_map::FileName::Stdin => {
                solar::interface::source_map::FileName::Custom("stdin-again".to_string())
            }
            solar::interface::source_map::FileName::Custom(name) => {
                solar::interface::source_map::FileName::Custom(format!("{name}-again"))
            }
        };
        sess.source_map()
            .new_source_file(new_name, first_formatted)
            .map_err(|e| sess.dcx.err(e.to_string()).emit())
    });
    // Check if the two passes produce the same output (idempotency)
    match (first_result.ok_ref(), second_result.ok_ref()) {
        (Some(first), Some(second)) if first != second => {
            panic!("formatter is not idempotent:\n{}", diff(first, second));
        }
        _ => {}
    }
    // A second-pass failure on first-pass output is a formatter bug; only
    // tolerated when FMT_DEBUG is set, in which case the first result is kept.
    if first_result.is_ok() && second_result.is_err() && !DEBUG {
        panic!(
            "failed to format a second time:\nfirst_result={first_result:#?}\nsecond_result={second_result:#?}"
        );
    } else {
        first_result
    }
}
/// Renders a line-based diff between `first` and `second`, prefixing each line
/// with `-`, `+` or a space.
fn diff(first: &str, second: &str) -> impl std::fmt::Display {
    use std::fmt::Write;
    let changes = similar::TextDiff::from_lines(first, second);
    let mut rendered = String::new();
    for change in changes.iter_all_changes() {
        let sign = match change.tag() {
            similar::ChangeTag::Delete => "-",
            similar::ChangeTag::Insert => "+",
            similar::ChangeTag::Equal => " ",
        };
        write!(rendered, "{sign}{change}").unwrap();
    }
    rendered
}
/// Performs a single formatting pass: parses the file produced by `mk_file`
/// (without resolving imports) and pretty-prints its AST.
fn format_once(
    config: Arc<FormatterConfig>,
    compiler: &mut Compiler,
    mk_file: &(
        dyn Fn(&solar::interface::Session) -> solar::interface::Result<Arc<SourceFile>>
            + Send
            + Sync
    ),
) -> FormatterResult {
    let res = compiler.enter_mut(|c| -> solar::interface::Result<String> {
        let mut pcx = c.parse();
        pcx.set_resolve_imports(false);
        let file = mk_file(c.sess())?;
        pcx.add_file(file.clone());
        pcx.parse();
        c.dcx().has_errors()?;
        let gcx = c.gcx();
        let (_, source) = gcx.get_ast_source(&file.name).unwrap();
        Ok(format_ast(gcx, source, config).expect("unable to format AST"))
    });
    let diagnostics = compiler.sess().dcx.emitted_diagnostics().unwrap();
    // Classify the outcome by combining the formatting result with the
    // session's error state and any diagnostics emitted along the way.
    match (res, compiler.sess().dcx.has_errors()) {
        (Ok(s), Ok(())) if diagnostics.is_empty() => FormatterResult::Ok(s),
        (Ok(s), Ok(())) => FormatterResult::OkWithDiagnostics(s, diagnostics),
        (Ok(s), Err(_)) => FormatterResult::ErrRecovered(s, diagnostics),
        (Err(_), Ok(_)) => unreachable!(),
        (Err(_), Err(_)) => FormatterResult::Err(diagnostics),
    }
}
// A parallel-safe "worker" function.
/// Pretty-prints the parsed AST of `source` using `config`.
///
/// Returns `None` when the source has no AST (e.g. parsing failed earlier).
pub fn format_ast<'ast>(
    gcx: Gcx<'ast>,
    source: &'ast Source<'ast>,
    config: Arc<FormatterConfig>,
) -> Option<String> {
    // Collect comments up front; the tab width is only passed along in
    // tab-indentation mode.
    let comments = Comments::new(
        &source.file,
        gcx.sess.source_map(),
        true,
        config.wrap_comments,
        if matches!(config.style, IndentStyle::Tab) { Some(config.tab_width) } else { None },
    );
    let ast = source.ast.as_ref()?;
    let inline_config = parse_inline_config(gcx.sess, &comments, ast);
    let mut state = state::State::new(gcx.sess.source_map(), config, inline_config, comments);
    state.print_source_unit(ast);
    Some(state.s.eof())
}
/// Extracts `forgefmt:` inline configuration items from the source comments.
///
/// Both the first and (for multi-line comments) the last line of each comment
/// are scanned; malformed items produce a warning rather than an error.
fn parse_inline_config<'ast>(
    sess: &Session,
    comments: &Comments,
    ast: &'ast SourceUnit<'ast>,
) -> InlineConfig<()> {
    let parse_item = |mut item: &str, cmnt: &Comment| -> Option<(Span, InlineConfigItem<()>)> {
        // Strip the comment delimiters before looking for the directive.
        if let Some(prefix) = cmnt.prefix() {
            item = item.strip_prefix(prefix).unwrap_or(item);
        }
        if let Some(suffix) = cmnt.suffix() {
            item = item.strip_suffix(suffix).unwrap_or(item);
        }
        let item = item.trim_start().strip_prefix("forgefmt:")?.trim();
        match item.parse::<InlineConfigItem<()>>() {
            Ok(item) => Some((cmnt.span, item)),
            Err(e) => {
                // Invalid directives are reported but never abort formatting.
                sess.dcx.warn(e.to_string()).span(cmnt.span).emit();
                None
            }
        }
    };
    let items = comments.iter().flat_map(|cmnt| {
        let mut found_items = Vec::with_capacity(2);
        // Always process the first line.
        if let Some(line) = cmnt.lines.first()
            && let Some(item) = parse_item(line, cmnt)
        {
            found_items.push(item);
        }
        // If the comment has more than one line, process the last line.
        if cmnt.lines.len() > 1
            && let Some(line) = cmnt.lines.last()
            && let Some(item) = parse_item(line, cmnt)
        {
            found_items.push(item);
        }
        found_items
    });
    InlineConfig::from_ast(items, ast, sess.source_map())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/fmt/src/pp/helpers.rs | crates/fmt/src/pp/helpers.rs | use super::{Printer, Token};
use std::borrow::Cow;
impl Printer {
    /// Emits `w` followed by a breakable space.
    pub fn word_space(&mut self, w: impl Into<Cow<'static, str>>) {
        self.word(w);
        self.space();
    }

    /// Adds a new hardbreak if not at the beginning of the line.
    /// If there was a buffered break token, replaces it (ensures hardbreak) keeping the offset.
    pub fn hardbreak_if_not_bol(&mut self) {
        if self.is_bol_or_only_ind() {
            return;
        }
        if let Some(Token::Break(buffered)) = self.last_token_still_buffered()
            && buffered.offset != 0
        {
            self.replace_last_token_still_buffered(Self::hardbreak_tok_offset(buffered.offset));
            return;
        }
        self.hardbreak();
    }

    /// Emits a breakable space unless already at the beginning of a line.
    pub fn space_if_not_bol(&mut self) {
        if self.is_bol_or_only_ind() {
            return;
        }
        self.space();
    }

    /// Emits a non-breaking space.
    pub fn nbsp(&mut self) {
        self.word(" ");
    }

    /// Emits a breakable space when `breaks` is true, otherwise a non-breaking one.
    pub fn space_or_nbsp(&mut self, breaks: bool) {
        match breaks {
            true => self.space(),
            false => self.nbsp(),
        }
    }

    /// Emits `w` followed by a non-breaking space.
    pub fn word_nbsp(&mut self, w: impl Into<Cow<'static, str>>) {
        self.word(w);
        self.nbsp();
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/fmt/src/pp/convenience.rs | crates/fmt/src/pp/convenience.rs | use super::{BeginToken, BreakToken, Breaks, IndentStyle, Printer, SIZE_INFINITY, Token};
use std::borrow::Cow;
impl Printer {
    /// "raw box"
    pub fn rbox(&mut self, indent: isize, breaks: Breaks) {
        self.scan_begin(BeginToken { indent: IndentStyle::Block { offset: indent }, breaks });
    }
    /// Inconsistent breaking box
    pub fn ibox(&mut self, indent: isize) {
        self.rbox(indent, Breaks::Inconsistent);
    }
    /// Consistent breaking box
    pub fn cbox(&mut self, indent: isize) {
        self.rbox(indent, Breaks::Consistent);
    }
    /// Opens a consistent box that aligns visually under the current column.
    pub fn visual_align(&mut self) {
        self.scan_begin(BeginToken { indent: IndentStyle::Visual, breaks: Breaks::Consistent });
    }
    /// Emits a break of `n` blank spaces with an extra indent offset of `off`.
    pub fn break_offset(&mut self, n: usize, off: isize) {
        self.scan_break(BreakToken { offset: off, blank_space: n, ..BreakToken::default() });
    }
    /// Closes the innermost open box.
    pub fn end(&mut self) {
        self.scan_end();
    }
    /// Flushes all pending tokens and returns the accumulated output.
    pub fn eof(mut self) -> String {
        self.scan_eof();
        self.out
    }
    /// Emits literal text.
    pub fn word(&mut self, w: impl Into<Cow<'static, str>>) {
        self.scan_string(w.into());
    }
    // Shared implementation for the break emitters below.
    fn spaces(&mut self, n: usize) {
        self.break_offset(n, 0);
    }
    /// A break that renders as nothing when the line does not break.
    pub fn zerobreak(&mut self) {
        self.spaces(0);
    }
    /// A break that renders as a single space when the line does not break.
    pub fn space(&mut self) {
        self.spaces(1);
    }
    /// An unconditional line break (its "width" can never fit on a line).
    pub fn hardbreak(&mut self) {
        self.spaces(SIZE_INFINITY as usize);
    }
    /// Whether the most recent token is a neverbreak break.
    pub fn last_token_is_neverbreak(&self) -> bool {
        if let Some(token) = self.last_token() {
            return token.is_neverbreak();
        }
        false
    }
    /// Whether the most recent token is any kind of break.
    pub fn last_token_is_break(&self) -> bool {
        if let Some(token) = self.last_token() {
            return matches!(token, Token::Break(_));
        }
        false
    }
    /// Whether the pending output currently ends with a space, either as a
    /// buffered space token or as literal text already printed.
    pub fn last_token_is_space(&self) -> bool {
        if let Some(token) = self.last_token()
            && token.is_space()
        {
            return true;
        }
        self.out.ends_with(" ")
    }
    /// Whether the printer is at the beginning of a line, judging by the last
    /// token if any, otherwise by the printed output itself.
    pub fn is_beginning_of_line(&self) -> bool {
        match self.last_token() {
            Some(last_token) => last_token.is_hardbreak(),
            None => self.out.is_empty() || self.out.ends_with('\n'),
        }
    }
    /// Attempts to identify whether the current position is:
    /// 1. the beginning of a line (empty)
    /// 2. a line with only indentation (just whitespaces)
    ///
    /// NOTE: this is still an educated guess, based on a heuristic.
    pub fn is_bol_or_only_ind(&self) -> bool {
        // Walk the buffered tokens backwards: a hardbreak means a fresh line,
        // any visible content means we are mid-line.
        for i in self.buf.index_range().rev() {
            let token = &self.buf[i].token;
            if token.is_hardbreak() {
                return true;
            }
            if Self::token_has_non_whitespace_content(token) {
                return false;
            }
        }
        // Buffer is inconclusive: inspect the last printed line directly.
        let last_line =
            if let Some(pos) = self.out.rfind('\n') { &self.out[pos + 1..] } else { &self.out[..] };
        last_line.trim().is_empty()
    }
    /// Whether `token` would render any non-whitespace characters.
    fn token_has_non_whitespace_content(token: &Token) -> bool {
        match token {
            Token::String(s) => !s.trim().is_empty(),
            Token::Break(BreakToken { pre_break: Some(s), .. }) => !s.trim().is_empty(),
            _ => false,
        }
    }
    /// Builds a hardbreak token carrying the given indent offset.
    pub(crate) fn hardbreak_tok_offset(offset: isize) -> Token {
        Token::Break(BreakToken {
            offset,
            blank_space: SIZE_INFINITY as usize,
            ..BreakToken::default()
        })
    }
    /// A hardbreak that is dropped when its box ends up empty.
    pub fn hardbreak_if_nonempty(&mut self) {
        self.scan_break(BreakToken {
            blank_space: SIZE_INFINITY as usize,
            if_nonempty: true,
            ..BreakToken::default()
        });
    }
    /// A break that is never rendered as a newline.
    pub fn neverbreak(&mut self) {
        self.scan_break(BreakToken { never_break: true, ..BreakToken::default() });
    }
}
impl Token {
    /// Whether this is a break token flagged to never break.
    pub(crate) fn is_neverbreak(&self) -> bool {
        matches!(self, Self::Break(BreakToken { never_break: true, .. }))
    }

    /// Whether this is an unconditional hard break: infinite blank space and
    /// not a neverbreak.
    pub(crate) fn is_hardbreak(&self) -> bool {
        match *self {
            Self::Break(BreakToken { blank_space, never_break, .. }) => {
                blank_space == SIZE_INFINITY as usize && !never_break
            }
            _ => false,
        }
    }

    /// Whether this token renders as a single trailing space: either a plain
    /// one-wide break with no offset, or a string ending in `' '`.
    pub(crate) fn is_space(&self) -> bool {
        match self {
            Self::Break(token) => token.offset == 0 && token.blank_space == 1,
            Self::String(s) => s.ends_with(' '),
            _ => false,
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/fmt/src/pp/ring.rs | crates/fmt/src/pp/ring.rs | use std::{
collections::VecDeque,
ops::{Index, IndexMut, Range},
};
/// A queue with stable "abstract" indices: entries keep the index they were
/// pushed at even after earlier entries are popped from the front.
#[derive(Debug)]
pub(crate) struct RingBuffer<T> {
    /// Backing storage for the live window of entries.
    items: VecDeque<T>,
    /// Abstract index of `items[0]` in the infinitely sized queue.
    start: usize,
}

impl<T> RingBuffer<T> {
    /// Creates an empty buffer starting at abstract index 0.
    pub(crate) fn new() -> Self {
        Self { items: VecDeque::new(), start: 0 }
    }

    pub(crate) fn is_empty(&self) -> bool {
        self.items.is_empty()
    }

    pub(crate) fn len(&self) -> usize {
        self.items.len()
    }

    /// Appends `value` and returns its abstract index.
    pub(crate) fn push(&mut self, value: T) -> usize {
        self.items.push_back(value);
        self.start + self.items.len() - 1
    }

    /// Drops all live entries. The abstract offset is deliberately untouched.
    pub(crate) fn clear(&mut self) {
        self.items.clear();
    }

    /// The range of abstract indices currently held.
    pub(crate) fn index_range(&self) -> Range<usize> {
        let len = self.items.len();
        self.start..self.start + len
    }

    #[inline]
    #[track_caller]
    pub(crate) fn first(&self) -> &T {
        &self.items[0]
    }

    #[inline]
    #[track_caller]
    pub(crate) fn first_mut(&mut self) -> &mut T {
        &mut self.items[0]
    }

    /// Removes and returns the front entry, advancing the abstract offset.
    #[inline]
    #[track_caller]
    pub(crate) fn pop_first(&mut self) -> T {
        self.start += 1;
        self.items.pop_front().unwrap()
    }

    #[inline]
    #[track_caller]
    pub(crate) fn last(&self) -> &T {
        self.items.back().unwrap()
    }

    #[inline]
    #[track_caller]
    pub(crate) fn last_mut(&mut self) -> &mut T {
        self.items.back_mut().unwrap()
    }

    /// Second-to-last entry; panics when fewer than two entries exist.
    #[inline]
    #[track_caller]
    pub(crate) fn second_last(&self) -> &T {
        let n = self.items.len();
        &self.items[n - 2]
    }

    /// Removes the last entry; panics when empty.
    #[inline]
    #[track_caller]
    pub(crate) fn pop_last(&mut self) {
        self.items.pop_back().unwrap();
    }
}

impl<T> Index<usize> for RingBuffer<T> {
    type Output = T;

    // Indexing uses abstract positions; panics if `index` precedes the window.
    fn index(&self, index: usize) -> &Self::Output {
        let local = index.checked_sub(self.start).unwrap();
        &self.items[local]
    }
}

impl<T> IndexMut<usize> for RingBuffer<T> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        let local = index.checked_sub(self.start).unwrap();
        &mut self.items[local]
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/fmt/src/pp/mod.rs | crates/fmt/src/pp/mod.rs | //! Adapted from [`rustc_ast_pretty`](https://github.com/rust-lang/rust/blob/07d3fd1d9b9c1f07475b96a9d168564bf528db68/compiler/rustc_ast_pretty/src/pp.rs)
//! and [`prettyplease`](https://github.com/dtolnay/prettyplease/blob/8eb8c14649aea32e810732bd4d64fe519e6b752a/src/algorithm.rs).
use crate::{DEBUG, DEBUG_INDENT};
use ring::RingBuffer;
use std::{borrow::Cow, cmp, collections::VecDeque, iter};
mod convenience;
mod helpers;
mod ring;
// Every line is allowed at least this much space, even if highly indented.
const MIN_SPACE: isize = 40;
/// How to break. Described in more detail in the module docs.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum Breaks {
    /// When the box does not fit, every break in it becomes a newline.
    Consistent,
    /// Breaks become newlines individually, only as needed to fit.
    Inconsistent,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum IndentStyle {
    /// Vertically aligned under whatever column this block begins at.
    /// ```ignore
    /// fn demo(arg1: usize,
    ///         arg2: usize) {}
    /// ```
    Visual,
    /// Indented relative to the indentation level of the previous line.
    /// ```ignore
    /// fn demo(
    ///     arg1: usize,
    ///     arg2: usize,
    /// ) {}
    /// ```
    Block { offset: isize },
}
/// A potential line break.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub(crate) struct BreakToken {
    /// Extra indentation applied when this break fires as a newline.
    pub(crate) offset: isize,
    /// Width when the break does not fire; `SIZE_INFINITY` marks a hardbreak.
    pub(crate) blank_space: usize,
    // Text emitted around the break when it fires — presumably by
    // `print_break` (not shown here); confirm against the print half.
    pub(crate) pre_break: Option<&'static str>,
    pub(crate) post_break: Option<&'static str>,
    /// Drop this break if its box turns out to be empty (see `scan_end`).
    pub(crate) if_nonempty: bool,
    /// Never render this break as a newline.
    pub(crate) never_break: bool,
}
/// Opens a layout box: how it indents and how it breaks.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) struct BeginToken {
    indent: IndentStyle,
    breaks: Breaks,
}
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum Token {
    // In practice a string token contains either a `&'static str` or a
    // `String`. `Cow` is overkill for this because we never modify the data,
    // but it's more convenient than rolling our own more specialized type.
    /// Literal text, printed verbatim.
    String(Cow<'static, str>),
    /// A potential line break.
    Break(BreakToken),
    /// Start of a layout box.
    Begin(BeginToken),
    /// End of the innermost open box.
    End,
}
/// State for a box while it is being printed: whether it fit on the line or
/// broke. NOTE(review): the `usize` in `Broken` looks like the indent level
/// established by the box — confirm against `print_begin` (not shown here).
#[derive(Copy, Clone, Debug)]
enum PrintFrame {
    Fits(Breaks),
    Broken(usize, Breaks),
}
/// Sentinel "infinite" size: used as the width of hardbreaks and to force
/// oversized groups to break (see `check_stream`).
pub(crate) const SIZE_INFINITY: isize = 0xffff;
#[derive(Debug)]
pub struct Printer {
    /// The formatted output produced so far.
    out: String,
    /// Number of spaces left on line.
    space: isize,
    /// Ring-buffer of tokens and calculated sizes.
    buf: RingBuffer<BufEntry>,
    /// Running size of stream "...left".
    left_total: isize,
    /// Running size of stream "...right".
    right_total: isize,
    /// Pseudo-stack, really a ring too. Holds the
    /// primary-ring-buffers index of the Begin that started the
    /// current block, possibly with the most recent Break after that
    /// Begin (if there is any) on top of it. Stuff is flushed off the
    /// bottom as it becomes irrelevant due to the primary ring-buffer
    /// advancing.
    scan_stack: VecDeque<usize>,
    /// Stack of blocks-in-progress being flushed by print.
    print_stack: Vec<PrintFrame>,
    /// Level of indentation of current line.
    indent: usize,
    /// Buffered indentation to avoid writing trailing whitespace.
    pending_indentation: usize,
    /// The token most recently popped from the left boundary of the
    /// ring-buffer for printing.
    last_printed: Option<Token>,
    /// Target line width.
    margin: isize,
    /// If `Some(tab_width)` the printer will use tabs for indentation.
    indent_config: Option<usize>,
}
/// A buffered token together with its computed size.
#[derive(Debug)]
pub struct BufEntry {
    token: Token,
    /// Measured width: negative while still unresolved (see `scan_*`),
    /// `SIZE_INFINITY` when the entry is forced to break.
    size: isize,
}
impl Printer {
pub fn new(margin: usize, use_tab_with_size: Option<usize>) -> Self {
let margin = (margin as isize).clamp(MIN_SPACE, SIZE_INFINITY - 1);
Self {
out: String::new(),
space: margin,
buf: RingBuffer::new(),
left_total: 0,
right_total: 0,
scan_stack: VecDeque::new(),
print_stack: Vec::new(),
indent: 0,
pending_indentation: 0,
last_printed: None,
margin,
indent_config: use_tab_with_size,
}
}
    /// Predicts available space on the current or next line based on pending breaks.
    ///
    /// This function provides a heuristic for estimating available space by checking if
    /// an unconditional hard break is pending in the buffer. The printer's internal
    /// `self.space` value may not accurately reflect pending formatting decisions.
    ///
    /// # Returns
    ///
    /// - The full `margin` if an unconditional hard break is pending, signaling that a new line
    ///   will be created. Callers should apply their own indentation logic as they have more
    ///   semantic context about the code structure.
    /// - The current space left (`self.space`) if no hard break is found, which can be trusted when
    ///   no line breaks are imminent.
    ///
    /// # Trade-offs
    ///
    /// This heuristic may overestimate available space,
    /// but provides a reliable signal for hard breaks while keeping the implementation
    /// simple.
    pub(crate) fn space_left(&self) -> usize {
        // Scan backwards through the buffer for the last unconditional hard break.
        for i in self.buf.index_range().rev() {
            let token = &self.buf[i].token;
            if let Token::Break(break_token) = token
                && break_token.blank_space as isize >= SIZE_INFINITY
                && !break_token.never_break
            {
                return self.margin as usize;
            }
            // Stop at first non-end token.
            // (Trailing `End` tokens close boxes and render nothing, so they
            // are safe to look past.)
            if !matches!(token, Token::End) {
                break;
            }
        }
        // If no hard break pending, return actual space on current line or the full margin if space
        // is negative.
        (if self.space < 0 { self.margin } else { self.space }) as usize
    }
pub(crate) fn last_token(&self) -> Option<&Token> {
self.last_token_still_buffered().or(self.last_printed.as_ref())
}
pub(crate) fn last_token_still_buffered(&self) -> Option<&Token> {
if self.buf.is_empty() {
return None;
}
Some(&self.buf.last().token)
}
/// Be very careful with this!
pub(crate) fn replace_last_token_still_buffered(&mut self, token: Token) {
self.buf.last_mut().token = token;
}
    /// WARNING: Be very careful with this!
    ///
    /// Searches backwards through the buffer to find and replace the last token
    /// that satisfies a predicate. This is a specialized and sensitive operation.
    ///
    /// This function's traversal logic is specifically designed to handle cases
    /// where formatting boxes have been closed (e.g., after a multi-line
    /// comment). It will automatically skip over any trailing `Token::End`
    /// tokens to find the substantive token before them.
    ///
    /// The search stops as soon as it encounters any token other than `End`
    /// (i.e., a `String`, `Break`, or `Begin`). The provided predicate is then
    /// called on that token. If the predicate returns `true`, the token is
    /// replaced.
    ///
    /// This function will only ever evaluate the predicate on **one** token.
    pub(crate) fn find_and_replace_last_token_still_buffered<F>(
        &mut self,
        new_token: Token,
        predicate: F,
    ) where
        F: FnOnce(&Token) -> bool,
    {
        // Walk backwards over the buffered tokens; only trailing `End`s are skipped.
        for i in self.buf.index_range().rev() {
            let token = &self.buf[i].token;
            if let Token::End = token {
                // It's safe to skip the end of a box.
                continue;
            }
            // Apply the predicate and return after the first non-end token.
            // The replacement happens in place; the entry's size is kept.
            if predicate(token) {
                self.buf[i].token = new_token;
            }
            break;
        }
    }
fn scan_eof(&mut self) {
if !self.scan_stack.is_empty() {
self.check_stack(0);
self.advance_left();
}
}
fn scan_begin(&mut self, token: BeginToken) {
if self.scan_stack.is_empty() {
self.left_total = 1;
self.right_total = 1;
self.buf.clear();
}
let right = self.buf.push(BufEntry { token: Token::Begin(token), size: -self.right_total });
self.scan_stack.push_back(right);
}
    /// Closes the innermost box.
    ///
    /// With an empty scan stack the `End` prints immediately. Otherwise
    /// trailing break tokens are cleaned up first: a box whose only content is
    /// a break (`Begin` directly followed by `Break`) is dropped wholesale,
    /// and an `if_nonempty` break right before this `End` is removed.
    fn scan_end(&mut self) {
        if self.scan_stack.is_empty() {
            self.print_end();
        } else {
            if !self.buf.is_empty()
                && let Token::Break(break_token) = self.buf.last().token
            {
                // `Begin` + `Break` + this `End`: the box contains nothing but
                // a break — remove both tokens and their scan-stack entries,
                // and undo the break's contribution to the running total.
                if self.buf.len() >= 2
                    && let Token::Begin(_) = self.buf.second_last().token
                {
                    self.buf.pop_last();
                    self.buf.pop_last();
                    self.scan_stack.pop_back();
                    self.scan_stack.pop_back();
                    self.right_total -= break_token.blank_space as isize;
                    return;
                }
                // A break that only applies to non-empty content is dropped
                // when the box closes immediately after it.
                if break_token.if_nonempty {
                    self.buf.pop_last();
                    self.scan_stack.pop_back();
                    self.right_total -= break_token.blank_space as isize;
                }
            }
            let right = self.buf.push(BufEntry { token: Token::End, size: -1 });
            self.scan_stack.push_back(right);
        }
    }
    pub(crate) fn scan_break(&mut self, token: BreakToken) {
        if self.scan_stack.is_empty() {
            // Buffer is idle: reset the running totals and start fresh.
            self.left_total = 1;
            self.right_total = 1;
            self.buf.clear();
        } else {
            // Resolve sizes of previously buffered tokens first.
            self.check_stack(0);
        }
        // Negative size marks the break as unresolved until resolved later.
        let right = self.buf.push(BufEntry { token: Token::Break(token), size: -self.right_total });
        self.scan_stack.push_back(right);
        self.right_total += token.blank_space as isize;
    }
fn scan_string(&mut self, string: Cow<'static, str>) {
if self.scan_stack.is_empty() {
self.print_string(&string);
} else {
let len = string.len() as isize;
self.buf.push(BufEntry { token: Token::String(string), size: len });
self.right_total += len;
self.check_stream();
}
}
    #[track_caller]
    pub(crate) fn offset(&mut self, offset: isize) {
        // Adjusts the indent offset of the most recently buffered break.
        match &mut self.buf.last_mut().token {
            Token::Break(token) => token.offset += offset,
            // A `Begin` right after the break is tolerated as a no-op.
            Token::Begin(_) => {}
            // Calling this after a string or end token is a caller bug.
            Token::String(_) | Token::End => unreachable!(),
        }
    }
pub(crate) fn ends_with(&self, ch: char) -> bool {
for i in self.buf.index_range().rev() {
if let Token::String(token) = &self.buf[i].token {
return token.ends_with(ch);
}
}
self.out.ends_with(ch)
}
    fn check_stream(&mut self) {
        // Flush buffered tokens while the pending text cannot possibly fit
        // in the space remaining on the current line.
        while self.right_total - self.left_total > self.space {
            if *self.scan_stack.front().unwrap() == self.buf.index_range().start {
                // The oldest unresolved token can never fit: force it to
                // break by assigning it an infinite size.
                self.scan_stack.pop_front().unwrap();
                self.buf.first_mut().size = SIZE_INFINITY;
            }
            self.advance_left();
            if self.buf.is_empty() {
                break;
            }
        }
    }
    fn advance_left(&mut self) {
        // Print every leading token whose size has been resolved
        // (non-negative); unresolved tokens must stay buffered.
        while self.buf.first().size >= 0 {
            let left = self.buf.pop_first();
            match &left.token {
                Token::String(string) => {
                    self.left_total += left.size;
                    self.print_string(string);
                }
                Token::Break(token) => {
                    self.left_total += token.blank_space as isize;
                    self.print_break(*token, left.size);
                }
                Token::Begin(token) => self.print_begin(*token, left.size),
                Token::End => self.print_end(),
            }
            // Remember the last token that reached the output stage.
            self.last_printed = Some(left.token);
            if self.buf.is_empty() {
                break;
            }
        }
    }
    fn check_stack(&mut self, mut depth: usize) {
        // Resolve sizes of buffered tokens now that more of the stream has
        // been scanned. `depth` counts how many `End` tokens have been seen
        // while unwinding the scan stack.
        while let Some(&index) = self.scan_stack.back() {
            let entry = &mut self.buf[index];
            match entry.token {
                Token::Begin(_) => {
                    // Stop at an unmatched `Begin`; its size stays pending.
                    if depth == 0 {
                        break;
                    }
                    self.scan_stack.pop_back().unwrap();
                    entry.size += self.right_total;
                    depth -= 1;
                }
                Token::End => {
                    // paper says + not =, but that makes no sense.
                    self.scan_stack.pop_back().unwrap();
                    entry.size = 1;
                    depth += 1;
                }
                _ => {
                    // Breaks: finalize their size relative to the totals.
                    self.scan_stack.pop_back().unwrap();
                    entry.size += self.right_total;
                    if depth == 0 {
                        break;
                    }
                }
            }
        }
    }
fn get_top(&self) -> PrintFrame {
self.print_stack.last().copied().unwrap_or(PrintFrame::Broken(0, Breaks::Inconsistent))
}
    fn print_begin(&mut self, token: BeginToken, size: isize) {
        if DEBUG {
            // Emit visible markers for box boundaries when debugging.
            self.out.push(match token.breaks {
                Breaks::Consistent => '«',
                Breaks::Inconsistent => '‹',
            });
            if DEBUG_INDENT && let IndentStyle::Block { offset } = token.indent {
                // Render the block offset as subscript digits.
                self.out.extend(offset.to_string().chars().map(|ch| match ch {
                    '0'..='9' => ['₀', '₁', '₂', '₃', '₄', '₅', '₆', '₇', '₈', '₉']
                        [(ch as u8 - b'0') as usize],
                    '-' => '₋',
                    _ => unreachable!(),
                }));
            }
        }
        if size > self.space {
            // Contents don't fit: the box breaks, so save the current indent
            // and switch to the box's indentation rule.
            self.print_stack.push(PrintFrame::Broken(self.indent, token.breaks));
            self.indent = match token.indent {
                IndentStyle::Block { offset } => {
                    usize::try_from(self.indent as isize + offset).unwrap()
                }
                // Visual indent aligns to the current column.
                IndentStyle::Visual => (self.margin - self.space) as usize,
            };
        } else {
            self.print_stack.push(PrintFrame::Fits(token.breaks));
        }
    }
    fn print_end(&mut self) {
        // Pop the frame opened by the matching `print_begin`, restoring the
        // previous indentation if the box had broken.
        let breaks = match self.print_stack.pop().unwrap() {
            PrintFrame::Broken(indent, breaks) => {
                self.indent = indent;
                breaks
            }
            PrintFrame::Fits(breaks) => breaks,
        };
        if DEBUG {
            // Closing marker mirrors the opening one from `print_begin`.
            self.out.push(match breaks {
                Breaks::Consistent => '»',
                Breaks::Inconsistent => '›',
            });
        }
    }
    fn print_break(&mut self, token: BreakToken, size: isize) {
        // A break renders as blank space when it fits (or may never break),
        // and as a newline plus indentation otherwise.
        let fits = token.never_break
            || match self.get_top() {
                PrintFrame::Fits(..) => true,
                PrintFrame::Broken(.., Breaks::Consistent) => false,
                PrintFrame::Broken(.., Breaks::Inconsistent) => size <= self.space,
            };
        if fits {
            // Defer the spaces; they are only flushed before visible text.
            self.pending_indentation += token.blank_space;
            self.space -= token.blank_space as isize;
            if DEBUG {
                self.out.push('·');
            }
        } else {
            // Optional text emitted on the old line before breaking.
            if let Some(pre_break) = token.pre_break {
                self.print_indent();
                self.out.push_str(pre_break);
            }
            if DEBUG {
                self.out.push('·');
            }
            self.out.push('\n');
            let indent = self.indent as isize + token.offset;
            self.pending_indentation = usize::try_from(indent).expect("negative indentation");
            // Never let the usable width drop below the minimum.
            self.space = cmp::max(self.margin - indent, MIN_SPACE);
            // Optional text emitted at the start of the new line.
            if let Some(post_break) = token.post_break {
                self.print_indent();
                self.out.push_str(post_break);
                self.space -= post_break.len() as isize;
            }
        }
    }
    fn print_string(&mut self, string: &str) {
        // Flush deferred indentation, emit the text, and charge its width
        // against the remaining line space.
        self.print_indent();
        self.out.push_str(string);
        self.space -= string.len() as isize;
    }
fn print_indent(&mut self) {
self.out.reserve(self.pending_indentation);
if let Some(tab_width) = self.indent_config {
let num_tabs = self.pending_indentation / tab_width;
self.out.extend(iter::repeat_n('\t', num_tabs));
let remainder = self.pending_indentation % tab_width;
self.out.extend(iter::repeat_n(' ', remainder));
} else {
self.out.extend(iter::repeat_n(' ', self.pending_indentation));
}
self.pending_indentation = 0;
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/fmt/src/state/sol.rs | crates/fmt/src/state/sol.rs | #![allow(clippy::too_many_arguments)]
use super::{
CommentConfig, Separator, State,
common::{BlockFormat, ListFormat},
};
use crate::{
pp::SIZE_INFINITY,
state::{CallContext, common::LitExt},
};
use foundry_common::{comments::Comment, iter::IterDelimited};
use foundry_config::fmt::{self as config, MultilineFuncHeaderStyle};
use solar::{
ast::BoxSlice,
interface::SpannedOption,
parse::{
ast::{self, Span},
interface::BytePos,
},
};
use std::{collections::HashMap, fmt::Debug};
// Produces a closure extracting the span from an AST node, either via a
// `span` field (`get_span!()`) or a `span()` method (`get_span!(())`).
#[rustfmt::skip]
macro_rules! get_span {
    () => { |value| value.span };
    (()) => { |value| value.span() };
}
/// Language-specific pretty printing: Solidity.
impl<'ast> State<'_, 'ast> {
    /// Pretty-prints an entire source unit, grouping and sorting consecutive
    /// imports when `config.sort_imports` is enabled.
    pub(crate) fn print_source_unit(&mut self, source_unit: &'ast ast::SourceUnit<'ast>) {
        // Figure out if the cursor needs to check for CR (`\r`).
        if let Some(item) = source_unit.items.first() {
            self.check_crlf(item.span.to(source_unit.items.last().unwrap().span));
        }
        let mut items = source_unit.items.iter().peekable();
        let mut is_first = true;
        while let Some(item) = items.next() {
            // If imports shouldn't be sorted, or if the item is not an import, print it directly.
            if !self.config.sort_imports || !matches!(item.kind, ast::ItemKind::Import(_)) {
                self.print_item(item, is_first);
                is_first = false;
                if let Some(next_item) = items.peek() {
                    self.separate_items(next_item, false);
                }
                continue;
            }
            // Otherwise, collect a group of consecutive imports and sort them before printing.
            let mut import_group = vec![item];
            while let Some(next_item) = items.peek() {
                // Groups end when the next item is not an import or when there is a blank line.
                if !matches!(next_item.kind, ast::ItemKind::Import(_))
                    || self.has_comment_between(item.span.hi(), next_item.span.lo())
                {
                    break;
                }
                import_group.push(items.next().unwrap());
            }
            // Sort lexicographically by the import path string.
            import_group.sort_by_key(|item| {
                if let ast::ItemKind::Import(import) = &item.kind {
                    import.path.value.as_str()
                } else {
                    unreachable!("Expected an import item")
                }
            });
            for (pos, group_item) in import_group.iter().delimited() {
                self.print_item(group_item, is_first);
                is_first = false;
                if !pos.is_last {
                    self.hardbreak_if_not_bol();
                }
            }
            if let Some(next_item) = items.peek() {
                self.separate_items(next_item, false);
            }
        }
        // Flush any comments trailing the last item.
        self.print_remaining_comments(is_first);
    }
    /// Prints a hardbreak if the item needs an isolated line break.
    ///
    /// `advance` controls whether, when comments surround the boundary,
    /// pending comments are flushed and the cursor advanced instead.
    fn separate_items(&mut self, next_item: &'ast ast::Item<'ast>, advance: bool) {
        if !item_needs_iso(&next_item.kind) {
            return;
        }
        let span = next_item.span;
        // Styles of all comments preceding the upcoming item.
        let cmnts = self
            .comments
            .iter()
            .filter_map(|c| if c.pos() < span.lo() { Some(c.style) } else { None })
            .collect::<Vec<_>>();
        if let Some(first) = cmnts.first()
            && let Some(last) = cmnts.last()
        {
            // Comments without surrounding blank lines: still isolate.
            if !(first.is_blank() || last.is_blank()) {
                self.hardbreak();
                return;
            }
            if advance {
                if self.peek_comment_before(span.lo()).is_some() {
                    self.print_comments(span.lo(), CommentConfig::default());
                } else if self.inline_config.is_disabled(span.shrink_to_lo()) {
                    self.hardbreak();
                    self.cursor.advance_to(span.lo(), true);
                }
            }
        } else {
            // No intervening comments: always isolate with a blank line.
            self.hardbreak();
        }
    }
    /// Prints a single source-unit or contract item, including its docs and
    /// the comments attached before and after it.
    fn print_item(&mut self, item: &'ast ast::Item<'ast>, skip_ws: bool) {
        let ast::Item { ref docs, span, ref kind } = *item;
        self.print_docs(docs);
        // Items whose formatting is disabled are copied verbatim.
        if self.handle_span(item.span, skip_ws) {
            if !self.print_trailing_comment(span.hi(), None) {
                self.print_sep(Separator::Hardbreak);
            }
            return;
        }
        if self
            .print_comments(
                span.lo(),
                if skip_ws {
                    CommentConfig::skip_leading_ws(false)
                } else {
                    CommentConfig::default()
                },
            )
            .is_some_and(|cmnt| cmnt.is_mixed())
        {
            self.zerobreak();
        }
        // Dispatch on the item kind.
        match kind {
            ast::ItemKind::Pragma(pragma) => self.print_pragma(pragma),
            ast::ItemKind::Import(import) => self.print_import(import),
            ast::ItemKind::Using(using) => self.print_using(using),
            ast::ItemKind::Contract(contract) => self.print_contract(contract, span),
            ast::ItemKind::Function(func) => self.print_function(func),
            ast::ItemKind::Variable(var) => self.print_var_def(var),
            ast::ItemKind::Struct(strukt) => self.print_struct(strukt, span),
            ast::ItemKind::Enum(enm) => self.print_enum(enm, span),
            ast::ItemKind::Udvt(udvt) => self.print_udvt(udvt),
            ast::ItemKind::Error(err) => self.print_error(err),
            ast::ItemKind::Event(event) => self.print_event(event),
        }
        // Flush comments within the item's span and any trailing comment.
        self.cursor.advance_to(span.hi(), true);
        self.print_comments(span.hi(), CommentConfig::default());
        self.print_trailing_comment(span.hi(), None);
        self.hardbreak_if_not_bol();
        self.cursor.next_line(self.is_at_crlf());
    }
    /// Prints a `pragma` directive.
    fn print_pragma(&mut self, pragma: &'ast ast::PragmaDirective<'ast>) {
        self.word("pragma ");
        match &pragma.tokens {
            // e.g. `pragma solidity ^0.8.0;`
            ast::PragmaTokens::Version(ident, semver_req) => {
                self.print_ident(ident);
                self.nbsp();
                self.word(semver_req.to_string());
            }
            // Custom pragmas, e.g. `pragma abicoder v2;`
            ast::PragmaTokens::Custom(a, b) => {
                self.print_ident_or_strlit(a);
                if let Some(b) = b {
                    self.nbsp();
                    self.print_ident_or_strlit(b);
                }
            }
            // Anything else is echoed token-by-token.
            ast::PragmaTokens::Verbatim(tokens) => {
                self.print_tokens(tokens);
            }
        }
        self.word(";");
    }
fn print_commasep_aliases<'a, I>(&mut self, aliases: I)
where
I: Iterator<Item = &'a (ast::Ident, Option<ast::Ident>)>,
'ast: 'a,
{
for (pos, (ident, alias)) in aliases.delimited() {
self.print_ident(ident);
if let Some(alias) = alias {
self.word(" as ");
self.print_ident(alias);
}
if !pos.is_last {
self.word(",");
self.space();
}
}
}
    /// Prints an `import` directive, optionally keeping a single aliased
    /// import on one line and sorting brace-list imports.
    fn print_import(&mut self, import: &'ast ast::ImportDirective<'ast>) {
        let ast::ImportDirective { path, items } = import;
        self.word("import ");
        match items {
            // `import "path"` and `import * as x from "path"` forms.
            ast::ImportItems::Plain(_) | ast::ImportItems::Glob(_) => {
                self.print_ast_str_lit(path);
                if let Some(ident) = items.source_alias() {
                    self.word(" as ");
                    self.print_ident(&ident);
                }
            }
            // `import {a, b as c} from "path"`.
            ast::ImportItems::Aliases(aliases) => {
                // Check if we should keep single imports on one line
                let use_single_line = self.config.single_line_imports && aliases.len() == 1;
                if use_single_line {
                    self.word("{");
                    if self.config.bracket_spacing {
                        self.nbsp();
                    }
                } else {
                    self.s.cbox(self.ind);
                    self.word("{");
                    self.braces_break();
                }
                if self.config.sort_imports {
                    // Sort by the imported identifier, not the alias.
                    let mut sorted: Vec<_> = aliases.iter().collect();
                    sorted.sort_by_key(|(ident, _alias)| ident.name.as_str());
                    self.print_commasep_aliases(sorted.into_iter());
                } else {
                    self.print_commasep_aliases(aliases.iter());
                };
                if use_single_line {
                    if self.config.bracket_spacing {
                        self.nbsp();
                    }
                    self.word("}");
                } else {
                    self.braces_break();
                    self.s.offset(-self.ind);
                    self.word("}");
                    self.end();
                }
                self.word(" from ");
                self.print_ast_str_lit(path);
            }
        }
        self.word(";");
    }
    /// Prints a `using ... for ...` directive, with an optional `global`
    /// suffix.
    fn print_using(&mut self, using: &'ast ast::UsingDirective<'ast>) {
        let ast::UsingDirective { list, ty, global } = using;
        self.word("using ");
        match list {
            ast::UsingList::Single(path) => self.print_path(path, true),
            // `using {add as +, sub} for ...`
            ast::UsingList::Multiple(items) => {
                self.s.cbox(self.ind);
                self.word("{");
                self.braces_break();
                for (pos, (path, op)) in items.iter().delimited() {
                    self.print_path(path, true);
                    if let Some(op) = op {
                        self.word(" as ");
                        self.word(op.to_str());
                    }
                    if !pos.is_last {
                        self.word(",");
                        self.space();
                    }
                }
                self.braces_break();
                self.s.offset(-self.ind);
                self.word("}");
                self.end();
            }
        }
        self.word(" for ");
        // `*` applies the directive to all types.
        if let Some(ty) = ty {
            self.print_ty(ty);
        } else {
            self.word("*");
        }
        if *global {
            self.word(" global");
        }
        self.word(";");
    }
    /// Prints a contract/interface/library definition: header, optional
    /// storage layout, inheritance list, and body items.
    fn print_contract(&mut self, c: &'ast ast::ItemContract<'ast>, span: Span) {
        let ast::ItemContract { kind, name, layout, bases, body } = c;
        // Remember the current contract for the duration of its body; read
        // by `is_modifier_a_base_contract`.
        self.contract = Some(c);
        self.cursor.advance_to(span.lo(), true);
        self.s.cbox(self.ind);
        self.ibox(0);
        self.cbox(0);
        self.word_nbsp(kind.to_str());
        self.print_ident(name);
        self.nbsp();
        // Optional `layout at <slot>` clause.
        if let Some(layout) = layout
            && !self.handle_span(layout.span, false)
        {
            self.word("layout at ");
            self.print_expr(layout.slot);
            self.print_sep(Separator::Space);
        }
        if let Some(first) = bases.first().map(|base| base.span())
            && let Some(last) = bases.last().map(|base| base.span())
            && self.inline_config.is_disabled(first.to(last))
        {
            // Formatting disabled over the inheritance list: copy verbatim.
            _ = self.handle_span(first.until(last), false);
        } else if !bases.is_empty() {
            self.word("is");
            self.space();
            let last = bases.len() - 1;
            for (i, base) in bases.iter().enumerate() {
                if !self.handle_span(base.span(), false) {
                    self.print_modifier_call(base, false);
                    if i != last {
                        self.word(",");
                        if self
                            .print_comments(
                                bases[i + 1].span().lo(),
                                CommentConfig::skip_ws().mixed_prev_space().mixed_post_nbsp(),
                            )
                            .is_none()
                        {
                            self.space();
                        }
                    }
                }
            }
            if !self.print_trailing_comment(bases.last().unwrap().span().hi(), None) {
                self.space();
            }
            self.s.offset(-self.ind);
        }
        self.end();
        self.print_word("{");
        self.end();
        if !body.is_empty() {
            // update block depth
            self.block_depth += 1;
            self.print_sep(Separator::Hardbreak);
            if self.config.contract_new_lines {
                self.hardbreak();
            }
            let body_lo = body[0].span.lo();
            if self.peek_comment_before(body_lo).is_some() {
                self.print_comments(body_lo, CommentConfig::skip_leading_ws(true));
            }
            let mut is_first = true;
            let mut items = body.iter().peekable();
            while let Some(item) = items.next() {
                self.print_item(item, is_first);
                is_first = false;
                if let Some(next_item) = items.peek() {
                    if self.inline_config.is_disabled(next_item.span) {
                        _ = self.handle_span(next_item.span, false);
                    } else {
                        self.separate_items(next_item, true);
                    }
                }
            }
            if let Some(cmnt) = self.print_comments(span.hi(), CommentConfig::skip_trailing_ws())
                && self.config.contract_new_lines
                && !cmnt.is_blank()
            {
                self.print_sep(Separator::Hardbreak);
            }
            self.s.offset(-self.ind);
            self.end();
            if self.config.contract_new_lines {
                self.hardbreak_if_nonempty();
            }
            // restore block depth
            self.block_depth -= 1;
        } else {
            // Empty body: render `{}` or `{ }` per bracket-spacing config.
            if self.print_comments(span.hi(), CommentConfig::skip_ws()).is_some() {
                self.zerobreak();
            } else if self.config.bracket_spacing {
                self.nbsp();
            };
            self.end();
        }
        self.print_word("}");
        self.cursor.advance_to(span.hi(), true);
        self.contract = None;
    }
    /// Prints a `struct` definition with one field per line.
    fn print_struct(&mut self, strukt: &'ast ast::ItemStruct<'ast>, span: Span) {
        let ast::ItemStruct { name, fields } = strukt;
        // NOTE(review): the `+ 8` appears to reserve width for the
        // surrounding `struct`/brace tokens — confirm before changing.
        let ind = if self.estimate_size(name.span) + 8 >= self.space_left() { self.ind } else { 0 };
        self.s.ibox(self.ind);
        self.word("struct");
        self.space();
        self.print_ident(name);
        self.word(" {");
        if !fields.is_empty() {
            self.break_offset(SIZE_INFINITY as usize, ind);
        }
        self.s.ibox(0);
        // Each field is printed as a full variable definition ending in `;`.
        for var in fields.iter() {
            self.print_var_def(var);
            if !self.print_trailing_comment(var.span.hi(), None) {
                self.hardbreak();
            }
        }
        self.print_comments(span.hi(), CommentConfig::skip_ws());
        if ind == 0 {
            self.s.offset(-self.ind);
        }
        self.end();
        self.end();
        self.word("}");
    }
    /// Prints an `enum` definition with one variant per line.
    fn print_enum(&mut self, enm: &'ast ast::ItemEnum<'ast>, span: Span) {
        let ast::ItemEnum { name, variants } = enm;
        self.s.cbox(self.ind);
        self.word("enum ");
        self.print_ident(name);
        self.word(" {");
        self.hardbreak_if_nonempty();
        for (pos, ident) in variants.iter().delimited() {
            self.print_comments(ident.span.lo(), CommentConfig::default());
            self.print_ident(ident);
            // No trailing comma after the last variant.
            if !pos.is_last {
                self.word(",");
            }
            if !self.print_trailing_comment(ident.span.hi(), None) {
                self.hardbreak();
            }
        }
        self.print_comments(span.hi(), CommentConfig::skip_ws());
        self.s.offset(-self.ind);
        self.end();
        self.word("}");
    }
    /// Prints a user-defined value type: `type Name is <type>;`.
    fn print_udvt(&mut self, udvt: &'ast ast::ItemUdvt<'ast>) {
        let ast::ItemUdvt { name, ty } = udvt;
        self.word("type ");
        self.print_ident(name);
        self.word(" is ");
        self.print_ty(ty);
        self.word(";");
    }
    // NOTE(rusowsky): Functions are the only source unit item that handle inline (disabled) format
    /// Prints a function-like item (function, constructor, modifier, ...):
    /// name, parameter list, attributes, returns clause, and body, while
    /// interleaving the comments mapped to each attribute position.
    fn print_function(&mut self, func: &'ast ast::ItemFunction<'ast>) {
        let ast::ItemFunction { kind, ref header, ref body, body_span } = *func;
        let ast::FunctionHeader {
            name,
            ref parameters,
            visibility,
            state_mutability: sm,
            virtual_,
            ref override_,
            ref returns,
            ..
        } = *header;
        self.s.cbox(self.ind);
        // Print fn name and params
        _ = self.handle_span(self.cursor.span(header.span.lo()), false);
        self.print_word(kind.to_str());
        if let Some(name) = name {
            self.print_sep(Separator::Nbsp);
            self.print_ident(&name);
            self.cursor.advance_to(name.span.hi(), true);
        }
        self.s.cbox(-self.ind);
        // Decide how the parameter list may break based on the configured
        // multiline-header style.
        let header_style = self.config.multiline_func_header;
        let params_format = match header_style {
            MultilineFuncHeaderStyle::ParamsAlways => ListFormat::always_break(),
            MultilineFuncHeaderStyle::All
                if header.parameters.len() > 1 && !self.can_header_be_inlined(func) =>
            {
                ListFormat::always_break()
            }
            MultilineFuncHeaderStyle::AllParams
                if !header.parameters.is_empty() && !self.can_header_be_inlined(func) =>
            {
                ListFormat::always_break()
            }
            _ => ListFormat::consistent().break_cmnts().break_single(
                // ensure fn params are always breakable when there is a single `Contract.Struct`
                parameters.len() == 1
                    && matches!(
                        &parameters[0].ty,
                        ast::Type { kind: ast::TypeKind::Custom(ty), .. } if ty.segments().len() > 1
                    ),
            ),
        };
        self.print_parameter_list(parameters, parameters.span, params_format);
        self.end();
        // Map attributes to their corresponding comments
        let (mut map, attributes, first_attrib_pos) =
            AttributeCommentMapper::new(returns.as_ref(), body_span.lo()).build(self, header);
        // Prints comments that precede a (format-disabled) span; returns
        // true when at least one comment was printed.
        let mut handle_pre_cmnts = |this: &mut Self, span: Span| -> bool {
            if this.inline_config.is_disabled(span)
                // Note: `map` is still captured from the outer scope, which is fine.
                && let Some((pre_cmnts, ..)) = map.remove(&span.lo())
            {
                for (pos, cmnt) in pre_cmnts.into_iter().delimited() {
                    if pos.is_first && cmnt.style.is_isolated() && !this.is_bol_or_only_ind() {
                        this.print_sep(Separator::Hardbreak);
                    }
                    if let Some(cmnt) = this.handle_comment(cmnt, false) {
                        this.print_comment(cmnt, CommentConfig::skip_ws().mixed_post_nbsp());
                    }
                    if pos.is_last {
                        return true;
                    }
                }
            }
            false
        };
        // NOTE(review): `handle_span` appears to return true when the span
        // was emitted verbatim (formatting disabled) — confirm.
        let skip_attribs = returns.as_ref().is_some_and(|ret| {
            let attrib_span = Span::new(first_attrib_pos, ret.span.lo());
            handle_pre_cmnts(self, attrib_span);
            self.handle_span(attrib_span, false)
        });
        let skip_returns = {
            let pos = if skip_attribs { self.cursor.pos } else { first_attrib_pos };
            let ret_span = Span::new(pos, body_span.lo());
            handle_pre_cmnts(self, ret_span);
            self.handle_span(ret_span, false)
        };
        let attrib_box = self.config.multiline_func_header.params_first()
            || (self.config.multiline_func_header.attrib_first()
                && !self.can_header_params_be_inlined(func));
        if attrib_box {
            self.s.cbox(0);
        }
        if !(skip_attribs || skip_returns) {
            // Print fn attributes in correct order
            if let Some(v) = visibility {
                self.print_fn_attribute(v.span, &mut map, &mut |s| s.word(v.to_str()));
            }
            // `nonpayable` is the default mutability and is never printed.
            if let Some(sm) = sm
                && !matches!(*sm, ast::StateMutability::NonPayable)
            {
                self.print_fn_attribute(sm.span, &mut map, &mut |s| s.word(sm.to_str()));
            }
            if let Some(v) = virtual_ {
                self.print_fn_attribute(v, &mut map, &mut |s| s.word("virtual"));
            }
            if let Some(o) = override_ {
                self.print_fn_attribute(o.span, &mut map, &mut |s| s.print_override(o));
            }
            for m in attributes.iter().filter(|a| matches!(a.kind, AttributeKind::Modifier(_))) {
                if let AttributeKind::Modifier(modifier) = m.kind {
                    let is_base = self.is_modifier_a_base_contract(kind, modifier);
                    self.print_fn_attribute(m.span, &mut map, &mut |s| {
                        s.print_modifier_call(modifier, is_base)
                    });
                }
            }
        }
        if !skip_returns
            && let Some(ret) = returns
            && !ret.is_empty()
        {
            if !self.handle_span(self.cursor.span(ret.span.lo()), false) {
                if !self.is_bol_or_only_ind() && !self.last_token_is_space() {
                    self.print_sep(Separator::Space);
                }
                self.cursor.advance_to(ret.span.lo(), true);
                self.print_word("returns ");
            }
            self.print_parameter_list(
                ret,
                ret.span,
                ListFormat::consistent(), // .with_cmnts_break(false),
            );
        }
        // Print fn body
        if let Some(body) = body {
            if self.handle_span(self.cursor.span(body_span.lo()), false) {
                // Print spacing if necessary. Updates cursor.
            } else {
                if let Some(cmnt) = self.peek_comment_before(body_span.lo()) {
                    if cmnt.style.is_mixed() {
                        // These shouldn't update the cursor, as we've already dealt with it above
                        self.space();
                        self.s.offset(-self.ind);
                        self.print_comments(body_span.lo(), CommentConfig::skip_ws());
                    } else {
                        self.zerobreak();
                        self.s.offset(-self.ind);
                        self.print_comments(body_span.lo(), CommentConfig::skip_ws());
                        self.s.offset(-self.ind);
                    }
                } else {
                    // If there are no modifiers, overrides, nor returns never break
                    if header.modifiers.is_empty()
                        && header.override_.is_none()
                        && returns.as_ref().is_none_or(|r| r.is_empty())
                        && (header.visibility().is_none() || body.is_empty())
                    {
                        self.nbsp();
                    } else {
                        self.space();
                        self.s.offset(-self.ind);
                    }
                }
                self.cursor.advance_to(body_span.lo(), true);
            }
            self.print_word("{");
            self.end();
            if attrib_box {
                self.end();
            }
            self.print_block_without_braces(body, body_span.hi(), Some(self.ind));
            if self.cursor.enabled || self.cursor.pos < body_span.hi() {
                self.print_word("}");
                self.cursor.advance_to(body_span.hi(), true);
            }
        } else {
            // Bodyless function: terminate the header with `;`.
            self.print_comments(body_span.lo(), CommentConfig::skip_ws().mixed_prev_space());
            self.end();
            if attrib_box {
                self.end();
            }
            self.neverbreak();
            self.print_word(";");
        }
        if let Some(cmnt) = self.peek_trailing_comment(body_span.hi(), None) {
            if cmnt.is_doc {
                // trailing doc comments after the fn body are isolated
                // these shouldn't update the cursor, as this is our own formatting
                self.hardbreak();
                self.hardbreak();
            }
            self.print_trailing_comment(body_span.hi(), None);
        }
    }
    /// Prints a single function attribute (visibility, mutability, virtual,
    /// override, or modifier) together with the comments mapped to its span.
    fn print_fn_attribute(
        &mut self,
        span: Span,
        map: &mut AttributeCommentMap,
        print_fn: &mut dyn FnMut(&mut Self),
    ) {
        match map.remove(&span.lo()) {
            Some((pre_cmnts, inner_cmnts, post_cmnts)) => {
                // Print preceding comments.
                for cmnt in pre_cmnts {
                    let Some(cmnt) = self.handle_comment(cmnt, false) else {
                        continue;
                    };
                    self.print_comment(cmnt, CommentConfig::default());
                }
                // Push the inner comments back to the queue, so that they are printed in their
                // intended place.
                for cmnt in inner_cmnts.into_iter().rev() {
                    self.comments.push_front(cmnt);
                }
                let mut enabled = false;
                if !self.handle_span(span, false) {
                    if !self.is_bol_or_only_ind() {
                        self.space();
                    }
                    self.ibox(0);
                    print_fn(self);
                    self.cursor.advance_to(span.hi(), true);
                    // Remember that a box was opened so it is closed below.
                    enabled = true;
                }
                // Print subsequent comments.
                for cmnt in post_cmnts {
                    let Some(cmnt) = self.handle_comment(cmnt, false) else {
                        continue;
                    };
                    self.print_comment(cmnt, CommentConfig::default().mixed_prev_space());
                }
                if enabled {
                    self.end();
                }
            }
            // Fallback for attributes not in the map (should never happen)
            None => {
                if !self.is_bol_or_only_ind() {
                    self.space();
                }
                print_fn(self);
                self.cursor.advance_to(span.hi(), true);
            }
        }
    }
    /// Heuristically determines whether `modifier` actually names a base
    /// contract (and should therefore keep its call parentheses).
    fn is_modifier_a_base_contract(
        &self,
        kind: ast::FunctionKind,
        modifier: &'ast ast::Modifier<'ast>,
    ) -> bool {
        // Add `()` in functions when the modifier is a base contract.
        // HACK: heuristics:
        // 1. exactly matches the name of a base contract as declared in the `contract is`;
        // this does not account for inheritance;
        let is_contract_base = self.contract.is_some_and(|contract| {
            contract
                .bases
                .iter()
                .any(|contract_base| contract_base.name.to_string() == modifier.name.to_string())
        });
        // 2. assume that title case names in constructors are bases.
        // LEGACY: constructors used to also be `function NameOfContract...`; not checked.
        let is_constructor = matches!(kind, ast::FunctionKind::Constructor);
        // LEGACY: we are checking the beginning of the path, not the last segment.
        is_contract_base
            || (is_constructor
                && modifier.name.first().name.as_str().starts_with(char::is_uppercase))
    }
fn print_error(&mut self, err: &'ast ast::ItemError<'ast>) {
let ast::ItemError { name, parameters } = err;
self.word("error ");
self.print_ident(name);
self.print_parameter_list(
parameters,
parameters.span,
if self.config.prefer_compact.errors() {
ListFormat::compact()
} else {
ListFormat::consistent()
},
);
self.word(";");
}
fn print_event(&mut self, event: &'ast ast::ItemEvent<'ast>) {
let ast::ItemEvent { name, parameters, anonymous } = event;
self.word("event ");
self.print_ident(name);
self.print_parameter_list(
parameters,
parameters.span,
if self.config.prefer_compact.events() {
ListFormat::compact().break_cmnts()
} else {
ListFormat::consistent().break_cmnts()
},
);
if *anonymous {
self.word(" anonymous");
}
self.word(";");
}
    /// Prints a variable definition terminated with `;`.
    fn print_var_def(&mut self, var: &'ast ast::VariableDefinition<'ast>) {
        self.print_var(var, true);
        self.word(";");
    }
/// Prints the RHS of an assignment or variable initializer.
fn print_assign_rhs(
&mut self,
rhs: &'ast ast::Expr<'ast>,
lhs_size: usize,
space_left: usize,
ty: Option<&ast::TypeKind<'ast>>,
cache: bool,
) {
// Check if the total expression overflows but the RHS would fit alone on a new line.
// This helps keep the RHS together on a single line when possible.
let rhs_size = self.estimate_size(rhs.span);
let overflows = lhs_size + rhs_size >= space_left;
let fits_alone = rhs_size + self.config.tab_width < space_left;
let fits_alone_no_cmnts =
fits_alone && !self.has_comment_between(rhs.span.lo(), rhs.span.hi());
let force_break = overflows && fits_alone_no_cmnts;
// Set up precall size tracking
if lhs_size <= space_left {
self.neverbreak();
self.call_stack.add_precall(lhs_size + 1);
} else {
self.call_stack.add_precall(space_left + self.config.tab_width);
}
// Handle comments before the RHS expression
if let Some(cmnt) = self.peek_comment_before(rhs.span.lo())
&& self.inline_config.is_disabled(cmnt.span)
{
self.print_sep(Separator::Nbsp);
}
if self
.print_comments(
rhs.span.lo(),
CommentConfig::skip_ws().mixed_no_break().mixed_prev_space(),
)
.is_some_and(|cmnt| cmnt.is_trailing())
{
self.break_offset_if_not_bol(SIZE_INFINITY as usize, self.ind, false);
}
// Match on expression kind to determine formatting strategy
match &rhs.kind {
ast::ExprKind::Lit(lit, ..) if lit.is_str_concatenation() => {
// String concatenations stay on the same line with nbsp
self.print_sep(Separator::Nbsp);
self.neverbreak();
self.s.ibox(self.ind);
self.print_expr(rhs);
self.end();
}
ast::ExprKind::Lit(..) if ty.is_none() && !fits_alone => {
// Long string in assign expr goes on its own line
self.print_sep(Separator::Space);
self.s.offset(self.ind);
self.print_expr(rhs);
}
ast::ExprKind::Binary(lhs, op, _) => {
let print_inline = |this: &mut Self| {
this.print_sep(Separator::Nbsp);
this.neverbreak();
this.print_expr(rhs);
};
let print_with_break = |this: &mut Self, force_break: bool| {
if !this.is_bol_or_only_ind() {
if force_break {
this.print_sep(Separator::Hardbreak);
} else {
this.print_sep(Separator::Space);
}
}
this.s.offset(this.ind);
this.s.ibox(this.ind);
this.print_expr(rhs);
this.end();
};
// Binary expressions: check if we need to break and indent
if force_break {
print_with_break(self, true);
} else if self.estimate_lhs_size(rhs, op) + lhs_size > space_left {
if has_complex_successor(&rhs.kind, true)
&& get_callee_head_size(lhs) + lhs_size <= space_left
{
// Keep complex exprs (where callee fits) inline, as they will have breaks
if matches!(lhs.kind, ast::ExprKind::Call(..)) {
self.s.ibox(-self.ind);
print_inline(self);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/fmt/src/state/mod.rs | crates/fmt/src/state/mod.rs | #![allow(clippy::too_many_arguments)]
use crate::{
FormatterConfig, InlineConfig,
pp::{self, BreakToken, SIZE_INFINITY, Token},
state::sol::BinOpGroup,
};
use foundry_common::{
comments::{Comment, CommentStyle, Comments, estimate_line_width, line_with_tabs},
iter::IterDelimited,
};
use foundry_config::fmt::{DocCommentStyle, IndentStyle};
use solar::parse::{
ast::{self, Span},
interface::{BytePos, SourceMap},
token,
};
use std::{borrow::Cow, ops::Deref, sync::Arc};
mod common;
mod sol;
mod yul;
/// Specifies the nature of a complex call.
///
/// Stored in [`CallContext::kind`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(super) enum CallContextKind {
    /// A chained method call, `a().b()`.
    Chained,
    /// A nested function call, `a(b())`.
    Nested,
}
/// Formatting context for a call expression.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(super) struct CallContext {
    /// The kind of call.
    pub(super) kind: CallContextKind,
    /// The size of the callee's head, excluding its arguments.
    pub(super) size: usize,
}
impl CallContext {
    /// Creates a context for a nested call, `a(b())`.
    pub(super) fn nested(size: usize) -> Self {
        Self { kind: CallContextKind::Nested, size }
    }
    /// Creates a context for a chained call, `a().b()`.
    pub(super) fn chained(size: usize) -> Self {
        Self { kind: CallContextKind::Chained, size }
    }
    /// Whether this is a nested call.
    pub(super) fn is_nested(&self) -> bool {
        self.kind == CallContextKind::Nested
    }
    /// Whether this is a chained call.
    pub(super) fn is_chained(&self) -> bool {
        self.kind == CallContextKind::Chained
    }
}
/// Stack of in-flight call contexts (innermost last), plus the size
/// accumulated via `add_precall` before printing the current call.
#[derive(Debug, Default)]
pub(super) struct CallStack {
    // Innermost call context is the last element.
    stack: Vec<CallContext>,
    // Accumulated via `add_precall`; cleared by `reset_precall`.
    precall_size: usize,
}
impl Deref for CallStack {
    type Target = [CallContext];
    /// Exposes the stack as a read-only slice of call contexts.
    fn deref(&self) -> &Self::Target {
        &self.stack
    }
}
impl CallStack {
    /// Pushes a new call context onto the stack.
    pub(crate) fn push(&mut self, call: CallContext) {
        self.stack.push(call);
    }
    /// Pops and returns the innermost call context, if any.
    pub(crate) fn pop(&mut self) -> Option<CallContext> {
        self.stack.pop()
    }
    /// Increases the accumulated pre-call size by `size`.
    pub(crate) fn add_precall(&mut self, size: usize) {
        self.precall_size += size;
    }
    /// Clears the accumulated pre-call size.
    pub(crate) fn reset_precall(&mut self) {
        self.precall_size = 0;
    }
    /// Whether the innermost call context is a nested call.
    pub(crate) fn is_nested(&self) -> bool {
        matches!(self.stack.last(), Some(call) if call.is_nested())
    }
    /// Whether the innermost call context is a chained call.
    pub(crate) fn is_chain(&self) -> bool {
        matches!(self.stack.last(), Some(call) if call.is_chained())
    }
}
/// Shared state threaded through the whole formatting pass.
pub(super) struct State<'sess, 'ast> {
    // CORE COMPONENTS
    /// The underlying pretty-printer.
    pub(super) s: pp::Printer,
    /// Indentation step in columns (`config.tab_width` as `isize`).
    ind: isize,
    /// Source map used to resolve spans back to source text.
    sm: &'sess SourceMap,
    /// Queue of comments pending printing.
    pub(super) comments: Comments,
    /// Formatter configuration.
    config: Arc<FormatterConfig>,
    /// Spans where formatting is disabled via inline config.
    inline_config: InlineConfig<()>,
    /// Current position within the original source.
    cursor: SourcePos,
    // FORMATTING CONTEXT:
    // Whether the source file uses CRLF (`\r\n`) line endings.
    has_crlf: bool,
    // The current contract being formatted, if inside a contract definition.
    contract: Option<&'ast ast::ItemContract<'ast>>,
    // Current block nesting depth (incremented for each `{...}` block entered).
    block_depth: usize,
    // Stack tracking nested and chained function calls.
    call_stack: CallStack,
    // Whether the current statement should be formatted as a single line, or not.
    single_line_stmt: Option<bool>,
    // The current binary expression chain context, if inside one.
    binary_expr: Option<BinOpGroup>,
    // Whether inside a `return` statement that contains a binary expression, or not.
    return_bin_expr: bool,
    // Whether inside a call with call options and at least one argument.
    call_with_opts_and_args: bool,
    // Whether to skip the index soft breaks because the callee fits inline.
    skip_index_break: bool,
    // Whether inside an `emit` or `revert` call with a qualified path, or not.
    emit_or_revert: bool,
    // Whether inside a variable initialization expression, or not.
    var_init: bool,
}
impl std::ops::Deref for State<'_, '_> {
    type Target = pp::Printer;
    /// Allows calling printer methods directly on the state.
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.s
    }
}
impl std::ops::DerefMut for State<'_, '_> {
    /// Allows calling mutating printer methods directly on the state.
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.s
    }
}
/// A cursor into the original source being formatted.
struct SourcePos {
    // Current byte offset into the source file.
    pos: BytePos,
    // NOTE(review): appears to track whether the formatter (as opposed to a
    // verbatim copy of a disabled region) owns this position — confirm.
    enabled: bool,
}
impl SourcePos {
    /// Advances the cursor by `bytes` bytes.
    pub(super) fn advance(&mut self, bytes: u32) {
        self.pos += BytePos(bytes);
    }

    /// Moves the cursor forward to `pos` — never backwards — and records
    /// whether regular printing is enabled at the new position.
    pub(super) fn advance_to(&mut self, pos: BytePos, enabled: bool) {
        if self.pos < pos {
            self.pos = pos;
        }
        self.enabled = enabled;
    }

    /// Skips a line terminator: two bytes for CRLF, one for LF.
    pub(super) fn next_line(&mut self, is_at_crlf: bool) {
        self.pos += if is_at_crlf { 2 } else { 1 };
    }

    /// Returns the span from the current cursor position up to `to`.
    pub(super) fn span(&self, to: BytePos) -> Span {
        Span::new(self.pos, to)
    }
}
/// Whitespace separators the formatter can emit between tokens.
pub(super) enum Separator {
    /// Non-breaking space.
    Nbsp,
    /// Breakable space.
    Space,
    /// Forced line break.
    Hardbreak,
    /// Forwarded to `Printer::space_or_nbsp` with the wrapped flag.
    SpaceOrNbsp(bool),
}
impl Separator {
    /// Emits this separator through the printer, then advances `cursor` past the
    /// line terminator (two bytes when the source sits at a CRLF sequence).
    fn print(&self, p: &mut pp::Printer, cursor: &mut SourcePos, is_at_crlf: bool) {
        match *self {
            Self::Nbsp => p.nbsp(),
            Self::Space => p.space(),
            Self::Hardbreak => p.hardbreak(),
            Self::SpaceOrNbsp(breaks) => p.space_or_nbsp(breaks),
        }
        cursor.next_line(is_at_crlf);
    }
}
/// Generic methods
impl<'sess> State<'sess, '_> {
    /// Creates a new formatter state over `sm` with the given configuration and
    /// pre-collected comment queue. The cursor starts at byte 0 with printing
    /// enabled; all formatting-context flags start cleared.
    pub(super) fn new(
        sm: &'sess SourceMap,
        config: Arc<FormatterConfig>,
        inline_config: InlineConfig<()>,
        comments: Comments,
    ) -> Self {
        Self {
            s: pp::Printer::new(
                config.line_length,
                // Tab-indented output needs the tab width to compute visual columns.
                if matches!(config.style, IndentStyle::Tab) {
                    Some(config.tab_width)
                } else {
                    None
                },
            ),
            ind: config.tab_width as isize,
            sm,
            comments,
            config,
            inline_config,
            cursor: SourcePos { pos: BytePos::from_u32(0), enabled: true },
            has_crlf: false,
            contract: None,
            single_line_stmt: None,
            call_with_opts_and_args: false,
            skip_index_break: false,
            binary_expr: None,
            return_bin_expr: false,
            emit_or_revert: false,
            var_init: false,
            block_depth: 0,
            call_stack: CallStack::default(),
        }
    }
    /// Checks a span of the source for a carriage return (`\r`) to determine if the file
    /// uses CRLF line endings.
    ///
    /// If a `\r` is found, `self.has_crlf` is set to `true`. This is intended to be
    /// called once at the beginning of the formatting process for efficiency.
    fn check_crlf(&mut self, span: Span) {
        if let Ok(snip) = self.sm.span_to_snippet(span)
            && snip.contains('\r')
        {
            self.has_crlf = true;
        }
    }
    /// Checks if the cursor is currently positioned at the start of a CRLF sequence (`\r\n`).
    /// The check is only meaningful if `self.has_crlf` is true.
    fn is_at_crlf(&self) -> bool {
        self.has_crlf && self.char_at(self.cursor.pos) == Some('\r')
    }
    /// Computes the space left, bounded by the max space left.
    fn space_left(&self) -> usize {
        std::cmp::min(self.s.space_left(), self.max_space_left(0))
    }
    /// Computes the maximum space left given the context information available:
    /// `block_depth`, `tab_width`, and a user-defined unavailable size `prefix_len`.
    fn max_space_left(&self, prefix_len: usize) -> usize {
        self.config
            .line_length
            .saturating_sub(self.block_depth * self.config.tab_width + prefix_len)
    }
    /// Emits a break with offset `off` unless already at the beginning of a line,
    /// folding the offset into a previously buffered hardbreak when necessary.
    fn break_offset_if_not_bol(&mut self, n: usize, off: isize, search: bool) {
        // When searching, the break token is expected to be inside a closed box. Thus, we will
        // traverse the buffer and evaluate the first non-end token.
        if search {
            // We do something pretty sketchy here: tuck the nonzero offset-adjustment we
            // were going to deposit along with the break into the previous hardbreak.
            self.find_and_replace_last_token_still_buffered(
                pp::Printer::hardbreak_tok_offset(off),
                |token| token.is_hardbreak(),
            );
            return;
        }
        // When not explicitly searching, the break token is expected to be the last token.
        if !self.is_beginning_of_line() {
            self.break_offset(n, off)
        } else if off != 0
            && let Some(last_token) = self.last_token_still_buffered()
            && last_token.is_hardbreak()
        {
            // We do something pretty sketchy here: tuck the nonzero offset-adjustment we
            // were going to deposit along with the break into the previous hardbreak.
            self.replace_last_token_still_buffered(pp::Printer::hardbreak_tok_offset(off));
        }
    }
    /// Emits the separator used just inside braces: a breakable space when
    /// `bracket_spacing` is configured, a zero-width break otherwise.
    fn braces_break(&mut self) {
        if self.config.bracket_spacing {
            self.space();
        } else {
            self.zerobreak();
        }
    }
}
/// Span to source.
impl State<'_, '_> {
    /// Returns the character at byte position `pos`, or `None` if out of range.
    fn char_at(&self, pos: BytePos) -> Option<char> {
        let res = self.sm.lookup_byte_offset(pos);
        res.sf.src.get(res.pos.to_usize()..)?.chars().next()
    }
    /// Prints the source text of `span` verbatim, dropping any queued comments
    /// the span already covers and advancing the cursor to `span.hi()`.
    ///
    /// Panics if the span cannot be resolved to a snippet.
    fn print_span(&mut self, span: Span) {
        match self.sm.span_to_snippet(span) {
            Ok(s) => self.s.word(if matches!(self.config.style, IndentStyle::Tab) {
                snippet_with_tabs(s, self.config.tab_width)
            } else {
                s
            }),
            Err(e) => panic!("failed to print {span:?}: {e:#?}"),
        }
        // Drop comments that are included in the span.
        while let Some(cmnt) = self.peek_comment() {
            if cmnt.pos() >= span.hi() {
                break;
            }
            let _ = self.next_comment().unwrap();
        }
        // Update cursor; `false` marks it as advanced by verbatim printing.
        self.cursor.advance_to(span.hi(), false);
    }
    /// Returns `true` if the span is disabled and has been printed as-is.
    #[must_use]
    fn handle_span(&mut self, span: Span, skip_prev_cmnts: bool) -> bool {
        if !skip_prev_cmnts {
            self.print_comments(span.lo(), CommentConfig::default());
        }
        self.print_span_if_disabled(span)
    }
    /// Returns `true` if the span is disabled and has been printed as-is.
    #[inline]
    #[must_use]
    fn print_span_if_disabled(&mut self, span: Span) -> bool {
        // Check the span from the current cursor first, then the span itself.
        let cursor_span = self.cursor.span(span.hi());
        if self.inline_config.is_disabled(cursor_span) {
            self.print_span_cold(cursor_span);
            return true;
        }
        if self.inline_config.is_disabled(span) {
            self.print_span_cold(span);
            return true;
        }
        false
    }
    /// Out-of-line verbatim printing; marked cold since disabled spans are rare.
    #[cold]
    fn print_span_cold(&mut self, span: Span) {
        self.print_span(span);
    }
    /// Prints a token sequence verbatim, spanning from the first to the last token.
    fn print_tokens(&mut self, tokens: &[token::Token]) {
        // Leave unchanged.
        let span = Span::join_first_last(tokens.iter().map(|t| t.span));
        self.print_span(span);
    }
    /// Prints a word and advances the cursor by its byte length.
    fn print_word(&mut self, w: impl Into<Cow<'static, str>>) {
        let cow = w.into();
        self.cursor.advance(cow.len() as u32);
        self.word(cow);
    }
    /// Prints `sep`, respecting disabled spans covering the upcoming line terminator.
    fn print_sep(&mut self, sep: Separator) {
        if self.handle_span(
            self.cursor.span(self.cursor.pos + if self.is_at_crlf() { 2 } else { 1 }),
            true,
        ) {
            return;
        }
        self.print_sep_unhandled(sep);
    }
    /// Prints `sep` without checking disabled spans.
    fn print_sep_unhandled(&mut self, sep: Separator) {
        let is_at_crlf = self.is_at_crlf();
        sep.print(&mut self.s, &mut self.cursor, is_at_crlf);
    }
    /// Prints an identifier, emitting any comments that precede it first.
    fn print_ident(&mut self, ident: &ast::Ident) {
        if self.handle_span(ident.span, true) {
            return;
        }
        self.print_comments(ident.span.lo(), CommentConfig::skip_ws());
        self.word(ident.to_string());
    }
    /// Runs `f` between a printed `(` and `)` pair.
    fn print_inside_parens<F>(&mut self, f: F)
    where
        F: FnOnce(&mut Self),
    {
        self.print_word("(");
        f(self);
        self.print_word(")");
    }
    /// Estimates the single-line rendered width of `span`, collapsing line breaks
    /// and accounting for mandatory separating spaces. Falls back to the raw byte
    /// length when the snippet cannot be resolved.
    fn estimate_size(&self, span: Span) -> usize {
        if let Ok(snip) = self.sm.span_to_snippet(span) {
            let (mut size, mut first, mut prev_needs_space) = (0, true, false);
            for line in snip.lines() {
                let line = line.trim();
                if prev_needs_space {
                    size += 1;
                } else if !first && let Some(char) = line.chars().next() {
                    // A line break or a space are required if this line:
                    // - starts with an operator.
                    // - starts with one of the ternary operators
                    // - starts with a bracket and fmt config forces bracket spacing.
                    match char {
                        '&' | '|' | '=' | '>' | '<' | '+' | '-' | '*' | '/' | '%' | '^' | '?'
                        | ':' => size += 1,
                        '}' | ')' | ']' if self.config.bracket_spacing => size += 1,
                        _ => (),
                    }
                }
                first = false;
                // trim spaces before and after mixed comments
                let mut search = line;
                loop {
                    if let Some((lhs, comment)) = search.split_once(r#"/*"#) {
                        size += lhs.trim_end().len() + 2;
                        search = comment;
                    } else if let Some((comment, rhs)) = search.split_once(r#"*/"#) {
                        size += comment.len() + 2;
                        search = rhs;
                    } else {
                        size += search.trim().len();
                        break;
                    }
                }
                // Next line requires a line break if this one:
                // - ends with a bracket and fmt config forces bracket spacing.
                // - ends with ',' a line break or a space are required.
                // - ends with ';' a line break is required.
                prev_needs_space = match line.chars().next_back() {
                    Some('[') | Some('(') | Some('{') => self.config.bracket_spacing,
                    Some(',') | Some(';') => true,
                    _ => false,
                };
            }
            return size;
        }
        span.to_range().len()
    }
    /// Returns `true` if byte positions `a` and `b` fall on the same source line.
    fn same_source_line(&self, a: BytePos, b: BytePos) -> bool {
        self.sm.lookup_char_pos(a).line == self.sm.lookup_char_pos(b).line
    }
}
/// Comment-related methods.
impl<'sess> State<'sess, '_> {
    /// Returns `None` if the span is disabled and has been printed as-is.
    ///
    /// Otherwise returns the comment unchanged for regular printing. `skip_break`
    /// suppresses the hardbreak that normally follows verbatim isolated/trailing
    /// comments.
    #[must_use]
    fn handle_comment(&mut self, cmnt: Comment, skip_break: bool) -> Option<Comment> {
        if self.cursor.enabled {
            if self.inline_config.is_disabled(cmnt.span) {
                // Keep a separating space before a verbatim trailing comment.
                if cmnt.style.is_trailing() && !self.last_token_is_space() {
                    self.nbsp();
                }
                self.print_span_cold(cmnt.span);
                if !skip_break && (cmnt.style.is_isolated() || cmnt.style.is_trailing()) {
                    self.print_sep(Separator::Hardbreak);
                }
                return None;
            }
        } else if self.print_span_if_disabled(cmnt.span) {
            if !skip_break && (cmnt.style.is_isolated() || cmnt.style.is_trailing()) {
                self.print_sep(Separator::Hardbreak);
            }
            return None;
        }
        Some(cmnt)
    }
fn cmnt_config(&self) -> CommentConfig {
CommentConfig { ..Default::default() }
}
    /// Doc comments are emitted through the comment queue (`self.comments`),
    /// so this is deliberately a no-op.
    fn print_docs(&mut self, docs: &'_ ast::DocComments<'_>) {
        // Intentionally no-op. Handled with `self.comments`.
        let _ = docs;
    }
    /// Prints comments that are before the given position.
    ///
    /// Returns `Some` with the style of the last comment printed, or `None` if no comment was
    /// printed.
    fn print_comments(&mut self, pos: BytePos, mut config: CommentConfig) -> Option<CommentStyle> {
        let mut last_style: Option<CommentStyle> = None;
        let mut is_leading = true;
        // Per-comment overrides below are reset from this cache after each print.
        let config_cache = config;
        let mut buffered_blank = None;
        while self.peek_comment().is_some_and(|c| c.pos() < pos) {
            let mut cmnt = self.next_comment().unwrap();
            let style_cache = cmnt.style;
            // Merge consecutive line doc comments when converting to block style
            if self.config.docs_style == foundry_config::fmt::DocCommentStyle::Block
                && cmnt.is_doc
                && cmnt.kind == ast::CommentKind::Line
            {
                let mut ref_line = self.sm.lookup_char_pos(cmnt.span.hi()).line;
                while let Some(next_cmnt) = self.peek_comment() {
                    // Only merge doc line comments on directly consecutive lines.
                    if !next_cmnt.is_doc
                        || next_cmnt.kind != ast::CommentKind::Line
                        || ref_line + 1 != self.sm.lookup_char_pos(next_cmnt.span.lo()).line
                    {
                        break;
                    }
                    let next_to_merge = self.next_comment().unwrap();
                    cmnt.lines.extend(next_to_merge.lines);
                    cmnt.span = cmnt.span.to(next_to_merge.span);
                    ref_line += 1;
                }
            }
            // Ensure breaks are never skipped when there are multiple comments
            if self.peek_comment_before(pos).is_some() {
                config.iso_no_break = false;
                config.trailing_no_break = false;
            }
            // Handle disabled comments
            let Some(cmnt) = self.handle_comment(
                cmnt,
                if style_cache.is_isolated() {
                    config.iso_no_break
                } else {
                    config.trailing_no_break
                },
            ) else {
                // Printed verbatim; still record its style for the caller.
                last_style = Some(style_cache);
                continue;
            };
            if cmnt.style.is_blank() {
                match config.skip_blanks {
                    Some(Skip::All) => continue,
                    Some(Skip::Leading { resettable: true }) if is_leading => continue,
                    Some(Skip::Leading { resettable: false }) if last_style.is_none() => continue,
                    Some(Skip::Trailing) => {
                        // Defer the blank; it is only printed if a comment follows.
                        buffered_blank = Some(cmnt);
                        continue;
                    }
                    _ => (),
                }
            // Never print blank lines after docs comments
            } else if !cmnt.is_doc {
                is_leading = false;
            }
            if let Some(blank) = buffered_blank.take() {
                self.print_comment(blank, config);
            }
            // Handle mixed with follow-up comment
            if cmnt.style.is_mixed() {
                if let Some(cmnt) = self.peek_comment_before(pos) {
                    config.mixed_no_break_prev = true;
                    config.mixed_no_break_post = true;
                    config.mixed_post_nbsp = cmnt.style.is_mixed();
                }
                // Ensure consecutive mixed comments don't have a double-space
                if last_style.is_some_and(|s| s.is_mixed()) {
                    config.mixed_no_break_prev = true;
                    config.mixed_no_break_post = true;
                    config.mixed_prev_space = false;
                }
            } else if config.offset != 0
                && cmnt.style.is_isolated()
                && last_style.is_some_and(|s| s.is_isolated())
            {
                self.offset(config.offset);
            }
            last_style = Some(cmnt.style);
            self.print_comment(cmnt, config);
            // Restore the caller-provided config for the next comment.
            config = config_cache;
        }
        last_style
    }
    /// Prints a line, wrapping it if it starts with the given prefix.
    ///
    /// Words are separated by soft breaks carrying `break_offset`; runs of
    /// whitespace are preserved via the break's `blank_space`. `is_doc` keeps
    /// leading whitespace after the prefix intact.
    fn print_wrapped_line(
        &mut self,
        line: &str,
        prefix: &'static str,
        break_offset: isize,
        is_doc: bool,
    ) {
        // Lines without the prefix are printed untouched.
        if !line.starts_with(prefix) {
            self.word(line.to_owned());
            return;
        }
        // Prefix re-emitted after a wrap break; gains a trailing space when the
        // line has content.
        fn post_break_prefix(prefix: &'static str, has_content: bool) -> &'static str {
            if !has_content {
                return prefix;
            }
            match prefix {
                "///" => "/// ",
                "//" => "// ",
                "/*" => "/* ",
                " *" => " * ",
                _ => prefix,
            }
        }
        self.ibox(0);
        self.word(prefix);
        let content = &line[prefix.len()..];
        let content = if is_doc {
            // Doc comments preserve leading whitespaces (right after the prefix) as nbps.
            let ws_len = content
                .char_indices()
                .take_while(|(_, c)| c.is_whitespace())
                .last()
                .map_or(0, |(idx, c)| idx + c.len_utf8());
            let (leading_ws, rest) = content.split_at(ws_len);
            if !leading_ws.is_empty() {
                self.word(leading_ws.to_owned());
            }
            rest
        } else {
            // Non-doc comments: replace first whitespace with nbsp, rest of content continues
            if let Some(first_char) = content.chars().next() {
                if first_char.is_whitespace() {
                    self.nbsp();
                    &content[first_char.len_utf8()..]
                } else {
                    content
                }
            } else {
                ""
            }
        };
        let post_break = post_break_prefix(prefix, !content.is_empty());
        // Process content character by character to preserve consecutive whitespaces
        let (mut chars, mut current_word) = (content.chars().peekable(), String::new());
        while let Some(ch) = chars.next() {
            if ch.is_whitespace() {
                // Print current word
                if !current_word.is_empty() {
                    self.word(std::mem::take(&mut current_word));
                }
                // Preserve multiple spaces while adding a single break
                let mut ws_count = 1;
                while chars.peek().is_some_and(|c| c.is_whitespace()) {
                    ws_count += 1;
                    chars.next();
                }
                self.s.scan_break(BreakToken {
                    offset: break_offset,
                    blank_space: ws_count,
                    post_break: if post_break.starts_with("/*") { None } else { Some(post_break) },
                    ..Default::default()
                });
                continue;
            }
            current_word.push(ch);
        }
        // Print final word
        if !current_word.is_empty() {
            self.word(current_word);
        }
        self.end();
    }
/// Merges consecutive line comments to avoid orphan words.
fn merge_comment_lines(&self, lines: &[String], prefix: &str) -> Vec<String> {
// Do not apply smart merging to block comments
if lines.is_empty() || lines.len() < 2 || !prefix.starts_with("//") {
return lines.to_vec();
}
let mut result = Vec::new();
let mut i = 0;
while i < lines.len() {
let current_line = &lines[i];
// Keep empty lines, and non-prefixed lines, untouched
if current_line.trim().is_empty() || !current_line.starts_with(prefix) {
result.push(current_line.clone());
i += 1;
continue;
}
if i + 1 < lines.len() {
let next_line = &lines[i + 1];
// Check if next line is has the same prefix and is not empty
if next_line.starts_with(prefix) && !next_line.trim().is_empty() {
// Only merge if the current line doesn't fit within available width
if estimate_line_width(current_line, self.config.tab_width) > self.space_left()
{
// Merge the lines and let the wrapper handle breaking if needed
let merged_line = format!(
"{current_line} {next_content}",
next_content = &next_line[prefix.len()..].trim_start()
);
result.push(merged_line);
// Skip both lines since they are merged
i += 2;
continue;
}
}
}
// No merge possible, keep the line as-is
result.push(current_line.clone());
i += 1;
}
result
}
    /// Prints a single comment according to its style and advances the cursor
    /// past it.
    ///
    /// Applies the configured doc-comment restyling (`config.docs_style`) and,
    /// when `wrap_comments` is enabled, merges and re-wraps the comment lines.
    fn print_comment(&mut self, mut cmnt: Comment, mut config: CommentConfig) {
        self.cursor.advance_to(cmnt.span.hi(), true);
        if cmnt.is_doc {
            cmnt = style_doc_comment(self.config.docs_style, cmnt);
        }
        match cmnt.style {
            // Comment sharing a line with code around it.
            CommentStyle::Mixed => {
                let Some(prefix) = cmnt.prefix() else { return };
                let never_break = self.last_token_is_neverbreak();
                if !self.is_bol_or_only_ind() {
                    // Pick the separator before the comment from the config flags.
                    match (never_break || config.mixed_no_break_prev, config.mixed_prev_space) {
                        (false, true) => config.space(&mut self.s),
                        (false, false) => config.zerobreak(&mut self.s),
                        (true, true) => self.nbsp(),
                        (true, false) => (),
                    };
                }
                if self.config.wrap_comments {
                    // Merge and wrap comments
                    let merged_lines = self.merge_comment_lines(&cmnt.lines, prefix);
                    for (pos, line) in merged_lines.into_iter().delimited() {
                        self.print_wrapped_line(&line, prefix, 0, cmnt.is_doc);
                        if !pos.is_last {
                            self.hardbreak();
                        }
                    }
                } else {
                    // No wrapping, print as-is
                    for (pos, line) in cmnt.lines.into_iter().delimited() {
                        self.word(line);
                        if !pos.is_last {
                            self.hardbreak();
                        }
                    }
                }
                if config.mixed_post_nbsp {
                    config.nbsp_or_space(self.config.wrap_comments, &mut self.s);
                    self.cursor.advance(1);
                } else if !config.mixed_no_break_post {
                    config.space(&mut self.s);
                    self.cursor.advance(1);
                }
            }
            // Comment on its own line(s), not attached to trailing code.
            CommentStyle::Isolated => {
                let Some(mut prefix) = cmnt.prefix() else { return };
                if !config.iso_no_break {
                    config.hardbreak_if_not_bol(self.is_bol_or_only_ind(), &mut self.s);
                }
                if self.config.wrap_comments {
                    // Merge and wrap comments
                    let merged_lines = self.merge_comment_lines(&cmnt.lines, prefix);
                    for (pos, line) in merged_lines.into_iter().delimited() {
                        // Hardbreak that also advances the cursor after the last line.
                        let hb = |this: &mut Self| {
                            this.hardbreak();
                            if pos.is_last {
                                this.cursor.next_line(this.is_at_crlf());
                            }
                        };
                        if line.is_empty() {
                            hb(self);
                            continue;
                        }
                        if pos.is_first {
                            self.ibox(config.offset);
                            // `/**` block docs: emit the opener alone, continue with ` * `.
                            if cmnt.is_doc && matches!(prefix, "/**") {
                                self.word(prefix);
                                hb(self);
                                prefix = " * ";
                                continue;
                            }
                        }
                        self.print_wrapped_line(&line, prefix, 0, cmnt.is_doc);
                        if pos.is_last {
                            self.end();
                            if !config.iso_no_break {
                                hb(self);
                            }
                        } else {
                            hb(self);
                        }
                    }
                } else {
                    // No wrapping, print as-is
                    for (pos, line) in cmnt.lines.into_iter().delimited() {
                        let hb = |this: &mut Self| {
                            this.hardbreak();
                            if pos.is_last {
                                this.cursor.next_line(this.is_at_crlf());
                            }
                        };
                        if line.is_empty() {
                            hb(self);
                            continue;
                        }
                        if pos.is_first {
                            self.ibox(config.offset);
                            if cmnt.is_doc && matches!(prefix, "/**") {
                                self.word(prefix);
                                hb(self);
                                prefix = " * ";
                                continue;
                            }
                        }
                        self.word(line);
                        if pos.is_last {
                            self.end();
                            if !config.iso_no_break {
                                hb(self);
                            }
                        } else {
                            hb(self);
                        }
                    }
                }
            }
            // Comment following code on the same line.
            CommentStyle::Trailing => {
                let Some(prefix) = cmnt.prefix() else { return };
                self.neverbreak();
                if !self.is_bol_or_only_ind() {
                    self.nbsp();
                }
                if !self.config.wrap_comments && cmnt.lines.len() == 1 {
                    // Fast path: single unwrapped line.
                    self.word(cmnt.lines.pop().unwrap());
                } else if self.config.wrap_comments {
                    // Line/doc comments continue at the same column; block comments indent.
                    if cmnt.is_doc || matches!(cmnt.kind, ast::CommentKind::Line) {
                        config.offset = 0;
                    } else {
                        config.offset = self.ind;
                    }
                    for (lpos, line) in cmnt.lines.into_iter().delimited() {
                        if !line.is_empty() {
                            self.print_wrapped_line(&line, prefix, config.offset, cmnt.is_doc);
                        }
                        if !lpos.is_last {
                            config.hardbreak(&mut self.s);
                        }
                    }
                } else {
                    self.visual_align();
                    for (pos, line) in cmnt.lines.into_iter().delimited() {
                        if !line.is_empty() {
                            self.word(line);
                            if !pos.is_last {
                                self.hardbreak();
                            }
                        }
                    }
                    self.end();
                }
                if !config.trailing_no_break {
                    self.print_sep(Separator::Hardbreak);
                }
            }
            // One or more blank lines in the source.
            CommentStyle::BlankLine => {
                // Pre-requisite: ensure that blank links are printed at the beginning of new line.
                if !self.last_token_is_break() && !self.is_bol_or_only_ind() {
                    config.hardbreak(&mut self.s);
                    self.cursor.next_line(self.is_at_crlf());
                }
                // We need to do at least one, possibly two hardbreaks.
                let twice = match self.last_token() {
                    Some(Token::String(s)) => ";" == s,
                    Some(Token::Begin(_)) => true,
                    Some(Token::End) => true,
                    _ => false,
                };
                if twice {
                    config.hardbreak(&mut self.s);
                    self.cursor.next_line(self.is_at_crlf());
                }
                config.hardbreak(&mut self.s);
                self.cursor.next_line(self.is_at_crlf());
            }
        }
    }
    /// Peeks at the next unprinted comment without consuming it.
    fn peek_comment<'b>(&'b self) -> Option<&'b Comment>
    where
        'sess: 'b,
    {
        self.comments.peek()
    }
fn peek_comment_before<'b>(&'b self, pos: BytePos) -> Option<&'b Comment>
where
'sess: 'b,
{
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/fmt/src/state/common.rs | crates/fmt/src/state/common.rs | use super::{CommentConfig, Separator, State};
use crate::pp::{BreakToken, Printer, SIZE_INFINITY};
use foundry_common::iter::IterDelimited;
use foundry_config::fmt as config;
use itertools::{Either, Itertools};
use solar::parse::{
Cursor,
ast::{self, Span},
interface::BytePos,
};
use std::{borrow::Cow, fmt::Debug};
/// Extension helpers for [`ast::Lit`].
pub(crate) trait LitExt<'ast> {
    /// Returns `true` if this is a string literal made up of multiple parts.
    fn is_str_concatenation(&self) -> bool;
}
impl<'ast> LitExt<'ast> for ast::Lit<'ast> {
    /// Checks if the input literal is a string literal with multiple parts.
    fn is_str_concatenation(&self) -> bool {
        matches!(&self.kind, ast::LitKind::Str(_, _, parts) if !parts.is_empty())
    }
}
/// Language-specific pretty printing. Common for both: Solidity + Yul.
impl<'ast> State<'_, 'ast> {
    /// Prints a literal, dispatching on its kind.
    ///
    /// `is_yul` toggles Yul-specific number formatting (see `print_num_literal`:
    /// underscore separators are not emitted for Yul literals).
    pub(super) fn print_lit_inner(&mut self, lit: &'ast ast::Lit<'ast>, is_yul: bool) {
        let ast::Lit { span, symbol, ref kind } = *lit;
        if self.handle_span(span, false) {
            return;
        }
        match *kind {
            ast::LitKind::Str(kind, ..) => {
                // String concatenations are printed part by part inside one ibox.
                self.s.ibox(0);
                for (pos, (span, symbol)) in lit.literals().delimited() {
                    if !self.handle_span(span, false) {
                        // Position of the opening quote, just past the `hex`/`unicode` prefix.
                        let quote_pos = span.lo() + kind.prefix().len() as u32;
                        self.print_str_lit(kind, quote_pos, symbol.as_str());
                    }
                    if !pos.is_last {
                        if !self.print_trailing_comment(span.hi(), None) {
                            self.space_if_not_bol();
                        }
                    } else {
                        self.neverbreak();
                    }
                }
                self.end();
            }
            ast::LitKind::Number(_) | ast::LitKind::Rational(_) => {
                self.print_num_literal(symbol.as_str(), is_yul);
            }
            ast::LitKind::Address(value) => self.word(value.to_string()),
            ast::LitKind::Bool(value) => self.word(if value { "true" } else { "false" }),
            // Fall back to the raw symbol for invalid literals.
            ast::LitKind::Err(_) => self.word(symbol.to_string()),
        }
    }
    /// Normalizes and prints a number literal per `config.number_underscore`:
    /// strips or inserts `_` thousand separators, trims padded zeros in decimal
    /// values, and canonicalizes fractional/exponent parts (`X.` -> `X.0`).
    fn print_num_literal(&mut self, source: &str, is_yul: bool) {
        // Removes all underscores when `b` is set, borrowing otherwise.
        fn strip_underscores_if(b: bool, s: &str) -> Cow<'_, str> {
            if b && s.contains('_') { Cow::Owned(s.replace('_', "")) } else { Cow::Borrowed(s) }
        }
        // Appends `string` to `out`, inserting `_` every 3 digits when configured.
        // `reversed` chunks from the front (used for fractional digits).
        fn add_underscores(
            out: &mut String,
            config: config::NumberUnderscore,
            string: &str,
            is_dec: bool,
            is_yul: bool,
            reversed: bool,
        ) {
            // The underscore thousand separator is only valid in Solidity decimal numbers.
            // It is not supported by hex numbers, nor Yul literals.
            // https://github.com/foundry-rs/foundry/issues/12111
            if !config.is_thousands() || !is_dec || is_yul || string.len() < 5 {
                out.push_str(string);
                return;
            }
            let chunks = if reversed {
                Either::Left(string.as_bytes().chunks(3))
            } else {
                Either::Right(string.as_bytes().rchunks(3).rev())
            }
            .map(|chunk| std::str::from_utf8(chunk).unwrap());
            for chunk in Itertools::intersperse(chunks, "_") {
                out.push_str(chunk);
            }
        }
        debug_assert!(source.is_ascii(), "{source:?}");
        let config = self.config.number_underscore;
        let is_dec = !["0x", "0b", "0o"].iter().any(|prefix| source.starts_with(prefix));
        // Split `<val>[.<fract>][eE<exp>]`; non-decimal literals have no exponent.
        let (val, exp) = if !is_dec {
            (source, "")
        } else {
            source.split_once(['e', 'E']).unwrap_or((source, ""))
        };
        let (val, fract) = val.split_once('.').unwrap_or((val, ""));
        let strip_underscores = !config.is_preserve() || is_yul;
        let mut val = &strip_underscores_if(strip_underscores, val)[..];
        let mut exp = &strip_underscores_if(strip_underscores, exp)[..];
        let mut fract = &strip_underscores_if(strip_underscores, fract)[..];
        // strip any padded 0's
        let mut exp_sign = "";
        if is_dec {
            val = val.trim_start_matches('0');
            fract = fract.trim_end_matches('0');
            (exp_sign, exp) =
                if let Some(exp) = exp.strip_prefix('-') { ("-", exp) } else { ("", exp) };
            exp = exp.trim_start_matches('0');
        }
        let mut out = String::with_capacity(source.len() * 2);
        if val.is_empty() {
            out.push('0');
        } else {
            add_underscores(&mut out, config, val, is_dec, is_yul, false);
        }
        if source.contains('.') {
            out.push('.');
            match (fract.is_empty(), exp.is_empty()) {
                // `X.YeZ`: keep as is
                (false, false) => out.push_str(fract),
                // `X.Y`
                (false, true) => add_underscores(&mut out, config, fract, is_dec, is_yul, true),
                // `X.` -> `X.0`
                (true, _) => out.push('0'),
            };
        }
        if !exp.is_empty() {
            out.push('e');
            out.push_str(exp_sign);
            add_underscores(&mut out, config, exp, is_dec, is_yul, false);
        }
        self.word(out);
    }
    /// `s` should be the *unescaped contents of the string literal*.
    ///
    /// Flushes any comments before the opening quote, then prints the literal
    /// with the configured quote style.
    pub(super) fn print_str_lit(&mut self, kind: ast::StrKind, quote_pos: BytePos, s: &str) {
        self.print_comments(quote_pos, CommentConfig::default());
        let s = self.str_lit_to_string(kind, quote_pos, s);
        self.word(s);
    }
    /// `s` should be the *unescaped contents of the string literal*.
    ///
    /// Renders the literal with the configured quote style and normalizes hex
    /// underscores per `config.hex_underscore`. If re-quoting would no longer
    /// lex as a single token (e.g. the contents contain the chosen quote), the
    /// quotes are swapped back to the alternative quote character.
    fn str_lit_to_string(&self, kind: ast::StrKind, quote_pos: BytePos, s: &str) -> String {
        let prefix = kind.prefix();
        let quote = match self.config.quote_style {
            config::QuoteStyle::Double => '\"',
            config::QuoteStyle::Single => '\'',
            // Preserve whatever quote character the source used.
            config::QuoteStyle::Preserve => self.char_at(quote_pos).unwrap_or_default(),
        };
        debug_assert!(matches!(quote, '\"' | '\''), "{quote:?}");
        let s = solar::parse::interface::data_structures::fmt::from_fn(move |f| {
            if matches!(kind, ast::StrKind::Hex) {
                match self.config.hex_underscore {
                    config::HexUnderscore::Preserve => {}
                    config::HexUnderscore::Remove | config::HexUnderscore::Bytes => {
                        let mut clean = s.to_string().replace('_', "");
                        if matches!(self.config.hex_underscore, config::HexUnderscore::Bytes) {
                            // Re-insert an underscore between every byte (2 hex chars).
                            clean =
                                clean.chars().chunks(2).into_iter().map(|c| c.format("")).join("_");
                        }
                        return f.write_str(&clean);
                    }
                };
            }
            f.write_str(s)
        });
        let mut s = format!("{prefix}{quote}{s}{quote}");
        // If the output is not a single token then revert to the original quote.
        #[allow(unstable_name_collisions)]
        if Cursor::new(&s).exactly_one().is_err() {
            let other_quote = if quote == '\"' { '\'' } else { '\"' };
            {
                // SAFETY: only the two single-byte ASCII quote characters written by
                // `format!` above are overwritten with another single-byte ASCII
                // character, so the buffer remains valid UTF-8.
                let s = unsafe { s.as_bytes_mut() };
                s[prefix.len()] = other_quote as u8;
                s[s.len() - 1] = other_quote as u8;
            }
            debug_assert!(Cursor::new(&s).exactly_one().map(|_| true).unwrap());
        }
        s
    }
    /// Prints an empty tuple `()`, emitting any comments that sit between the
    /// parentheses.
    pub(super) fn print_tuple_empty(&mut self, pos_lo: BytePos, pos_hi: BytePos) {
        if self.handle_span(Span::new(pos_lo, pos_hi), true) {
            return;
        }
        self.print_inside_parens(|state| {
            state.s.cbox(state.ind);
            if let Some(cmnt) =
                state.print_comments(pos_hi, CommentConfig::skip_ws().mixed_prev_space())
            {
                // Undo the box indentation so the closing paren lines up.
                if cmnt.is_mixed() {
                    state.s.offset(-state.ind);
                } else {
                    state.break_offset_if_not_bol(0, -state.ind, false);
                }
            }
            state.end();
        });
    }
    /// Prints a parenthesized, comma-separated list of `values`.
    ///
    /// `print` renders one element; `get_span` yields its span for comment
    /// placement. Empty lists and single-item inline lists take dedicated paths;
    /// everything else delegates to `commasep`.
    pub(super) fn print_tuple<'a, T, P, S>(
        &mut self,
        values: &'a [T],
        pos_lo: BytePos,
        pos_hi: BytePos,
        mut print: P,
        mut get_span: S,
        format: ListFormat,
    ) where
        P: FnMut(&mut Self, &'a T),
        S: FnMut(&T) -> Span,
    {
        if self.handle_span(Span::new(pos_lo, pos_hi), true) {
            return;
        }
        if values.is_empty() {
            self.print_tuple_empty(pos_lo, pos_hi);
            return;
        }
        if !(values.len() == 1 && format.is_inline()) {
            // Use commasep
            self.print_inside_parens(|state| {
                state.commasep(values, pos_lo, pos_hi, print, get_span, format)
            });
            return;
        }
        // Format single-item inline lists directly without boxes
        self.print_inside_parens(|state| {
            let span = get_span(&values[0]);
            state.s.cbox(state.ind);
            let mut skip_break = true;
            // A comment before the item forces the list to break.
            if state.peek_comment_before(span.hi()).is_some() {
                state.hardbreak();
                skip_break = false;
            }
            state.print_comments(span.lo(), CommentConfig::skip_ws().mixed_prev_space());
            print(state, &values[0]);
            if !state.print_trailing_comment(span.hi(), None) && skip_break {
                state.neverbreak();
            } else {
                state.break_offset_if_not_bol(0, -state.ind, false);
            }
            state.end();
        });
    }
    /// Prints `values` as a bracketed, comma-separated array literal `[a, b, c]`
    /// using the compact list format.
    pub(super) fn print_array<'a, T, P, S>(
        &mut self,
        values: &'a [T],
        span: Span,
        print: P,
        get_span: S,
    ) where
        P: FnMut(&mut Self, &'a T),
        S: FnMut(&T) -> Span,
    {
        if self.handle_span(span, false) {
            return;
        }
        self.print_word("[");
        self.commasep(values, span.lo(), span.hi(), print, get_span, ListFormat::compact());
        self.print_word("]");
    }
    /// Handles comments and breaks before the first element of a comma-separated
    /// list.
    ///
    /// Returns `true` when the opening break has been handled here (or is
    /// deferred to the first item), so `commasep` should skip its own initial
    /// break; `false` for an empty list or when no special handling applied.
    pub(super) fn commasep_opening_logic<T, S>(
        &mut self,
        values: &[T],
        mut get_span: S,
        format: ListFormat,
        manual_opening: bool,
    ) -> bool
    where
        S: FnMut(&T) -> Span,
    {
        let Some(span) = values.first().map(&mut get_span) else {
            return false;
        };
        // If first item is uninformed (just a comma), and it has its own comment, skip it.
        // It will be dealt with when printing the item in the main loop of `commasep`.
        if span.is_dummy()
            && let Some(next_pos) = values.get(1).map(|v| get_span(v).lo())
            && self.peek_comment_before(next_pos).is_some()
        {
            return true;
        }
        // Check for comments before the first item.
        if let Some((cmnt_span, cmnt_style)) =
            self.peek_comment_before(span.lo()).map(|c| (c.span, c.style))
        {
            let cmnt_disabled = self.inline_config.is_disabled(cmnt_span);
            // Handle special formatting for disabled code with isolated comments.
            if self.cursor.enabled && cmnt_disabled && cmnt_style.is_isolated() {
                self.print_sep(Separator::Hardbreak);
                if !format.with_delimiters {
                    self.s.offset(self.ind);
                }
            };
            // If manual opening flag is passed, we simply force the break, and skip the comment.
            // It will be dealt with when printing the item in the main loop of `commasep`.
            if manual_opening {
                self.hardbreak();
                self.s.offset(self.ind);
                return true;
            }
            let cmnt_config = if format.with_delimiters {
                CommentConfig::skip_ws().mixed_no_break().mixed_prev_space()
            } else {
                CommentConfig::skip_ws().no_breaks().mixed_prev_space().offset(self.ind)
            };
            // Apply spacing based on comment styles.
            if let Some(last_style) = self.print_comments(span.lo(), cmnt_config) {
                match (cmnt_style.is_mixed(), last_style.is_mixed()) {
                    (true, true) => {
                        if format.breaks_cmnts {
                            self.hardbreak();
                        } else {
                            self.space();
                        }
                        if !format.with_delimiters && !cmnt_disabled {
                            self.s.offset(self.ind);
                        }
                    }
                    (false, true) => {
                        self.nbsp();
                    }
                    (false, false) if !format.with_delimiters && !cmnt_disabled => {
                        self.hardbreak();
                        self.s.offset(self.ind);
                    }
                    _ => {}
                }
            }
            if self.cursor.enabled {
                self.cursor.advance_to(span.lo(), true);
            }
            return true;
        }
        if self.cursor.enabled {
            self.cursor.advance_to(span.lo(), true);
        }
        // No comments: undelimited lists still need an opening break + indent.
        if !values.is_empty() && !format.with_delimiters {
            format.print_break(true, values.len(), &mut self.s);
            self.s.offset(self.ind);
            return true;
        }
        false
    }
pub(super) fn commasep<'a, T, P, S>(
&mut self,
values: &'a [T],
_pos_lo: BytePos,
pos_hi: BytePos,
mut print: P,
mut get_span: S,
format: ListFormat,
) where
P: FnMut(&mut Self, &'a T),
S: FnMut(&T) -> Span,
{
if values.is_empty() {
return;
}
// We can't simply check `peek_comment_before(pos_hi)` cause we would also account for
// comments in the child expression, and those don't matter.
let has_comments =
// check for comments before the first element
self.peek_comment_before(get_span(&values[0]).lo()).is_some() ||
// check for comments between elements
values.windows(2).any(|w| self.peek_comment_between(get_span(&w[0]).hi(), get_span(&w[1]).lo()).is_some()) ||
// check for comments after the last element
self.peek_comment_between(get_span(values.last().unwrap()).hi(), pos_hi).is_some();
// For calls with opts and args, which should break consistently, we need to skip the
// wrapping cbox to prioritize call args breaking before the call opts. Because of that, we
// must manually offset the breaks between args, so that they are properly indented.
let manual_opening =
format.is_consistent() && !format.with_delimiters && self.call_with_opts_and_args;
// When there are comments, we can preserve the cbox, as they will make it break
let manual_offset = !has_comments && manual_opening;
let is_single_without_cmnts = values.len() == 1 && !format.break_single && !has_comments;
let skip_first_break = if format.with_delimiters || format.is_inline() {
self.s.cbox(if format.no_ind { 0 } else { self.ind });
if is_single_without_cmnts {
true
} else {
self.commasep_opening_logic(values, &mut get_span, format, manual_opening)
}
} else {
let res = self.commasep_opening_logic(values, &mut get_span, format, manual_opening);
if !manual_offset {
self.s.cbox(if format.no_ind { 0 } else { self.ind });
}
res
};
if let Some(sym) = format.prev_symbol() {
self.word_space(sym);
} else if is_single_without_cmnts && format.with_space {
self.nbsp();
} else if !skip_first_break && !format.is_inline() {
format.print_break(true, values.len(), &mut self.s);
if manual_offset {
self.s.offset(self.ind);
}
}
if format.is_compact() && !(format.breaks_with_comments() && has_comments) {
self.s.cbox(0);
}
let mut last_delimiter_break = !format.with_delimiters;
let mut skip_last_break =
is_single_without_cmnts || !format.with_delimiters || format.is_inline();
for (i, value) in values.iter().enumerate() {
let is_last = i == values.len() - 1;
if self
.print_comments(get_span(value).lo(), CommentConfig::skip_ws().mixed_prev_space())
.is_some_and(|cmnt| cmnt.is_mixed())
&& format.breaks_cmnts
{
self.hardbreak(); // trailing and isolated comments already hardbreak
}
// Avoid printing the last uninformed item, so that we can handle line breaks.
if !(is_last && get_span(value).is_dummy()) {
print(self, value);
}
let next_span = if is_last { None } else { Some(get_span(&values[i + 1])) };
let next_pos = next_span.map(Span::lo).unwrap_or(pos_hi);
let cmnt_before_next =
self.peek_comment_before(next_pos).map(|cmnt| (cmnt.span, cmnt.style));
if !is_last {
// Handle disabled lines with comments after the value, but before the comma.
if cmnt_before_next.is_some_and(|(cmnt_span, _)| {
let span = self.cursor.span(cmnt_span.lo());
self.inline_config.is_disabled(span)
// NOTE: necessary workaround to patch this edgecase due to lack of spans for the commas.
&& self.sm.span_to_snippet(span).is_ok_and(|snip| !snip.contains(','))
}) {
self.print_comments(
next_pos,
CommentConfig::skip_ws().mixed_no_break().mixed_prev_space(),
);
}
self.print_word(",");
}
if !is_last
&& format.breaks_cmnts
&& cmnt_before_next.is_some_and(|(cmnt_span, cmnt_style)| {
let disabled = self.inline_config.is_disabled(cmnt_span);
(cmnt_style.is_mixed() && !disabled) || (cmnt_style.is_isolated() && disabled)
})
{
self.hardbreak(); // trailing and isolated comments already hardbreak
}
// Print trailing comments.
let comment_config = if !is_last || format.with_delimiters {
CommentConfig::skip_ws().mixed_no_break().mixed_prev_space()
} else {
CommentConfig::skip_ws().no_breaks().mixed_prev_space()
};
let with_trailing = self.print_comments(next_pos, comment_config).is_some();
if is_last && with_trailing {
if self.is_bol_or_only_ind() {
// if a trailing comment is printed at the very end, we have to manually adjust
// the offset to avoid having a double break.
self.break_offset_if_not_bol(0, -self.ind, false);
} else {
self.s.break_offset(SIZE_INFINITY as usize, -self.ind);
}
skip_last_break = true;
last_delimiter_break = false;
}
// Final break if needed before the next value.
if let Some(next_span) = next_span
&& !self.is_bol_or_only_ind()
&& !self.inline_config.is_disabled(next_span)
&& !next_span.is_dummy()
{
format.print_break(false, values.len(), &mut self.s);
if manual_offset {
self.s.offset(self.ind);
}
}
}
if format.is_compact() && !(format.breaks_with_comments() && has_comments) {
self.end();
}
if !skip_last_break {
if let Some(sym) = format.post_symbol() {
format.print_break(false, values.len(), &mut self.s);
self.s.offset(-self.ind);
self.word(sym);
} else {
format.print_break(true, values.len(), &mut self.s);
self.s.offset(-self.ind);
}
} else if is_single_without_cmnts && format.with_space {
self.nbsp();
} else if let Some(sym) = format.post_symbol() {
self.nbsp();
self.word(sym);
}
if !manual_offset {
self.end();
}
self.cursor.advance_to(pos_hi, true);
if last_delimiter_break {
format.print_break(true, values.len(), &mut self.s);
}
}
/// Prints a dot-separated path, allowing a break before each `.` unless the
/// path belongs to an `emit`/`revert` statement.
pub(super) fn print_path(&mut self, path: &'ast ast::PathSlice, consistent_break: bool) {
    // Open a consistent or inconsistent box depending on the requested style.
    match consistent_break {
        true => self.s.cbox(self.ind),
        false => self.s.ibox(self.ind),
    }
    let segments = path.segments();
    let last = segments.len().saturating_sub(1);
    for (i, ident) in segments.iter().enumerate() {
        self.print_ident(ident);
        if i != last {
            // `emit`/`revert` paths never break before the dot.
            if !self.emit_or_revert {
                self.zerobreak();
            }
            self.word(".");
        }
    }
    self.end();
}
/// Prints the items of `block` over multiple lines, wrapped according to
/// `block_format` (with or without braces, with an optional extra offset).
///
/// `print` emits a single item, `get_block_span` returns that item's source
/// span, and `pos_hi` marks the end of the block, used to flush any comments
/// that remain before the closing delimiter.
///
/// Single-element blocks may be collapsed onto one line, and empty blocks are
/// delegated to a dedicated printer.
pub(super) fn print_block_inner<T: Debug>(
    &mut self,
    block: &'ast [T],
    block_format: BlockFormat,
    mut print: impl FnMut(&mut Self, &'ast T),
    mut get_block_span: impl FnMut(&'ast T) -> Span,
    pos_hi: BytePos,
) {
    // Attempt to print in a single line.
    if block_format.attempt_single_line() && block.len() == 1 {
        self.print_single_line_block(block, block_format, print, get_block_span);
        return;
    }
    // Empty blocks with comments require special attention.
    if block.is_empty() {
        self.print_empty_block(block_format, pos_hi);
        return;
    }
    // update block depth
    self.block_depth += 1;
    // Print multiline block comments.
    let block_lo = get_block_span(&block[0]).lo();
    match block_format {
        BlockFormat::NoBraces(None) => {
            // Brace-less, no offset: just flush comments before the first item.
            if !self.handle_span(self.cursor.span(block_lo), false) {
                self.print_comments(block_lo, CommentConfig::default());
            }
            self.s.cbox(0);
        }
        BlockFormat::NoBraces(Some(offset)) => {
            // NOTE(review): presumably checks whether the first byte of the
            // block falls in an inline-config disabled region — confirm the
            // `BytePos(1)` single-byte span is intentional.
            let enabled =
                !self.inline_config.is_disabled(Span::new(block_lo, block_lo + BytePos(1)))
                    && !self.handle_span(self.cursor.span(block_lo), true);
            // Only consider comments that end strictly before the block start.
            match self.peek_comment().and_then(|cmnt| {
                if cmnt.span.hi() < block_lo { Some((cmnt.span, cmnt.style)) } else { None }
            }) {
                Some((span, style)) => {
                    if enabled {
                        // Inline config is not disabled and span not handled
                        if !self.inline_config.is_disabled(span) || style.is_isolated() {
                            self.cursor.advance_to(span.lo(), true);
                            self.break_offset(SIZE_INFINITY as usize, offset);
                        }
                        // Mixed and blank comments handle their own offsets.
                        if let Some(cmnt) = self.print_comments(
                            block_lo,
                            CommentConfig::skip_leading_ws(false).offset(offset),
                        ) && !cmnt.is_mixed()
                            && !cmnt.is_blank()
                        {
                            self.s.offset(offset);
                        }
                    } else if style.is_isolated() {
                        self.print_sep_unhandled(Separator::Hardbreak);
                        self.s.offset(offset);
                    }
                }
                None => {
                    if enabled {
                        self.zerobreak();
                        self.s.offset(offset);
                    } else if self.cursor.enabled {
                        self.print_sep_unhandled(Separator::Space);
                        self.s.offset(offset);
                        self.cursor.advance_to(block_lo, true);
                    }
                }
            }
            self.s.cbox(self.ind);
        }
        _ => {
            // Braced block: open the brace, then break unless the leading
            // comment (if any) is mixed (stays on the same line).
            self.print_word("{");
            self.s.cbox(self.ind);
            if !self.handle_span(self.cursor.span(block_lo), false)
                && self
                    .print_comments(block_lo, CommentConfig::default())
                    .is_none_or(|cmnt| cmnt.is_mixed())
            {
                self.hardbreak_if_nonempty();
            }
        }
    }
    // Print multiline block statements.
    for (i, stmt) in block.iter().enumerate() {
        let is_last = i == block.len() - 1;
        print(self, stmt);
        let is_disabled = self.inline_config.is_disabled(get_block_span(stmt));
        let mut next_enabled = false;
        let mut next_lo = None;
        if !is_last {
            let next_span = get_block_span(&block[i + 1]);
            next_enabled = !self.inline_config.is_disabled(next_span);
            // Only track the next stmt's position when no comment precedes it;
            // otherwise the comment printer owns the breakpoint.
            next_lo =
                self.peek_comment_before(next_span.lo()).is_none().then_some(next_span.lo());
        }
        // when this stmt and the next one are enabled, break normally (except if last stmt)
        // NOTE(review): when `is_last` is true, `next_enabled` is always false,
        // so the right-hand side of this `||` appears unreachable — confirm.
        if !is_disabled
            && next_enabled
            && (!is_last
                || self.peek_comment_before(pos_hi).is_some_and(|cmnt| cmnt.style.is_mixed()))
        {
            self.hardbreak_if_not_bol();
            continue;
        }
        // when this stmt is disabled and the next one is enabled, break if there is no
        // enabled preceding comment. Otherwise the breakpoint is handled by the comment.
        if is_disabled
            && next_enabled
            && let Some(next_lo) = next_lo
            && self
                .peek_comment_before(next_lo)
                .is_none_or(|cmnt| self.inline_config.is_disabled(cmnt.span))
        {
            self.hardbreak_if_not_bol()
        }
    }
    // Flush comments that remain before the closing delimiter.
    self.print_comments(
        pos_hi,
        CommentConfig::skip_trailing_ws().mixed_no_break().mixed_prev_space(),
    );
    if !block_format.breaks() {
        // Ensure the closing delimiter lands on its own, de-indented line.
        if !self.last_token_is_break() {
            self.hardbreak();
        }
        self.s.offset(-self.ind);
    }
    self.end();
    if block_format.with_braces() {
        self.print_word("}");
    }
    // restore block depth
    self.block_depth -= 1;
}
/// Prints a single-element block attempting to keep it on one line.
///
/// For `BlockFormat::Compact(true)` the braces are only materialized when the
/// enclosing box actually breaks; otherwise explicit braces are printed with a
/// breakable space on either side of the item.
fn print_single_line_block<T: Debug>(
    &mut self,
    block: &'ast [T],
    block_format: BlockFormat,
    mut print: impl FnMut(&mut Self, &'ast T),
    mut get_block_span: impl FnMut(&'ast T) -> Span,
) {
    self.s.cbox(self.ind);
    if matches!(block_format, BlockFormat::Compact(true)) {
        // Delimiters appear only on break: `pre_break`/`post_break` tokens.
        self.scan_break(BreakToken { pre_break: Some("{"), ..Default::default() });
        print(self, &block[0]);
        self.print_comments(get_block_span(&block[0]).hi(), CommentConfig::default());
        self.s.scan_break(BreakToken { post_break: Some("}"), ..Default::default() });
        self.s.offset(-self.ind);
    } else {
        // Explicit braces with breakable spaces around the item.
        self.word("{");
        self.space();
        print(self, &block[0]);
        self.print_comments(get_block_span(&block[0]).hi(), CommentConfig::default());
        self.space_if_not_bol();
        self.s.offset(-self.ind);
        self.word("}");
    }
    self.end();
}
/// Prints an empty block, taking care of any comments found before `pos_hi`.
///
/// Trailing comments (or no comments at all) are emitted after the empty
/// delimiters; any other comment style is printed inside the block.
fn print_empty_block(&mut self, block_format: BlockFormat, pos_hi: BytePos) {
    let has_braces = block_format.with_braces();
    // Trailing comments are printed after the block
    if self.peek_comment_before(pos_hi).is_none_or(|c| c.style.is_trailing()) {
        match (has_braces, self.config.bracket_spacing) {
            (true, true) => self.word("{ }"),
            (true, false) => self.word("{}"),
            (false, true) => self.nbsp(),
            (false, false) => {}
        }
        self.print_comments(pos_hi, CommentConfig::skip_ws());
        return;
    }
    // Non-trailing or mixed comments - print inside block
    if has_braces {
        self.word("{");
    }
    // Brace-less blocks may carry an extra offset for their comments.
    let offset = match block_format {
        BlockFormat::NoBraces(Some(off)) => off,
        _ => 0,
    };
    self.print_comments(
        pos_hi,
        self.cmnt_config().offset(offset).mixed_no_break().mixed_prev_space().mixed_post_nbsp(),
    );
    self.print_comments(
        pos_hi,
        CommentConfig::default().mixed_no_break().mixed_prev_space().mixed_post_nbsp(),
    );
    if has_braces {
        self.word("}");
    }
}
}
/// Formatting style for comma-separated lists.
///
/// The [`ListFormatKind`] selects the core breaking strategy, while the
/// boolean flags tweak indentation, delimiters, spacing, and comment-driven
/// breaking.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct ListFormat {
    /// The core formatting strategy.
    kind: ListFormatKind,
    /// If `true`, it means that the list already carries indentation.
    no_ind: bool,
    /// If `true`, a single-element list may break.
    break_single: bool,
    /// If `true`, a comment within the list forces a break.
    breaks_cmnts: bool,
    /// If `true`, a space is added after the opening delimiter and before the closing one.
    with_space: bool,
    /// If `true`, the list is enclosed in delimiters.
    with_delimiters: bool,
}
/// The kind of formatting style for a list.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ListFormatKind {
    /// Always breaks for multiple elements.
    AlwaysBreak,
    /// Breaks all elements if any break.
    Consistent,
    /// Attempts to fit all elements in one line, before breaking consistently.
    Compact,
    /// The list is printed inline, without breaks.
    Inline,
    /// Special formatting for Yul return values.
    ///
    /// `sym_prev` is printed before the list and `sym_post` after it.
    Yul { sym_prev: Option<&'static str>, sym_post: Option<&'static str> },
}
impl Default for ListFormat {
    /// Defaults to a consistent, delimited list with standard indentation and
    /// no comment- or single-element-driven breaking.
    fn default() -> Self {
        Self {
            kind: ListFormatKind::Consistent,
            with_delimiters: true,
            with_space: false,
            no_ind: false,
            break_single: false,
            breaks_cmnts: false,
        }
    }
}
impl ListFormat {
// -- GETTER METHODS -------------------------------------------------------
/// Returns the symbol printed before the list, if any (Yul lists only).
pub(crate) fn prev_symbol(&self) -> Option<&'static str> {
    match self.kind {
        ListFormatKind::Yul { sym_prev, .. } => sym_prev,
        _ => None,
    }
}
/// Returns the symbol printed after the list, if any (Yul lists only).
pub(crate) fn post_symbol(&self) -> Option<&'static str> {
    match self.kind {
        ListFormatKind::Yul { sym_post, .. } => sym_post,
        _ => None,
    }
}
/// Returns `true` for the [`ListFormatKind::Consistent`] strategy.
pub(crate) fn is_consistent(&self) -> bool {
    self.kind == ListFormatKind::Consistent
}
/// Returns `true` for the [`ListFormatKind::Compact`] strategy.
pub(crate) fn is_compact(&self) -> bool {
    self.kind == ListFormatKind::Compact
}
/// Returns `true` for the [`ListFormatKind::Inline`] strategy.
pub(crate) fn is_inline(&self) -> bool {
    self.kind == ListFormatKind::Inline
}
/// Returns `true` if a comment inside the list forces it to break.
pub(crate) fn breaks_with_comments(&self) -> bool {
    self.breaks_cmnts
}
// -- BUILDER METHODS ------------------------------------------------------
pub(crate) fn inline() -> Self {
Self { kind: ListFormatKind::Inline, ..Default::default() }
}
pub(crate) fn consistent() -> Self {
Self { kind: ListFormatKind::Consistent, ..Default::default() }
}
pub(crate) fn compact() -> Self {
Self { kind: ListFormatKind::Compact, ..Default::default() }
}
/// Creates a format that always breaks between multiple elements, breaks
/// even single-element lists, and forces a break when comments are present.
pub(crate) fn always_break() -> Self {
    // `with_delimiters` is already `true` in `Default`, so — consistently with
    // the other constructors — only the non-default fields are set here.
    Self {
        kind: ListFormatKind::AlwaysBreak,
        breaks_cmnts: true,
        break_single: true,
        ..Default::default()
    }
}
pub(crate) fn yul(sym_prev: Option<&'static str>, sym_post: Option<&'static str>) -> Self {
Self {
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.