repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/errors.rs | crates/cast/src/errors.rs | //! Errors for this crate
use foundry_config::Chain;
use std::fmt;
/// An error thrown when resolving a function via signature failed
#[derive(Clone, Debug)]
pub enum FunctionSignatureError {
    /// No function signature was provided at all.
    MissingSignature,
    /// The signature `sig` could not be resolved by name (Etherscan lookup unavailable).
    MissingEtherscan { sig: String },
    /// Etherscan resolution requires a known chain; this one is not recognized.
    UnknownChain(Chain),
    /// The target (`to`) address of the call was not provided.
    MissingToAddress,
}
impl fmt::Display for FunctionSignatureError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render a human-readable description for each resolution failure.
        match self {
            Self::MissingSignature => writeln!(f, "Function signature must be set"),
            Self::MissingEtherscan { sig } => write!(
                f,
                "Failed to determine function signature for `{sig}`\n\
                 To lookup a function signature of a deployed contract by name, a valid ETHERSCAN_API_KEY must be set.\n\
                 \tOr did you mean:\t {sig}()"
            ),
            Self::UnknownChain(chain) => {
                write!(f, "Resolving via etherscan requires a known chain. Unknown chain: {chain}")
            }
            Self::MissingToAddress => write!(f, "Target address must be set"),
        }
    }
}

impl std::error::Error for FunctionSignatureError {}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/base.rs | crates/cast/src/base.rs | use alloy_primitives::{I256, Sign, U256, utils::ParseUnits};
use eyre::Result;
use std::{
convert::Infallible,
fmt::{Binary, Debug, Display, Formatter, LowerHex, Octal, Result as FmtResult, UpperHex},
num::IntErrorKind,
str::FromStr,
};
/* -------------------------------------------- Base -------------------------------------------- */
/// Represents a number's [radix] or base. Currently it supports the same bases that [std::fmt]
/// supports.
///
/// [radix]: https://en.wikipedia.org/wiki/Radix
#[repr(u32)]
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
pub enum Base {
    /// Base 2.
    Binary = 2,
    /// Base 8.
    Octal = 8,
    /// Base 10; the default.
    #[default]
    Decimal = 10,
    /// Base 16.
    Hexadecimal = 16,
}
impl Display for Base {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        // Render the numeric radix, forwarding the formatter so that width,
        // alignment and fill flags are honored.
        let radix = *self as u32;
        Display::fmt(&radix, f)
    }
}
impl FromStr for Base {
    type Err = eyre::Report;

    /// Accepts the radix either as a number ("2", "8", "10", "16") or as a
    /// case-insensitive name such as "bin", "oct", "dec" or "hex".
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let lowered = s.to_lowercase();
        match lowered.as_str() {
            "2" | "b" | "bin" | "binary" => Ok(Self::Binary),
            "8" | "o" | "oct" | "octal" => Ok(Self::Octal),
            "10" | "d" | "dec" | "decimal" => Ok(Self::Decimal),
            "16" | "h" | "hex" | "hexadecimal" => Ok(Self::Hexadecimal),
            other => Err(eyre::eyre!(
                "\
Invalid base \"{other}\". Possible values:
2, b, bin, binary
8, o, oct, octal
10, d, dec, decimal
16, h, hex, hexadecimal"
            )),
        }
    }
}
impl TryFrom<String> for Base {
    type Error = eyre::Report;

    fn try_from(s: String) -> Result<Self, Self::Error> {
        // Delegate to the `FromStr` implementation.
        s.parse()
    }
}

impl TryFrom<u32> for Base {
    type Error = eyre::Report;

    fn try_from(n: u32) -> Result<Self, Self::Error> {
        let base = match n {
            2 => Self::Binary,
            8 => Self::Octal,
            10 => Self::Decimal,
            16 => Self::Hexadecimal,
            other => {
                return Err(eyre::eyre!("Invalid base \"{}\". Possible values: 2, 8, 10, 16", other));
            }
        };
        Ok(base)
    }
}

impl TryFrom<I256> for Base {
    type Error = eyre::Report;

    fn try_from(n: I256) -> Result<Self, Self::Error> {
        // NOTE(review): `low_u32` truncates to the low 32 bits before validation,
        // unlike the saturating `U256` conversion below — confirm this is intended.
        Self::try_from(n.low_u32())
    }
}

impl TryFrom<U256> for Base {
    type Error = eyre::Report;

    fn try_from(n: U256) -> Result<Self, Self::Error> {
        // Saturating: anything above `u32::MAX` becomes `u32::MAX` and is then
        // rejected by the `u32` implementation.
        Self::try_from(n.saturating_to::<u32>())
    }
}

impl From<Base> for u32 {
    fn from(b: Base) -> Self {
        // `Base` is `#[repr(u32)]`, so the discriminant is the radix itself.
        b as u32
    }
}

impl From<Base> for String {
    fn from(b: Base) -> Self {
        // Goes through `Display`, i.e. the decimal radix ("2", "8", "10", "16").
        format!("{b}")
    }
}
impl Base {
    /// Parses `base` when given, otherwise auto-detects the base from `s`.
    pub fn unwrap_or_detect(base: Option<&str>, s: impl AsRef<str>) -> Result<Self> {
        match base {
            Some(base) => base.parse(),
            None => Self::detect(s),
        }
    }
    /// Try parsing a number's base from a string.
    pub fn detect(s: impl AsRef<str>) -> Result<Self> {
        let s = s.as_ref();
        match s {
            // Ignore sign
            _ if s.starts_with(['+', '-']) => Self::detect(&s[1..]),
            // Validate prefixed values with u64::from_str_radix, which checks the digits.
            // Overflows (PosOverflow) are treated as valid: only the base matters here,
            // the parsed value is discarded.
            // The prefix is stripped first because from_str_radix rejects prefixed input.
            _ if s.starts_with("0b") => match u64::from_str_radix(&s[2..], 2) {
                Ok(_) => Ok(Self::Binary),
                Err(e) => match e.kind() {
                    IntErrorKind::PosOverflow => Ok(Self::Binary),
                    _ => Err(eyre::eyre!("could not parse binary value: {}", e)),
                },
            },
            _ if s.starts_with("0o") => match u64::from_str_radix(&s[2..], 8) {
                Ok(_) => Ok(Self::Octal),
                Err(e) => match e.kind() {
                    IntErrorKind::PosOverflow => Ok(Self::Octal),
                    _ => Err(eyre::eyre!("could not parse octal value: {e}")),
                },
            },
            _ if s.starts_with("0x") => match u64::from_str_radix(&s[2..], 16) {
                Ok(_) => Ok(Self::Hexadecimal),
                Err(e) => match e.kind() {
                    IntErrorKind::PosOverflow => Ok(Self::Hexadecimal),
                    _ => Err(eyre::eyre!("could not parse hexadecimal value: {e}")),
                },
            },
            // No prefix => first try parsing as decimal
            _ => match U256::from_str_radix(s, 10) {
                // Can be both, ambiguous but default to Decimal
                Ok(_) => Ok(Self::Decimal),
                Err(_) => match U256::from_str_radix(s, 16) {
                    Ok(_) => Ok(Self::Hexadecimal),
                    Err(e) => Err(eyre::eyre!(
                        "could not autodetect base as neither decimal or hexadecimal: {e}"
                    )),
                },
            },
        }
    }
    /// Returns the Rust standard prefix for a base ("" for decimal).
    pub const fn prefix(&self) -> &str {
        match self {
            Self::Binary => "0b",
            Self::Octal => "0o",
            Self::Decimal => "",
            Self::Hexadecimal => "0x",
        }
    }
}
/* --------------------------------------- NumberWithBase --------------------------------------- */
/// Utility struct for parsing numbers and formatting them into different [bases][Base].
///
/// # Example
///
/// ```
/// use cast::base::NumberWithBase;
/// use alloy_primitives::U256;
///
/// let number: NumberWithBase = U256::from(12345).into();
/// assert_eq!(number.format(), "12345");
///
/// // Debug uses number.base() to determine which base to format to, which defaults to Base::Decimal
/// assert_eq!(format!("{:?}", number), "12345");
///
/// // Display uses Base::Decimal
/// assert_eq!(format!("{}", number), "12345");
///
/// // The alternate formatter ("#") prepends the base's prefix
/// assert_eq!(format!("{:x}", number), "3039");
/// assert_eq!(format!("{:#x}", number), "0x3039");
///
/// assert_eq!(format!("{:b}", number), "11000000111001");
/// assert_eq!(format!("{:#b}", number), "0b11000000111001");
///
/// assert_eq!(format!("{:o}", number), "30071");
/// assert_eq!(format!("{:#o}", number), "0o30071");
/// ```
#[derive(Clone, Copy)]
pub struct NumberWithBase {
    /// The number, stored as its 256-bit representation (two's complement for
    /// negative values).
    number: U256,
    /// Whether the number is positive or zero.
    is_nonnegative: bool,
    /// The base to format to.
    base: Base,
}
impl std::ops::Deref for NumberWithBase {
    type Target = U256;

    /// Dereferences to the raw `U256` storage.
    ///
    /// NOTE(review): for negative values this exposes the two's complement bits,
    /// not the magnitude — callers must consult `is_nonnegative` themselves.
    fn deref(&self) -> &Self::Target {
        &self.number
    }
}
// Format using self.base
impl Debug for NumberWithBase {
    /// Formats the number in `self.base`, honoring the formatter's width/fill/align
    /// flags; the alternate flag (`#`) makes `pad_integral` prepend the base prefix.
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        let prefix = self.base.prefix();
        if self.number.is_zero() {
            // Zero is rendered directly, bypassing `format`.
            f.pad_integral(true, prefix, "0")
        } else {
            // Add sign only for decimal
            let is_nonnegative = match self.base {
                Base::Decimal => self.is_nonnegative,
                _ => true,
            };
            f.pad_integral(is_nonnegative, prefix, &self.format())
        }
    }
}
impl Binary for NumberWithBase {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        // Delegate to `Debug` with the base forced to binary.
        let binary = self.with_base(Base::Binary);
        Debug::fmt(&binary, f)
    }
}

impl Octal for NumberWithBase {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        // Delegate to `Debug` with the base forced to octal.
        let octal = self.with_base(Base::Octal);
        Debug::fmt(&octal, f)
    }
}

impl Display for NumberWithBase {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        // `Display` always renders in decimal, regardless of the stored base.
        let decimal = self.with_base(Base::Decimal);
        Debug::fmt(&decimal, f)
    }
}

impl LowerHex for NumberWithBase {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        // Delegate to `Debug` with the base forced to hexadecimal.
        let hex = self.with_base(Base::Hexadecimal);
        Debug::fmt(&hex, f)
    }
}

impl UpperHex for NumberWithBase {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        // Reuse the lowercase-hex rendering and uppercase it; `pad_integral`
        // re-applies padding and, with the alternate flag, the "0x" prefix.
        let upper = format!("{self:x}").to_uppercase();
        f.pad_integral(true, Base::Hexadecimal.prefix(), &upper)
    }
}
impl FromStr for NumberWithBase {
    type Err = eyre::Report;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // The base is auto-detected from the string's prefix.
        Self::parse_int(s, None)
    }
}

impl From<I256> for NumberWithBase {
    fn from(number: I256) -> Self {
        // `is_negative` returns false for zero, so zero counts as non-negative.
        let is_nonnegative = !number.is_negative();
        Self::new(number.into_raw(), is_nonnegative, Base::default())
    }
}

impl From<ParseUnits> for NumberWithBase {
    fn from(value: ParseUnits) -> Self {
        // Dispatch on the signedness of the parsed value.
        match value {
            ParseUnits::U256(unsigned) => unsigned.into(),
            ParseUnits::I256(signed) => signed.into(),
        }
    }
}

impl From<U256> for NumberWithBase {
    fn from(number: U256) -> Self {
        // Unsigned values are non-negative by construction.
        Self::new(number, true, Base::default())
    }
}

impl From<NumberWithBase> for I256 {
    fn from(n: NumberWithBase) -> Self {
        // Reinterpret the raw bits as a signed integer.
        Self::from_raw(n.number)
    }
}

impl From<NumberWithBase> for U256 {
    fn from(n: NumberWithBase) -> Self {
        n.number
    }
}

impl From<NumberWithBase> for String {
    /// Formats the number into its base. See [`NumberWithBase::format`].
    fn from(n: NumberWithBase) -> Self {
        n.format()
    }
}
impl NumberWithBase {
    /// Creates a new number with the given sign flag and formatting base.
    pub fn new(number: impl Into<U256>, is_nonnegative: bool, base: Base) -> Self {
        Self { number: number.into(), is_nonnegative, base }
    }

    /// Creates a copy of the number with the provided base.
    pub fn with_base(&self, base: Base) -> Self {
        Self { number: self.number, is_nonnegative: self.is_nonnegative, base }
    }

    /// Parses a string slice into a signed integer. If base is None then it tries to determine
    /// base from the prefix, otherwise defaults to Decimal.
    pub fn parse_int(s: &str, base: Option<&str>) -> Result<Self> {
        let base = Base::unwrap_or_detect(base, s)?;
        let (number, is_nonnegative) = Self::_parse_int(s, base)?;
        Ok(Self { number, is_nonnegative, base })
    }

    /// Parses a string slice into an unsigned integer. If base is None then it tries to determine
    /// base from the prefix, otherwise defaults to Decimal.
    pub fn parse_uint(s: &str, base: Option<&str>) -> Result<Self> {
        let base = Base::unwrap_or_detect(base, s)?;
        let number = Self::_parse_uint(s, base)?;
        Ok(Self { number, is_nonnegative: true, base })
    }

    /// Returns a copy of the underlying number as an unsigned integer. If the value is negative
    /// then the two's complement of its absolute value will be returned.
    pub fn number(&self) -> U256 {
        self.number
    }

    /// Returns whether the underlying number is positive or zero.
    pub fn is_nonnegative(&self) -> bool {
        self.is_nonnegative
    }

    /// Returns the underlying base. Defaults to [Decimal][Base].
    pub fn base(&self) -> Base {
        self.base
    }

    /// Returns the Rust standard prefix for the base.
    pub const fn prefix(&self) -> &str {
        self.base.prefix()
    }

    /// Sets the number's base to format to.
    pub fn set_base(&mut self, base: Base) -> &mut Self {
        self.base = base;
        self
    }

    /// Formats the number into the specified base.
    ///
    /// **Note**: this method only formats the number into the base, without adding any prefixes,
    /// signs or padding. Refer to the [std::fmt] module documentation on how to format this
    /// number with the aforementioned properties.
    pub fn format(&self) -> String {
        let s = match self.base {
            Base::Binary => format!("{:b}", self.number),
            Base::Octal => format!("{:o}", self.number),
            Base::Decimal => {
                if self.is_nonnegative {
                    self.number.to_string()
                } else {
                    // Negative numbers are stored as two's complement: reinterpret as
                    // signed and drop the sign, since `format` never emits signs.
                    let s = I256::from_raw(self.number).to_string();
                    s.strip_prefix('-').unwrap_or(&s).to_string()
                }
            }
            Base::Hexadecimal => format!("{:x}", self.number),
        };
        // Trim leading zeros, but keep a single "0" for the value zero.
        // (Previously zero was trimmed down to an empty string.)
        let trimmed = s.trim_start_matches('0');
        if trimmed.is_empty() {
            "0".to_string()
        } else if trimmed.len() == s.len() {
            s
        } else {
            trimmed.to_string()
        }
    }

    /// Parses a signed integer: strips an optional sign, parses the magnitude, and
    /// applies two's complement negation for negative values.
    fn _parse_int(s: &str, base: Base) -> Result<(U256, bool)> {
        let (s, sign) = get_sign(s);
        let mut n = Self::_parse_uint(s, base)?;
        let is_neg = matches!(sign, Sign::Negative);
        if is_neg {
            // Two's complement: -n == !n + 1.
            n = (!n).overflowing_add(U256::from(1)).0;
        }
        Ok((n, !is_neg))
    }

    /// Parses an unsigned integer, stripping any standard base prefix first since
    /// `from_str_radix` does not accept prefixed input.
    fn _parse_uint(s: &str, base: Base) -> Result<U256> {
        let s = match s.get(0..2) {
            Some("0x" | "0X" | "0o" | "0O" | "0b" | "0B") => &s[2..],
            _ => s,
        };
        U256::from_str_radix(s, base as u64).map_err(Into::into)
    }
}
/* ------------------------------------------- ToBase ------------------------------------------- */
/// Facilitates formatting an integer into a [Base].
pub trait ToBase {
    /// The error type returned by [`to_base`](ToBase::to_base); [`Infallible`] for the
    /// numeric implementations in this module, `eyre::Report` for string inputs.
    type Err;
    /// Formats self into a base, specifying whether to add the base prefix or not.
    ///
    /// Tries converting `self` into a [NumberWithBase] and then formats into the provided base by
    /// using the [Debug] implementation.
    ///
    /// # Example
    ///
    /// ```
    /// use alloy_primitives::U256;
    /// use cast::base::{Base, ToBase};
    ///
    /// // Any type that implements ToBase
    /// let number = U256::from(12345);
    /// assert_eq!(number.to_base(Base::Decimal, false).unwrap(), "12345");
    /// assert_eq!(number.to_base(Base::Hexadecimal, false).unwrap(), "3039");
    /// assert_eq!(number.to_base(Base::Hexadecimal, true).unwrap(), "0x3039");
    /// assert_eq!(number.to_base(Base::Binary, true).unwrap(), "0b11000000111001");
    /// assert_eq!(number.to_base(Base::Octal, true).unwrap(), "0o30071");
    /// ```
    fn to_base(&self, base: Base, add_prefix: bool) -> Result<String, Self::Err>;
}
impl ToBase for NumberWithBase {
    type Err = Infallible;

    fn to_base(&self, base: Base, add_prefix: bool) -> Result<String, Self::Err> {
        // `Debug` renders in the stored base; the alternate flag adds the prefix.
        let n = self.with_base(base);
        let rendered = if add_prefix { format!("{n:#?}") } else { format!("{n:?}") };
        Ok(rendered)
    }
}

impl ToBase for I256 {
    type Err = Infallible;

    fn to_base(&self, base: Base, add_prefix: bool) -> Result<String, Self::Err> {
        // Convert and delegate to the `NumberWithBase` implementation.
        NumberWithBase::from(*self).to_base(base, add_prefix)
    }
}

impl ToBase for U256 {
    type Err = Infallible;

    fn to_base(&self, base: Base, add_prefix: bool) -> Result<String, Self::Err> {
        // Convert and delegate to the `NumberWithBase` implementation.
        NumberWithBase::from(*self).to_base(base, add_prefix)
    }
}

impl ToBase for String {
    type Err = eyre::Report;

    fn to_base(&self, base: Base, add_prefix: bool) -> Result<String, Self::Err> {
        // Delegate to the `str` implementation.
        self.as_str().to_base(base, add_prefix)
    }
}

impl ToBase for str {
    type Err = eyre::Report;

    fn to_base(&self, base: Base, add_prefix: bool) -> Result<String, Self::Err> {
        // Parse first (auto-detecting the input base), then format into `base`.
        let n: NumberWithBase = self.parse()?;
        let n = n.with_base(base);
        if add_prefix { Ok(format!("{n:#?}")) } else { Ok(format!("{n:?}")) }
    }
}
/// Splits a leading `+`/`-` off `s` and returns the remainder together with the sign.
fn get_sign(s: &str) -> (&str, Sign) {
    if let Some(rest) = s.strip_prefix('-') {
        (rest, Sign::Negative)
    } else if let Some(rest) = s.strip_prefix('+') {
        (rest, Sign::Positive)
    } else {
        (s, Sign::Positive)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use Base::*;

    /// Assorted positive values, including the maxima of the smaller signed integer types.
    const POS_NUM: [i128; 44] = [
        1, 2, 3, 5, 7, 8, 10, 11, 13, 16, 17, 19, 23, 29, 31, 32, 37, 41, 43, 47, 53, 59, 61, 64,
        67, 71, 73, 79, 83, 89, 97, 100, 128, 200, 333, 500, 666, 1000, 6666, 10000,
        i16::MAX as i128, i32::MAX as i128, i64::MAX as i128, i128::MAX,
    ];

    /// Mirror image of `POS_NUM`, plus the minima of the smaller signed integer types.
    const NEG_NUM: [i128; 44] = [
        -1, -2, -3, -5, -7, -8, -10, -11, -13, -16, -17, -19, -23, -29, -31, -32, -37, -41, -43,
        -47, -53, -59, -61, -64, -67, -71, -73, -79, -83, -89, -97, -100, -128, -200, -333, -500,
        -666, -1000, -6666, -10000, i16::MIN as i128, i32::MIN as i128, i64::MIN as i128,
        i128::MIN,
    ];

    #[test]
    fn test_defaults() {
        assert!(matches!(Base::default(), Decimal));
        let from_unsigned: NumberWithBase = U256::ZERO.into();
        assert!(matches!(from_unsigned.base, Decimal));
        let from_signed: NumberWithBase = I256::ZERO.into();
        assert!(matches!(from_signed.base, Decimal));
    }

    #[test]
    fn can_parse_base() {
        // Every accepted spelling, numeric or named, maps to its radix.
        let cases: [(&str, Base); 16] = [
            ("2", Binary),
            ("b", Binary),
            ("bin", Binary),
            ("binary", Binary),
            ("8", Octal),
            ("o", Octal),
            ("oct", Octal),
            ("octal", Octal),
            ("10", Decimal),
            ("d", Decimal),
            ("dec", Decimal),
            ("decimal", Decimal),
            ("16", Hexadecimal),
            ("h", Hexadecimal),
            ("hex", Hexadecimal),
            ("hexadecimal", Hexadecimal),
        ];
        for (input, expected) in cases {
            assert_eq!(input.parse::<Base>().unwrap(), expected);
        }
    }

    #[test]
    fn can_detect_base() {
        assert_eq!(Base::detect("0b100").unwrap(), Binary);
        assert_eq!(Base::detect("0o100").unwrap(), Octal);
        assert_eq!(Base::detect("100").unwrap(), Decimal);
        assert_eq!(Base::detect("0x100").unwrap(), Hexadecimal);
        // No prefix but contains hex digits => detected as hexadecimal.
        assert_eq!(Base::detect("0123456789abcdef").unwrap(), Hexadecimal);
        // Digits invalid for the (detected) base must error out.
        for invalid in ["0b234abc", "0o89cba", "0123456789abcdefg", "0x123abclpmk", "hello world"]
        {
            let _ = Base::detect(invalid).unwrap_err();
        }
    }

    #[test]
    fn test_format_pos() {
        // `format` must agree with the std formatting of the primitive value.
        for n in POS_NUM {
            let mut num: NumberWithBase = I256::try_from(n).unwrap().into();
            assert_eq!(num.set_base(Binary).format(), format!("{n:b}"));
            assert_eq!(num.set_base(Octal).format(), format!("{n:o}"));
            assert_eq!(num.set_base(Decimal).format(), format!("{n:}"));
            assert_eq!(num.set_base(Hexadecimal).format(), format!("{n:x}"));
            assert_eq!(num.set_base(Hexadecimal).format().to_uppercase(), format!("{n:X}"));
        }
    }

    #[test]
    fn test_format_neg() {
        for n in NEG_NUM {
            let mut num: NumberWithBase = I256::try_from(n).unwrap().into();
            // Underlying storage is 256 bits, so two's complement values are
            // sign-extended: pad the 128-bit rendering up to the full width.
            assert_eq!(num.set_base(Binary).format(), format!("{n:1>256b}"));
            // There is no independent 256-bit octal rendering to compare against, so
            // this expectation is produced by the same code path (round-trip check).
            let expected_octal = {
                let mut o = NumberWithBase::from(I256::try_from(n).unwrap());
                o.set_base(Octal);
                o.format()
            };
            assert_eq!(num.set_base(Octal).format(), expected_octal);
            // Sign not included, see NumberWithBase::format.
            assert_eq!(
                num.set_base(Decimal).format(),
                format!("{n:}").trim_matches('-').to_string()
            );
            assert_eq!(num.set_base(Hexadecimal).format(), format!("{n:f>64x}"));
            assert_eq!(num.set_base(Hexadecimal).format().to_uppercase(), format!("{n:F>64X}"));
        }
    }

    #[test]
    fn test_fmt_macro() {
        // Every `format!` specifier on `NumberWithBase` must agree with the same
        // specifier applied to the primitive value, with and without `#`.
        for n in POS_NUM {
            let num = NumberWithBase::from(I256::try_from(n).unwrap());
            assert_eq!(format!("{num:b}"), format!("{n:b}"));
            assert_eq!(format!("{num:#b}"), format!("{n:#b}"));
            assert_eq!(format!("{num:o}"), format!("{n:o}"));
            assert_eq!(format!("{num:#o}"), format!("{n:#o}"));
            assert_eq!(format!("{num:}"), format!("{n:}"));
            assert_eq!(format!("{num:#}"), format!("{n:#}"));
            assert_eq!(format!("{num:x}"), format!("{n:x}"));
            assert_eq!(format!("{num:#x}"), format!("{n:#x}"));
            assert_eq!(format!("{num:X}"), format!("{n:X}"));
            assert_eq!(format!("{num:#X}"), format!("{n:#X}"));
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/lib.rs | crates/cast/src/lib.rs | //! Cast is a Swiss Army knife for interacting with Ethereum applications from the command line.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[macro_use]
extern crate foundry_common;
#[macro_use]
extern crate tracing;
use alloy_consensus::Header;
use alloy_dyn_abi::{DynSolType, DynSolValue, FunctionExt};
use alloy_ens::NameOrAddress;
use alloy_json_abi::Function;
use alloy_network::{AnyNetwork, AnyRpcTransaction};
use alloy_primitives::{
Address, B256, I256, Keccak256, LogData, Selector, TxHash, TxKind, U64, U256, hex,
utils::{ParseUnits, Unit, keccak256},
};
use alloy_provider::{
PendingTransactionBuilder, Provider,
network::eip2718::{Decodable2718, Encodable2718},
};
use alloy_rlp::Decodable;
use alloy_rpc_types::{
BlockId, BlockNumberOrTag, BlockOverrides, Filter, FilterBlockOption, Log, TransactionRequest,
state::StateOverride,
};
use alloy_serde::WithOtherFields;
use base::{Base, NumberWithBase, ToBase};
use chrono::DateTime;
use eyre::{Context, ContextCompat, OptionExt, Result};
use foundry_block_explorers::Client;
use foundry_common::{
abi::{coerce_value, encode_function_args, encode_function_args_packed, get_event, get_func},
compile::etherscan_project,
flatten,
fmt::*,
fs, shell,
};
use foundry_config::Chain;
use foundry_evm::core::bytecode::InstIter;
use foundry_primitives::FoundryTxEnvelope;
use futures::{FutureExt, StreamExt, future::Either};
use rayon::prelude::*;
use std::{
borrow::Cow,
fmt::Write,
io,
path::PathBuf,
str::FromStr,
sync::atomic::{AtomicBool, Ordering},
};
use tokio::signal::ctrl_c;
pub use foundry_evm::*;
pub mod args;
pub mod cmd;
pub mod opts;
pub mod base;
pub(crate) mod debug;
pub mod errors;
mod rlp_converter;
pub mod tx;
use rlp_converter::Item;
// TODO: CastContract with common contract initializers? Same for CastProviders?
/// A thin wrapper around an RPC [`Provider`] exposing the convenience helpers used by
/// the `cast` CLI (calls, balance/block queries, publishing raw transactions, ...).
pub struct Cast<P> {
    provider: P,
}
impl<P: Provider<AnyNetwork> + Clone + Unpin> Cast<P> {
/// Creates a new Cast instance from the provided client
///
/// # Example
///
/// ```
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// # Ok(())
/// # }
/// ```
// Simple constructor; the provider is used by every RPC helper below.
pub fn new(provider: P) -> Self {
    Self { provider }
}
/// Makes a read-only call to the specified address
///
/// # Example
///
/// ```
/// use alloy_primitives::{Address, U256, Bytes};
/// use alloy_rpc_types::{TransactionRequest, BlockOverrides, state::{StateOverride, AccountOverride}};
/// use alloy_serde::WithOtherFields;
/// use cast::Cast;
/// use alloy_provider::{RootProvider, ProviderBuilder, network::AnyNetwork};
/// use std::{str::FromStr, collections::HashMap};
/// use alloy_rpc_types::state::StateOverridesBuilder;
/// use alloy_sol_types::{sol, SolCall};
///
/// sol!(
/// function greeting(uint256 i) public returns (string);
/// );
///
/// # async fn foo() -> eyre::Result<()> {
/// let alloy_provider = ProviderBuilder::<_,_, AnyNetwork>::default().connect("http://localhost:8545").await?;;
/// let to = Address::from_str("0xB3C95ff08316fb2F2e3E52Ee82F8e7b605Aa1304")?;
/// let greeting = greetingCall { i: U256::from(5) }.abi_encode();
/// let bytes = Bytes::from_iter(greeting.iter());
/// let tx = TransactionRequest::default().to(to).input(bytes.into());
/// let tx = WithOtherFields::new(tx);
///
/// // Create state overrides
/// let mut state_override = StateOverride::default();
/// let mut account_override = AccountOverride::default();
/// account_override.balance = Some(U256::from(1000));
/// state_override.insert(to, account_override);
/// let state_override_object = StateOverridesBuilder::default().build();
/// let block_override_object = BlockOverrides::default();
///
/// let cast = Cast::new(alloy_provider);
/// let data = cast.call(&tx, None, None, Some(state_override_object), Some(block_override_object)).await?;
/// println!("{}", data);
/// # Ok(())
/// # }
/// ```
pub async fn call(
    &self,
    req: &WithOtherFields<TransactionRequest>,
    func: Option<&Function>,
    block: Option<BlockId>,
    state_override: Option<StateOverride>,
    block_override: Option<BlockOverrides>,
) -> Result<String> {
    // Build the eth_call, defaulting to the latest block when none is given.
    let mut call = self
        .provider
        .call(req.clone())
        .block(block.unwrap_or_default())
        .with_block_overrides_opt(block_override);
    if let Some(state_override) = state_override {
        call = call.overrides(state_override)
    }
    let res = call.await?;
    let mut decoded = vec![];
    if let Some(func) = func {
        // decode args into tokens
        decoded = match func.abi_decode_output(res.as_ref()) {
            Ok(decoded) => decoded,
            Err(err) => {
                // Decoding failed. Before reporting a decode error, try to produce a
                // more helpful message: an empty result often means the target has no
                // code at all.
                if res.is_empty() {
                    // check that the recipient is a contract that can be called
                    if let Some(TxKind::Call(addr)) = req.to {
                        if let Ok(code) = self
                            .provider
                            .get_code_at(addr)
                            .block_id(block.unwrap_or_default())
                            .await
                            && code.is_empty()
                        {
                            eyre::bail!("contract {addr:?} does not have any code")
                        }
                    } else if Some(TxKind::Create) == req.to {
                        eyre::bail!("tx req is a contract deployment");
                    } else {
                        eyre::bail!("recipient is None");
                    }
                }
                return Err(err).wrap_err(
                    "could not decode output; did you specify the wrong function return data type?"
                );
            }
        };
    }
    // handle case when return type is not specified
    Ok(if decoded.is_empty() {
        res.to_string()
    } else if shell::is_json() {
        // JSON mode: serialize each decoded token.
        let tokens = decoded
            .into_iter()
            .map(|value| serialize_value_as_json(value, None))
            .collect::<eyre::Result<Vec<_>>>()?;
        serde_json::to_string_pretty(&tokens).unwrap()
    } else {
        // seth compatible user-friendly return type conversions
        decoded.iter().map(format_token).collect::<Vec<_>>().join("\n")
    })
}
/// Generates an access list for the specified transaction
///
/// # Example
///
/// ```
/// use cast::{Cast};
/// use alloy_primitives::{Address, U256, Bytes};
/// use alloy_rpc_types::{TransactionRequest};
/// use alloy_serde::WithOtherFields;
/// use alloy_provider::{RootProvider, ProviderBuilder, network::AnyNetwork};
/// use std::str::FromStr;
/// use alloy_sol_types::{sol, SolCall};
///
/// sol!(
/// function greeting(uint256 i) public returns (string);
/// );
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider = ProviderBuilder::<_,_, AnyNetwork>::default().connect("http://localhost:8545").await?;;
/// let to = Address::from_str("0xB3C95ff08316fb2F2e3E52Ee82F8e7b605Aa1304")?;
/// let greeting = greetingCall { i: U256::from(5) }.abi_encode();
/// let bytes = Bytes::from_iter(greeting.iter());
/// let tx = TransactionRequest::default().to(to).input(bytes.into());
/// let tx = WithOtherFields::new(tx);
/// let cast = Cast::new(&provider);
/// let access_list = cast.access_list(&tx, None).await?;
/// println!("{}", access_list);
/// # Ok(())
/// # }
/// ```
pub async fn access_list(
    &self,
    req: &WithOtherFields<TransactionRequest>,
    block: Option<BlockId>,
) -> Result<String> {
    // eth_createAccessList against the given block (latest when `None`).
    let access_list =
        self.provider.create_access_list(req).block_id(block.unwrap_or_default()).await?;
    let res = if shell::is_json() {
        serde_json::to_string(&access_list)?
    } else {
        // Human-readable rendering: gas used, then one entry per address,
        // each followed by its storage keys (if any).
        let mut s =
            vec![format!("gas used: {}", access_list.gas_used), "access list:".to_string()];
        for al in access_list.access_list.0 {
            s.push(format!("- address: {}", &al.address.to_checksum(None)));
            if !al.storage_keys.is_empty() {
                s.push(" keys:".to_string());
                for key in al.storage_keys {
                    s.push(format!(" {key:?}"));
                }
            }
        }
        s.join("\n")
    };
    Ok(res)
}
/// Returns the balance of `who` at the given block (latest when `None`).
pub async fn balance(&self, who: Address, block: Option<BlockId>) -> Result<U256> {
    Ok(self.provider.get_balance(who).block_id(block.unwrap_or_default()).await?)
}
/// Publishes a raw transaction to the network
///
/// # Example
///
/// ```
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let res = cast.publish("0x1234".to_string()).await?;
/// println!("{:?}", res);
/// # Ok(())
/// # }
/// ```
pub async fn publish(&self, raw_tx: String) -> Result<PendingTransactionBuilder<AnyNetwork>> {
    // Decode the hex payload before submitting; `strip_0x` is defined elsewhere in
    // this crate — presumably it removes an optional "0x" prefix (TODO confirm).
    let tx = hex::decode(strip_0x(&raw_tx))?;
    let res = self.provider.send_raw_transaction(&tx).await?;
    Ok(res)
}
/// # Example
///
/// ```
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let block = cast.block(5, true, vec![], false).await?;
/// println!("{}", block);
/// # Ok(())
/// # }
/// ```
pub async fn block<B: Into<BlockId>>(
    &self,
    block: B,
    full: bool,
    fields: Vec<String>,
    raw: bool,
) -> Result<String> {
    let block = block.into();
    // Transaction bodies are only available in "full" responses.
    if fields.contains(&"transactions".into()) && !full {
        eyre::bail!("use --full to view transactions")
    }
    let block = self
        .provider
        .get_block(block)
        .kind(full.into())
        .await?
        .ok_or_else(|| eyre::eyre!("block {:?} not found", block))?;
    Ok(if raw {
        // Raw mode: RLP-encode just the header, hex-prefixed.
        let header: Header = block.into_inner().header.inner.try_into_header()?;
        format!("0x{}", hex::encode(alloy_rlp::encode(&header)))
    } else if !fields.is_empty() {
        // Selected-fields mode: one pretty-printed value per requested field,
        // with a placeholder line for unknown field names.
        let mut result = String::new();
        for field in fields {
            result.push_str(
                &get_pretty_block_attr(&block, &field)
                    .unwrap_or_else(|| format!("{field} is not a valid block field")),
            );
            result.push('\n');
        }
        result.trim_end().to_string()
    } else if shell::is_json() {
        serde_json::to_value(&block).unwrap().to_string()
    } else {
        block.pretty()
    })
}
/// Fetches a single pretty-printed block field and parses it as a `U256`.
async fn block_field_as_num<B: Into<BlockId>>(&self, block: B, field: String) -> Result<U256> {
    // Request only the one field we care about, without full transactions or raw RLP.
    let rendered = self.block(block.into(), false, vec![field], false).await?;
    rendered.parse().map_err(Into::into)
}
/// Returns the base fee (`baseFeePerGas`) of the given block.
pub async fn base_fee<B: Into<BlockId>>(&self, block: B) -> Result<U256> {
    self.block_field_as_num(block, "baseFeePerGas".to_string()).await
}
pub async fn age<B: Into<BlockId>>(&self, block: B) -> Result<String> {
let timestamp_str =
Self::block_field_as_num(self, block, String::from("timestamp")).await?.to_string();
let datetime = DateTime::from_timestamp(timestamp_str.parse::<i64>().unwrap(), 0).unwrap();
Ok(datetime.format("%a %b %e %H:%M:%S %Y").to_string())
}
/// Returns the timestamp of the given block as a `U256`.
pub async fn timestamp<B: Into<BlockId>>(&self, block: B) -> Result<U256> {
    self.block_field_as_num(block, String::from("timestamp")).await
}
    /// Returns a human-readable name for the network the provider is connected to,
    /// derived from the genesis block hash (and, where genesis hashes collide,
    /// from the hash of a later distinguishing block).
    ///
    /// Returns `"unknown"` when the genesis hash matches no known network.
    pub async fn chain(&self) -> Result<&str> {
        let genesis_hash = Self::block(
            self,
            0,
            false,
            // Select only block hash
            vec![String::from("hash")],
            false,
        )
        .await?;
        Ok(match &genesis_hash[..] {
            // Ethereum mainnet and Ethereum Classic share a genesis hash; the DAO-fork
            // block (1920000) tells them apart.
            "0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3" => {
                match &(Self::block(self, 1920000, false, vec![String::from("hash")], false)
                    .await?)[..]
                {
                    "0x94365e3a8c0b35089c1d1195081fe7489b528a84b22199c916180db8b28ade7f" => {
                        "etclive"
                    }
                    _ => "ethlive",
                }
            }
            "0xa3c565fc15c7478862d50ccd6561e3c06b24cc509bf388941c25ea985ce32cb9" => "kovan",
            "0x41941023680923e0fe4d74a34bdac8141f2540e3ae90623718e47d66d1ca4a2d" => "ropsten",
            "0x7ca38a1916c42007829c55e69d3e9a73265554b586a499015373241b8a3fa48b" => {
                "optimism-mainnet"
            }
            "0xc1fc15cd51159b1f1e5cbc4b82e85c1447ddfa33c52cf1d98d14fba0d6354be1" => {
                "optimism-goerli"
            }
            "0x02adc9b449ff5f2467b8c674ece7ff9b21319d76c4ad62a67a70d552655927e5" => {
                "optimism-kovan"
            }
            "0x521982bd54239dc71269eefb58601762cc15cfb2978e0becb46af7962ed6bfaa" => "fraxtal",
            "0x910f5c4084b63fd860d0c2f9a04615115a5a991254700b39ba072290dbd77489" => {
                "fraxtal-testnet"
            }
            "0x7ee576b35482195fc49205cec9af72ce14f003b9ae69f6ba0faef4514be8b442" => {
                "arbitrum-mainnet"
            }
            "0x0cd786a2425d16f152c658316c423e6ce1181e15c3295826d7c9904cba9ce303" => "morden",
            "0x6341fd3daf94b748c72ced5a5b26028f2474f5f00d824504e4fa37a75767e177" => "rinkeby",
            "0xbf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a" => "goerli",
            "0x14c2283285a88fe5fce9bf5c573ab03d6616695d717b12a127188bcacfc743c4" => "kotti",
            "0xa9c28ce2141b56c474f1dc504bee9b01eb1bd7d1a507580d5519d4437a97de1b" => "polygon-pos",
            "0x7202b2b53c5a0836e773e319d18922cc756dd67432f9a1f65352b61f4406c697" => {
                "polygon-pos-amoy-testnet"
            }
            "0x81005434635456a16f74ff7023fbe0bf423abbc8a8deb093ffff455c0ad3b741" => "polygon-zkevm",
            "0x676c1a76a6c5855a32bdf7c61977a0d1510088a4eeac1330466453b3d08b60b9" => {
                "polygon-zkevm-cardona-testnet"
            }
            "0x4f1dd23188aab3a76b463e4af801b52b1248ef073c648cbdc4c9333d3da79756" => "gnosis",
            "0xada44fd8d2ecab8b08f256af07ad3e777f17fb434f8f8e678b312f576212ba9a" => "chiado",
            "0x6d3c66c5357ec91d5c43af47e234a939b22557cbb552dc45bebbceeed90fbe34" => "bsctest",
            "0x0d21840abff46b96c84b2ac9e10e4f5cdaeb5693cb665db62a2f3b02d2d57b5b" => "bsc",
            // Avalanche C-Chain mainnet and Fuji share a genesis hash; block 1 tells
            // them apart.
            "0x31ced5b9beb7f8782b014660da0cb18cc409f121f408186886e1ca3e8eeca96b" => {
                match &(Self::block(self, 1, false, vec![String::from("hash")], false).await?)[..] {
                    "0x738639479dc82d199365626f90caa82f7eafcfe9ed354b456fb3d294597ceb53" => {
                        "avalanche-fuji"
                    }
                    _ => "avalanche",
                }
            }
            "0x23a2658170ba70d014ba0d0d2709f8fbfe2fa660cd868c5f282f991eecbe38ee" => "ink",
            "0xe5fd5cf0be56af58ad5751b401410d6b7a09d830fa459789746a3d0dd1c79834" => "ink-sepolia",
            _ => "unknown",
        })
    }
pub async fn chain_id(&self) -> Result<u64> {
Ok(self.provider.get_chain_id().await?)
}
pub async fn block_number(&self) -> Result<u64> {
Ok(self.provider.get_block_number().await?)
}
pub async fn gas_price(&self) -> Result<u128> {
Ok(self.provider.get_gas_price().await?)
}
/// # Example
///
/// ```
/// use alloy_primitives::Address;
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
/// use std::str::FromStr;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let addr = Address::from_str("0x7eD52863829AB99354F3a0503A622e82AcD5F7d3")?;
/// let nonce = cast.nonce(addr, None).await?;
/// println!("{}", nonce);
/// # Ok(())
/// # }
/// ```
pub async fn nonce(&self, who: Address, block: Option<BlockId>) -> Result<u64> {
Ok(self.provider.get_transaction_count(who).block_id(block.unwrap_or_default()).await?)
}
    /// Returns the code hash of the given account, optionally at a specific block.
    ///
    /// # Example
    ///
    /// ```
    /// use alloy_primitives::{Address, FixedBytes};
    /// use alloy_provider::{network::AnyNetwork, ProviderBuilder, RootProvider};
    /// use cast::Cast;
    /// use std::str::FromStr;
    ///
    /// # async fn foo() -> eyre::Result<()> {
    /// let provider =
    ///     ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
    /// let cast = Cast::new(provider);
    /// let addr = Address::from_str("0x7eD52863829AB99354F3a0503A622e82AcD5F7d3")?;
    /// let slots = vec![FixedBytes::from_str("0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")?];
    /// let codehash = cast.codehash(addr, slots, None).await?;
    /// println!("{}", codehash);
    /// # Ok(())
    /// # }
    /// ```
    pub async fn codehash(
        &self,
        who: Address,
        slots: Vec<B256>,
        block: Option<BlockId>,
    ) -> Result<String> {
        // `eth_getProof` returns the account's code hash alongside the storage proof.
        Ok(self
            .provider
            .get_proof(who, slots)
            .block_id(block.unwrap_or_default())
            .await?
            .code_hash
            .to_string())
    }
    /// Returns the storage root of the given account, optionally at a specific block.
    ///
    /// # Example
    ///
    /// ```
    /// use alloy_primitives::{Address, FixedBytes};
    /// use alloy_provider::{network::AnyNetwork, ProviderBuilder, RootProvider};
    /// use cast::Cast;
    /// use std::str::FromStr;
    ///
    /// # async fn foo() -> eyre::Result<()> {
    /// let provider =
    ///     ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
    /// let cast = Cast::new(provider);
    /// let addr = Address::from_str("0x7eD52863829AB99354F3a0503A622e82AcD5F7d3")?;
    /// let slots = vec![FixedBytes::from_str("0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")?];
    /// let storage_root = cast.storage_root(addr, slots, None).await?;
    /// println!("{}", storage_root);
    /// # Ok(())
    /// # }
    /// ```
    pub async fn storage_root(
        &self,
        who: Address,
        slots: Vec<B256>,
        block: Option<BlockId>,
    ) -> Result<String> {
        // `eth_getProof` returns the account's storage root alongside the storage proof.
        Ok(self
            .provider
            .get_proof(who, slots)
            .block_id(block.unwrap_or_default())
            .await?
            .storage_hash
            .to_string())
    }
/// # Example
///
/// ```
/// use alloy_primitives::Address;
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
/// use std::str::FromStr;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let addr = Address::from_str("0x7eD52863829AB99354F3a0503A622e82AcD5F7d3")?;
/// let implementation = cast.implementation(addr, false, None).await?;
/// println!("{}", implementation);
/// # Ok(())
/// # }
/// ```
pub async fn implementation(
&self,
who: Address,
is_beacon: bool,
block: Option<BlockId>,
) -> Result<String> {
let slot = match is_beacon {
true => {
// Use the beacon slot : bytes32(uint256(keccak256('eip1967.proxy.beacon')) - 1)
B256::from_str(
"0xa3f0ad74e5423aebfd80d3ef4346578335a9a72aeaee59ff6cb3582b35133d50",
)?
}
false => {
// Use the implementation slot :
// bytes32(uint256(keccak256('eip1967.proxy.implementation')) - 1)
B256::from_str(
"0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc",
)?
}
};
let value = self
.provider
.get_storage_at(who, slot.into())
.block_id(block.unwrap_or_default())
.await?;
let addr = Address::from_word(value.into());
Ok(format!("{addr:?}"))
}
/// # Example
///
/// ```
/// use alloy_primitives::Address;
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
/// use std::str::FromStr;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let addr = Address::from_str("0x7eD52863829AB99354F3a0503A622e82AcD5F7d3")?;
/// let admin = cast.admin(addr, None).await?;
/// println!("{}", admin);
/// # Ok(())
/// # }
/// ```
pub async fn admin(&self, who: Address, block: Option<BlockId>) -> Result<String> {
let slot =
B256::from_str("0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103")?;
let value = self
.provider
.get_storage_at(who, slot.into())
.block_id(block.unwrap_or_default())
.await?;
let addr = Address::from_word(value.into());
Ok(format!("{addr:?}"))
}
/// # Example
///
/// ```
/// use alloy_primitives::{Address, U256};
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
/// use std::str::FromStr;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let addr = Address::from_str("7eD52863829AB99354F3a0503A622e82AcD5F7d3")?;
/// let computed_address = cast.compute_address(addr, None).await?;
/// println!("Computed address for address {addr}: {computed_address}");
/// # Ok(())
/// # }
/// ```
pub async fn compute_address(&self, address: Address, nonce: Option<u64>) -> Result<Address> {
let unpacked = if let Some(n) = nonce { n } else { self.nonce(address, None).await? };
Ok(address.create(unpacked))
}
/// # Example
///
/// ```
/// use alloy_primitives::Address;
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
/// use std::str::FromStr;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let addr = Address::from_str("0x00000000219ab540356cbb839cbe05303d7705fa")?;
/// let code = cast.code(addr, None, false).await?;
/// println!("{}", code);
/// # Ok(())
/// # }
/// ```
pub async fn code(
&self,
who: Address,
block: Option<BlockId>,
disassemble: bool,
) -> Result<String> {
if disassemble {
let code =
self.provider.get_code_at(who).block_id(block.unwrap_or_default()).await?.to_vec();
SimpleCast::disassemble(&code)
} else {
Ok(format!(
"{}",
self.provider.get_code_at(who).block_id(block.unwrap_or_default()).await?
))
}
}
/// Example
///
/// ```
/// use alloy_primitives::Address;
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
/// use std::str::FromStr;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let addr = Address::from_str("0x00000000219ab540356cbb839cbe05303d7705fa")?;
/// let codesize = cast.codesize(addr, None).await?;
/// println!("{}", codesize);
/// # Ok(())
/// # }
/// ```
pub async fn codesize(&self, who: Address, block: Option<BlockId>) -> Result<String> {
let code =
self.provider.get_code_at(who).block_id(block.unwrap_or_default()).await?.to_vec();
Ok(code.len().to_string())
}
/// # Example
///
/// ```
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let tx_hash = "0xf8d1713ea15a81482958fb7ddf884baee8d3bcc478c5f2f604e008dc788ee4fc";
/// let tx = cast.transaction(Some(tx_hash.to_string()), None, None, None, false, false).await?;
/// println!("{}", tx);
/// # Ok(())
/// # }
/// ```
    pub async fn transaction(
        &self,
        tx_hash: Option<String>,
        from: Option<NameOrAddress>,
        nonce: Option<u64>,
        field: Option<String>,
        raw: bool,
        to_request: bool,
    ) -> Result<String> {
        // Locate the transaction either by hash, or by (sender, nonce) via a
        // non-standard RPC method; one of the two selectors is required.
        let tx = if let Some(tx_hash) = tx_hash {
            let tx_hash = TxHash::from_str(&tx_hash).wrap_err("invalid tx hash")?;
            self.provider
                .get_transaction_by_hash(tx_hash)
                .await?
                .ok_or_else(|| eyre::eyre!("tx not found: {:?}", tx_hash))?
        } else if let Some(from) = from {
            // If nonce is not provided, uses 0.
            let nonce = U64::from(nonce.unwrap_or_default());
            let from = from.resolve(self.provider.root()).await?;
            self.provider
                .raw_request::<_, Option<AnyRpcTransaction>>(
                    "eth_getTransactionBySenderAndNonce".into(),
                    (from, nonce),
                )
                .await?
                .ok_or_else(|| {
                    eyre::eyre!("tx not found for sender {from} and nonce {:?}", nonce.to::<u64>())
                })?
        } else {
            eyre::bail!("tx hash or from address is required")
        };
        // Output precedence: raw EIP-2718 bytes > single field > JSON > request JSON > pretty.
        Ok(if raw {
            // convert to FoundryTxEnvelope to support all foundry tx types (including opstack
            // deposit transactions)
            let foundry_tx = FoundryTxEnvelope::try_from(tx)?;
            let encoded = foundry_tx.encoded_2718();
            format!("0x{}", hex::encode(encoded))
        } else if let Some(ref field) = field {
            get_pretty_tx_attr(&tx.inner, field.as_str())
                .ok_or_else(|| eyre::eyre!("invalid tx field: {}", field.to_string()))?
        } else if shell::is_json() {
            // to_value first to sort json object keys
            serde_json::to_value(&tx)?.to_string()
        } else if to_request {
            serde_json::to_string_pretty(&TransactionRequest::from_recovered_transaction(
                tx.into(),
            ))?
        } else {
            tx.pretty()
        })
    }
/// Perform a raw JSON-RPC request
///
/// # Example
///
/// ```
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let result = cast
/// .rpc("eth_getBalance", &["0xc94770007dda54cF92009BFF0dE90c06F603a09f", "latest"])
/// .await?;
/// println!("{}", result);
/// # Ok(())
/// # }
/// ```
pub async fn rpc<V>(&self, method: &str, params: V) -> Result<String>
where
V: alloy_json_rpc::RpcSend,
{
let res = self
.provider
.raw_request::<V, serde_json::Value>(Cow::Owned(method.to_string()), params)
.await?;
Ok(serde_json::to_string(&res)?)
}
/// Returns the slot
///
/// # Example
///
/// ```
/// use alloy_primitives::{Address, B256};
/// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
/// use cast::Cast;
/// use std::str::FromStr;
///
/// # async fn foo() -> eyre::Result<()> {
/// let provider =
/// ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
/// let cast = Cast::new(provider);
/// let addr = Address::from_str("0x00000000006c3852cbEf3e08E8dF289169EdE581")?;
/// let slot = B256::ZERO;
/// let storage = cast.storage(addr, slot, None).await?;
/// println!("{}", storage);
/// # Ok(())
/// # }
/// ```
pub async fn storage(
&self,
from: Address,
slot: B256,
block: Option<BlockId>,
) -> Result<String> {
Ok(format!(
"{:?}",
B256::from(
self.provider
.get_storage_at(from, slot.into())
.block_id(block.unwrap_or_default())
.await?
)
))
}
pub async fn filter_logs(&self, filter: Filter) -> Result<String> {
let logs = self.provider.get_logs(&filter).await?;
Self::format_logs(logs)
}
/// Retrieves logs using chunked requests to handle large block ranges.
///
/// Automatically divides large block ranges into smaller chunks to avoid provider limits
/// and processes them with controlled concurrency to prevent rate limiting.
pub async fn filter_logs_chunked(&self, filter: Filter, chunk_size: u64) -> Result<String> {
let logs = self.get_logs_chunked(&filter, chunk_size).await?;
Self::format_logs(logs)
}
fn format_logs(logs: Vec<Log>) -> Result<String> {
let res = if shell::is_json() {
serde_json::to_string(&logs)?
} else {
let mut s = vec![];
for log in logs {
let pretty = log
.pretty()
.replacen('\n', "- ", 1) // Remove empty first line
.replace('\n', "\n "); // Indent
s.push(pretty);
}
s.join("\n")
};
Ok(res)
}
fn extract_block_range(filter: &Filter) -> (Option<u64>, Option<u64>) {
let FilterBlockOption::Range { from_block, to_block } = &filter.block_option else {
return (None, None);
};
(from_block.and_then(|b| b.as_number()), to_block.and_then(|b| b.as_number()))
}
/// Retrieves logs with automatic chunking fallback.
///
/// First tries to fetch logs for the entire range. If that fails,
/// falls back to concurrent chunked requests with rate limiting.
async fn get_logs_chunked(&self, filter: &Filter, chunk_size: u64) -> Result<Vec<Log>>
where
P: Clone + Unpin,
{
// Try the full range first
if let Ok(logs) = self.provider.get_logs(filter).await {
return Ok(logs);
}
// Fallback: use concurrent chunked approach
self.get_logs_chunked_concurrent(filter, chunk_size).await
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/args.rs | crates/cast/src/args.rs | use crate::{
Cast, SimpleCast,
cmd::erc20::IERC20,
opts::{Cast as CastArgs, CastSubcommand, ToBaseArgs},
traces::identifier::SignaturesIdentifier,
tx::CastTxSender,
};
use alloy_consensus::transaction::Recovered;
use alloy_dyn_abi::{DynSolValue, ErrorExt, EventExt};
use alloy_eips::eip7702::SignedAuthorization;
use alloy_ens::{ProviderEnsExt, namehash};
use alloy_primitives::{Address, B256, eip191_hash_message, hex, keccak256};
use alloy_provider::Provider;
use alloy_rpc_types::{BlockId, BlockNumberOrTag::Latest};
use clap::{CommandFactory, Parser};
use clap_complete::generate;
use eyre::Result;
use foundry_cli::{utils, utils::LoadConfig};
use foundry_common::{
abi::{get_error, get_event},
fmt::{format_tokens, format_uint_exp, serialize_value_as_json},
fs,
selectors::{
ParsedSignatures, SelectorImportData, SelectorKind, decode_calldata, decode_event_topic,
decode_function_selector, decode_selectors, import_selectors, parse_signatures,
pretty_calldata,
},
shell, stdin,
};
use std::time::Instant;
/// Run the `cast` command-line interface.
pub fn run() -> Result<()> {
    setup()?;
    let args = CastArgs::parse();
    args.global.init()?;
    // Build the runtime first, then hand ownership of the parsed args to the command.
    let runtime = args.global.tokio_runtime();
    runtime.block_on(run_command(args))
}
/// Setup the global logger and other utilities.
pub fn setup() -> Result<()> {
    // Global process setup provided by foundry_cli's utils — presumably error/panic
    // reporting hooks; see `utils::common_setup` for specifics.
    utils::common_setup();
    // Installs the global tracing/log subscriber.
    utils::subscriber();
    Ok(())
}
/// Run the subcommand.
pub async fn run_command(args: CastArgs) -> Result<()> {
match args.cmd {
// Constants
CastSubcommand::MaxInt { r#type } => {
sh_println!("{}", SimpleCast::max_int(&r#type)?)?;
}
CastSubcommand::MinInt { r#type } => {
sh_println!("{}", SimpleCast::min_int(&r#type)?)?;
}
CastSubcommand::MaxUint { r#type } => {
sh_println!("{}", SimpleCast::max_int(&r#type)?)?;
}
CastSubcommand::AddressZero => {
sh_println!("{:?}", Address::ZERO)?;
}
CastSubcommand::HashZero => {
sh_println!("{:?}", B256::ZERO)?;
}
// Conversions & transformations
CastSubcommand::FromUtf8 { text } => {
let value = stdin::unwrap(text, false)?;
sh_println!("{}", SimpleCast::from_utf8(&value))?
}
CastSubcommand::ToAscii { hexdata } => {
let value = stdin::unwrap(hexdata, false)?;
sh_println!("{}", SimpleCast::to_ascii(value.trim())?)?
}
CastSubcommand::ToUtf8 { hexdata } => {
let value = stdin::unwrap(hexdata, false)?;
sh_println!("{}", SimpleCast::to_utf8(&value)?)?
}
CastSubcommand::FromFixedPoint { value, decimals } => {
let (value, decimals) = stdin::unwrap2(value, decimals)?;
sh_println!("{}", SimpleCast::from_fixed_point(&value, &decimals)?)?
}
CastSubcommand::ToFixedPoint { value, decimals } => {
let (value, decimals) = stdin::unwrap2(value, decimals)?;
sh_println!("{}", SimpleCast::to_fixed_point(&value, &decimals)?)?
}
CastSubcommand::ConcatHex { data } => {
if data.is_empty() {
let s = stdin::read(true)?;
sh_println!("{}", SimpleCast::concat_hex(s.split_whitespace()))?
} else {
sh_println!("{}", SimpleCast::concat_hex(data))?
}
}
CastSubcommand::FromBin => {
let hex = stdin::read_bytes(false)?;
sh_println!("{}", hex::encode_prefixed(hex))?
}
CastSubcommand::ToHexdata { input } => {
let value = stdin::unwrap_line(input)?;
let output = match value {
s if s.starts_with('@') => hex::encode(std::env::var(&s[1..])?),
s if s.starts_with('/') => hex::encode(fs::read(s)?),
s => s.split(':').map(|s| s.trim_start_matches("0x").to_lowercase()).collect(),
};
sh_println!("0x{output}")?
}
CastSubcommand::ToCheckSumAddress { address, chain_id } => {
let value = stdin::unwrap_line(address)?;
sh_println!("{}", value.to_checksum(chain_id))?
}
CastSubcommand::ToUint256 { value } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::to_uint256(&value)?)?
}
CastSubcommand::ToInt256 { value } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::to_int256(&value)?)?
}
CastSubcommand::ToUnit { value, unit } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::to_unit(&value, &unit)?)?
}
CastSubcommand::ParseUnits { value, unit } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::parse_units(&value, unit)?)?;
}
CastSubcommand::FormatUnits { value, unit } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::format_units(&value, unit)?)?;
}
CastSubcommand::FromWei { value, unit } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::from_wei(&value, &unit)?)?
}
CastSubcommand::ToWei { value, unit } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::to_wei(&value, &unit)?)?
}
CastSubcommand::FromRlp { value, as_int } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::from_rlp(value, as_int)?)?
}
CastSubcommand::ToRlp { value } => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::to_rlp(&value)?)?
}
CastSubcommand::ToHex(ToBaseArgs { value, base_in }) => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::to_base(&value, base_in.as_deref(), "hex")?)?
}
CastSubcommand::ToDec(ToBaseArgs { value, base_in }) => {
let value = stdin::unwrap_line(value)?;
sh_println!("{}", SimpleCast::to_base(&value, base_in.as_deref(), "dec")?)?
}
CastSubcommand::ToBase { base: ToBaseArgs { value, base_in }, base_out } => {
let (value, base_out) = stdin::unwrap2(value, base_out)?;
sh_println!("{}", SimpleCast::to_base(&value, base_in.as_deref(), &base_out)?)?
}
CastSubcommand::ToBytes32 { bytes } => {
let value = stdin::unwrap_line(bytes)?;
sh_println!("{}", SimpleCast::to_bytes32(&value)?)?
}
CastSubcommand::Pad { data, right, left: _, len } => {
let value = stdin::unwrap_line(data)?;
sh_println!("{}", SimpleCast::pad(&value, right, len)?)?
}
CastSubcommand::FormatBytes32String { string } => {
let value = stdin::unwrap_line(string)?;
sh_println!("{}", SimpleCast::format_bytes32_string(&value)?)?
}
CastSubcommand::ParseBytes32String { bytes } => {
let value = stdin::unwrap_line(bytes)?;
sh_println!("{}", SimpleCast::parse_bytes32_string(&value)?)?
}
CastSubcommand::ParseBytes32Address { bytes } => {
let value = stdin::unwrap_line(bytes)?;
sh_println!("{}", SimpleCast::parse_bytes32_address(&value)?)?
}
// ABI encoding & decoding
CastSubcommand::DecodeAbi { sig, calldata, input } => {
let tokens = SimpleCast::abi_decode(&sig, &calldata, input)?;
print_tokens(&tokens);
}
CastSubcommand::AbiEncode { sig, packed, args } => {
if !packed {
sh_println!("{}", SimpleCast::abi_encode(&sig, &args)?)?
} else {
sh_println!("{}", SimpleCast::abi_encode_packed(&sig, &args)?)?
}
}
CastSubcommand::AbiEncodeEvent { sig, args } => {
let log_data = SimpleCast::abi_encode_event(&sig, &args)?;
for (i, topic) in log_data.topics().iter().enumerate() {
sh_println!("[topic{}]: {}", i, topic)?;
}
if !log_data.data.is_empty() {
sh_println!("[data]: {}", hex::encode_prefixed(log_data.data))?;
}
}
CastSubcommand::DecodeCalldata { sig, calldata, file } => {
let raw_hex = if let Some(file_path) = file {
let contents = fs::read_to_string(&file_path)?;
contents.trim().to_string()
} else {
calldata.unwrap()
};
let tokens = SimpleCast::calldata_decode(&sig, &raw_hex, true)?;
print_tokens(&tokens);
}
CastSubcommand::CalldataEncode { sig, args, file } => {
let final_args = if let Some(file_path) = file {
let contents = fs::read_to_string(file_path)?;
contents
.lines()
.map(str::trim)
.filter(|line| !line.is_empty())
.map(String::from)
.collect()
} else {
args
};
sh_println!("{}", SimpleCast::calldata_encode(sig, &final_args)?)?;
}
CastSubcommand::DecodeString { data } => {
let tokens = SimpleCast::calldata_decode("Any(string)", &data, true)?;
print_tokens(&tokens);
}
CastSubcommand::DecodeEvent { sig, data } => {
let decoded_event = if let Some(event_sig) = sig {
let event = get_event(event_sig.as_str())?;
event.decode_log_parts(core::iter::once(event.selector()), &hex::decode(data)?)?
} else {
let data = crate::strip_0x(&data);
let selector = data.get(..64).unwrap_or_default();
let selector = selector.parse()?;
let identified_event =
SignaturesIdentifier::new(false)?.identify_event(selector).await;
if let Some(event) = identified_event {
let _ = sh_println!("{}", event.signature());
let data = data.get(64..).unwrap_or_default();
get_event(event.signature().as_str())?
.decode_log_parts(core::iter::once(selector), &hex::decode(data)?)?
} else {
eyre::bail!("No matching event signature found for selector `{selector}`")
}
};
print_tokens(&decoded_event.body);
}
CastSubcommand::DecodeError { sig, data } => {
let error = if let Some(err_sig) = sig {
get_error(err_sig.as_str())?
} else {
let data = crate::strip_0x(&data);
let selector = data.get(..8).unwrap_or_default();
let identified_error =
SignaturesIdentifier::new(false)?.identify_error(selector.parse()?).await;
if let Some(error) = identified_error {
let _ = sh_println!("{}", error.signature());
error
} else {
eyre::bail!("No matching error signature found for selector `{selector}`")
}
};
let decoded_error = error.decode_error(&hex::decode(data)?)?;
print_tokens(&decoded_error.body);
}
CastSubcommand::Interface(cmd) => cmd.run().await?,
CastSubcommand::CreationCode(cmd) => cmd.run().await?,
CastSubcommand::ConstructorArgs(cmd) => cmd.run().await?,
CastSubcommand::Artifact(cmd) => cmd.run().await?,
CastSubcommand::Bind(cmd) => cmd.run().await?,
CastSubcommand::B2EPayload(cmd) => cmd.run().await?,
CastSubcommand::PrettyCalldata { calldata, offline } => {
let calldata = stdin::unwrap_line(calldata)?;
sh_println!("{}", pretty_calldata(&calldata, offline).await?)?;
}
CastSubcommand::Sig { sig, optimize } => {
let sig = stdin::unwrap_line(sig)?;
match optimize {
Some(opt) => {
sh_println!("Starting to optimize signature...")?;
let start_time = Instant::now();
let (selector, signature) = SimpleCast::get_selector(&sig, opt)?;
sh_println!("Successfully generated in {:?}", start_time.elapsed())?;
sh_println!("Selector: {selector}")?;
sh_println!("Optimized signature: {signature}")?;
}
None => sh_println!("{}", SimpleCast::get_selector(&sig, 0)?.0)?,
}
}
// Blockchain & RPC queries
CastSubcommand::AccessList(cmd) => cmd.run().await?,
CastSubcommand::Age { block, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
sh_println!(
"{} UTC",
Cast::new(provider).age(block.unwrap_or(BlockId::Number(Latest))).await?
)?
}
CastSubcommand::Balance { block, who, ether, rpc, erc20 } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let account_addr = who.resolve(&provider).await?;
match erc20 {
Some(token) => {
let balance = IERC20::new(token, &provider)
.balanceOf(account_addr)
.block(block.unwrap_or_default())
.call()
.await?;
sh_warn!("--erc20 flag is deprecated, use `cast erc20 balance` instead")?;
sh_println!("{}", format_uint_exp(balance))?
}
None => {
let value = Cast::new(&provider).balance(account_addr, block).await?;
if ether {
sh_println!("{}", SimpleCast::from_wei(&value.to_string(), "eth")?)?
} else {
sh_println!("{value}")?
}
}
}
}
CastSubcommand::BaseFee { block, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
sh_println!(
"{}",
Cast::new(provider).base_fee(block.unwrap_or(BlockId::Number(Latest))).await?
)?
}
CastSubcommand::Block { block, full, fields, raw, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
// Can use either --raw or specify raw as a field
let raw = raw || fields.contains(&"raw".into());
sh_println!(
"{}",
Cast::new(provider)
.block(block.unwrap_or(BlockId::Number(Latest)), full, fields, raw)
.await?
)?
}
CastSubcommand::BlockNumber { rpc, block } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let number = match block {
Some(id) => {
provider
.get_block(id)
.await?
.ok_or_else(|| eyre::eyre!("block {id:?} not found"))?
.header
.number
}
None => Cast::new(provider).block_number().await?,
};
sh_println!("{number}")?
}
CastSubcommand::Chain { rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
sh_println!("{}", Cast::new(provider).chain().await?)?
}
CastSubcommand::ChainId { rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
sh_println!("{}", Cast::new(provider).chain_id().await?)?
}
CastSubcommand::Client { rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
sh_println!("{}", provider.get_client_version().await?)?
}
CastSubcommand::Code { block, who, disassemble, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = who.resolve(&provider).await?;
sh_println!("{}", Cast::new(provider).code(who, block, disassemble).await?)?
}
CastSubcommand::Codesize { block, who, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = who.resolve(&provider).await?;
sh_println!("{}", Cast::new(provider).codesize(who, block).await?)?
}
CastSubcommand::ComputeAddress { address, nonce, salt, init_code, init_code_hash, rpc } => {
let address = stdin::unwrap_line(address)?;
let computed = {
// For CREATE2, init_code_hash is needed to compute the address
if let Some(init_code_hash) = init_code_hash {
address.create2(salt.unwrap_or(B256::ZERO), init_code_hash)
} else if let Some(init_code) = init_code {
address.create2(salt.unwrap_or(B256::ZERO), keccak256(hex::decode(init_code)?))
} else {
// For CREATE, rpc is needed to compute the address
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
Cast::new(provider).compute_address(address, nonce).await?
}
};
sh_println!("Computed Address: {}", computed.to_checksum(None))?
}
CastSubcommand::Disassemble { bytecode } => {
let bytecode = stdin::unwrap_line(bytecode)?;
sh_println!("{}", SimpleCast::disassemble(&hex::decode(bytecode)?)?)?
}
CastSubcommand::Selectors { bytecode, resolve } => {
let bytecode = stdin::unwrap_line(bytecode)?;
let functions = SimpleCast::extract_functions(&bytecode)?;
let max_args_len = functions.iter().map(|r| r.1.len()).max().unwrap_or(0);
let max_mutability_len = functions.iter().map(|r| r.2.len()).max().unwrap_or(0);
let resolve_results = if resolve {
let selectors = functions
.iter()
.map(|&(selector, ..)| SelectorKind::Function(selector))
.collect::<Vec<_>>();
let ds = decode_selectors(&selectors).await?;
ds.into_iter().map(|v| v.join("|")).collect()
} else {
vec![]
};
for (pos, (selector, arguments, state_mutability)) in functions.into_iter().enumerate()
{
if resolve {
let resolved = &resolve_results[pos];
sh_println!(
"{selector}\t{arguments:max_args_len$}\t{state_mutability:max_mutability_len$}\t{resolved}"
)?
} else {
sh_println!("{selector}\t{arguments:max_args_len$}\t{state_mutability}")?
}
}
}
CastSubcommand::FindBlock(cmd) => cmd.run().await?,
CastSubcommand::GasPrice { rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
sh_println!("{}", Cast::new(provider).gas_price().await?)?;
}
CastSubcommand::Index { key_type, key, slot_number } => {
sh_println!("{}", SimpleCast::index(&key_type, &key, &slot_number)?)?;
}
CastSubcommand::IndexErc7201 { id, formula_id } => {
eyre::ensure!(formula_id == "erc7201", "unsupported formula ID: {formula_id}");
let id = stdin::unwrap_line(id)?;
sh_println!("{}", foundry_common::erc7201(&id))?;
}
CastSubcommand::Implementation { block, beacon, who, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = who.resolve(&provider).await?;
sh_println!("{}", Cast::new(provider).implementation(who, beacon, block).await?)?;
}
CastSubcommand::Admin { block, who, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = who.resolve(&provider).await?;
sh_println!("{}", Cast::new(provider).admin(who, block).await?)?;
}
CastSubcommand::Nonce { block, who, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = who.resolve(&provider).await?;
sh_println!("{}", Cast::new(provider).nonce(who, block).await?)?;
}
CastSubcommand::Codehash { block, who, slots, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = who.resolve(&provider).await?;
sh_println!("{}", Cast::new(provider).codehash(who, slots, block).await?)?;
}
CastSubcommand::StorageRoot { block, who, slots, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = who.resolve(&provider).await?;
sh_println!("{}", Cast::new(provider).storage_root(who, slots, block).await?)?;
}
CastSubcommand::Proof { address, slots, rpc, block } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let address = address.resolve(&provider).await?;
let value = provider
.get_proof(address, slots.into_iter().collect())
.block_id(block.unwrap_or_default())
.await?;
sh_println!("{}", serde_json::to_string(&value)?)?;
}
CastSubcommand::Rpc(cmd) => cmd.run().await?,
CastSubcommand::Storage(cmd) => cmd.run().await?,
// Calls & transactions
CastSubcommand::Call(cmd) => cmd.run().await?,
CastSubcommand::Estimate(cmd) => cmd.run().await?,
CastSubcommand::MakeTx(cmd) => cmd.run().await?,
CastSubcommand::PublishTx { raw_tx, cast_async, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let cast = Cast::new(&provider);
let pending_tx = cast.publish(raw_tx).await?;
let tx_hash = pending_tx.inner().tx_hash();
if cast_async {
sh_println!("{tx_hash:#x}")?;
} else {
let receipt = pending_tx.get_receipt().await?;
sh_println!("{}", serde_json::json!(receipt))?;
}
}
CastSubcommand::Receipt { tx_hash, field, cast_async, confirmations, rpc } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
sh_println!(
"{}",
CastTxSender::new(provider)
.receipt(tx_hash, field, confirmations, None, cast_async)
.await?
)?
}
CastSubcommand::Run(cmd) => cmd.run().await?,
CastSubcommand::SendTx(cmd) => cmd.run().await?,
CastSubcommand::Tx { tx_hash, from, nonce, field, raw, rpc, to_request } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
// Can use either --raw or specify raw as a field
let raw = raw || field.as_ref().is_some_and(|f| f == "raw");
sh_println!(
"{}",
Cast::new(&provider)
.transaction(tx_hash, from, nonce, field, raw, to_request)
.await?
)?
}
// 4Byte
CastSubcommand::FourByte { selector } => {
let selector = stdin::unwrap_line(selector)?;
let sigs = decode_function_selector(selector).await?;
if sigs.is_empty() {
eyre::bail!("No matching function signatures found for selector `{selector}`");
}
for sig in sigs {
sh_println!("{sig}")?
}
}
CastSubcommand::FourByteCalldata { calldata } => {
let calldata = stdin::unwrap_line(calldata)?;
if calldata.len() == 10 {
let sigs = decode_function_selector(calldata.parse()?).await?;
if sigs.is_empty() {
eyre::bail!("No matching function signatures found for calldata `{calldata}`");
}
for sig in sigs {
sh_println!("{sig}")?
}
return Ok(());
}
let sigs = decode_calldata(&calldata).await?;
sigs.iter().enumerate().for_each(|(i, sig)| {
let _ = sh_println!("{}) \"{sig}\"", i + 1);
});
let sig = match sigs.len() {
0 => eyre::bail!("No signatures found"),
1 => sigs.first().unwrap(),
_ => {
let i: usize = prompt!("Select a function signature by number: ")?;
sigs.get(i - 1).ok_or_else(|| eyre::eyre!("Invalid signature index"))?
}
};
let tokens = SimpleCast::calldata_decode(sig, &calldata, true)?;
print_tokens(&tokens);
}
CastSubcommand::FourByteEvent { topic } => {
let topic = stdin::unwrap_line(topic)?;
let sigs = decode_event_topic(topic).await?;
if sigs.is_empty() {
eyre::bail!("No matching event signatures found for topic `{topic}`");
}
for sig in sigs {
sh_println!("{sig}")?
}
}
CastSubcommand::UploadSignature { signatures } => {
let signatures = stdin::unwrap_vec(signatures)?;
let ParsedSignatures { signatures, abis } = parse_signatures(signatures);
if !abis.is_empty() {
import_selectors(SelectorImportData::Abi(abis)).await?.describe();
}
if !signatures.is_empty() {
import_selectors(SelectorImportData::Raw(signatures)).await?.describe();
}
}
// ENS
CastSubcommand::Namehash { name } => {
let name = stdin::unwrap_line(name)?;
sh_println!("{}", namehash(&name))?
}
CastSubcommand::LookupAddress { who, rpc, verify } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = stdin::unwrap_line(who)?;
let name = provider.lookup_address(&who).await?;
if verify {
let address = provider.resolve_name(&name).await?;
eyre::ensure!(
address == who,
"Reverse lookup verification failed: got `{address}`, expected `{who}`"
);
}
sh_println!("{name}")?
}
CastSubcommand::ResolveName { who, rpc, verify } => {
let config = rpc.load_config()?;
let provider = utils::get_provider(&config)?;
let who = stdin::unwrap_line(who)?;
let address = provider.resolve_name(&who).await?;
if verify {
let name = provider.lookup_address(&address).await?;
eyre::ensure!(
name == who,
"Forward lookup verification failed: got `{name}`, expected `{who}`"
);
}
sh_println!("{address}")?
}
// Misc
CastSubcommand::Keccak { data } => {
let bytes = match data {
Some(data) => data.into_bytes(),
None => stdin::read_bytes(false)?,
};
match String::from_utf8(bytes) {
Ok(s) => {
let s = SimpleCast::keccak(&s)?;
sh_println!("{s}")?
}
Err(e) => {
let hash = keccak256(e.as_bytes());
let s = hex::encode(hash);
sh_println!("0x{s}")?
}
};
}
CastSubcommand::HashMessage { message } => {
let message = stdin::unwrap(message, false)?;
sh_println!("{}", eip191_hash_message(message))?
}
CastSubcommand::SigEvent { event_string } => {
let event_string = stdin::unwrap_line(event_string)?;
let parsed_event = get_event(&event_string)?;
sh_println!("{:?}", parsed_event.selector())?
}
CastSubcommand::LeftShift { value, bits, base_in, base_out } => sh_println!(
"{}",
SimpleCast::left_shift(&value, &bits, base_in.as_deref(), &base_out)?
)?,
CastSubcommand::RightShift { value, bits, base_in, base_out } => sh_println!(
"{}",
SimpleCast::right_shift(&value, &bits, base_in.as_deref(), &base_out)?
)?,
CastSubcommand::Source {
address,
directory,
explorer_api_url,
explorer_url,
etherscan,
flatten,
} => {
let config = etherscan.load_config()?;
let chain = config.chain.unwrap_or_default();
let api_key = config.get_etherscan_api_key(Some(chain));
match (directory, flatten) {
(Some(dir), false) => {
SimpleCast::expand_etherscan_source_to_directory(
chain,
address,
api_key,
dir,
explorer_api_url,
explorer_url,
)
.await?
}
(None, false) => sh_println!(
"{}",
SimpleCast::etherscan_source(
chain,
address,
api_key,
explorer_api_url,
explorer_url
)
.await?
)?,
(dir, true) => {
SimpleCast::etherscan_source_flatten(
chain,
address,
api_key,
dir,
explorer_api_url,
explorer_url,
)
.await?;
}
}
}
CastSubcommand::Create2(cmd) => {
cmd.run()?;
}
CastSubcommand::Wallet { command } => command.run().await?,
CastSubcommand::Completions { shell } => {
generate(shell, &mut CastArgs::command(), "cast", &mut std::io::stdout())
}
CastSubcommand::Logs(cmd) => cmd.run().await?,
CastSubcommand::DecodeTransaction { tx } => {
let tx = stdin::unwrap_line(tx)?;
let tx = SimpleCast::decode_raw_transaction(&tx)?;
if let Ok(signer) = tx.recover() {
let recovered = Recovered::new_unchecked(tx, signer);
sh_println!("{}", serde_json::to_string_pretty(&recovered)?)?;
} else {
sh_println!("{}", serde_json::to_string_pretty(&tx)?)?;
}
}
CastSubcommand::RecoverAuthority { auth } => {
let auth: SignedAuthorization = serde_json::from_str(&auth)?;
sh_println!("{}", auth.recover_authority()?)?;
}
CastSubcommand::TxPool { command } => command.run().await?,
CastSubcommand::Erc20Token { command } => command.run().await?,
CastSubcommand::DAEstimate(cmd) => {
cmd.run().await?;
}
};
/// Prints slice of tokens using [`format_tokens`] or [`serialize_value_as_json`] depending
/// whether the shell is in JSON mode.
///
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/tx.rs | crates/cast/src/tx.rs | use crate::traces::identifier::SignaturesIdentifier;
use alloy_consensus::{SidecarBuilder, SignableTransaction, SimpleCoder};
use alloy_dyn_abi::ErrorExt;
use alloy_ens::NameOrAddress;
use alloy_json_abi::Function;
use alloy_network::{
AnyNetwork, TransactionBuilder, TransactionBuilder4844, TransactionBuilder7702,
};
use alloy_primitives::{Address, Bytes, TxHash, TxKind, U256, hex};
use alloy_provider::{PendingTransactionBuilder, Provider};
use alloy_rpc_types::{AccessList, Authorization, TransactionInputKind, TransactionRequest};
use alloy_serde::WithOtherFields;
use alloy_signer::Signer;
use alloy_transport::TransportError;
use clap::Args;
use eyre::{Result, WrapErr};
use foundry_cli::{
opts::{CliAuthorizationList, EthereumOpts, TransactionOpts},
utils::{self, LoadConfig, get_provider_builder, parse_function_args},
};
use foundry_common::{
TransactionReceiptWithRevertReason, fmt::*, get_pretty_tx_receipt_attr,
provider::RetryProviderWithSigner, shell,
};
use foundry_config::{Chain, Config};
use foundry_primitives::{FoundryTransactionRequest, FoundryTypedTx};
use foundry_wallets::{WalletOpts, WalletSigner};
use itertools::Itertools;
use serde_json::value::RawValue;
use std::{fmt::Write, str::FromStr, time::Duration};
// CLI flags shared by `cast send`-style commands (broadcast + receipt handling).
//
// NOTE: the `///` doc comments on the fields below double as `clap` help text, so they are
// part of the user-facing CLI output — edit them with care.
#[derive(Debug, Clone, Args)]
pub struct SendTxOpts {
    /// Only print the transaction hash and exit immediately.
    #[arg(id = "async", long = "async", alias = "cast-async", env = "CAST_ASYNC")]
    pub cast_async: bool,
    /// Wait for transaction receipt synchronously instead of polling.
    /// Note: uses `eth_sendTransactionSync` which may not be supported by all clients.
    #[arg(long, conflicts_with = "async")]
    pub sync: bool,
    /// The number of confirmations until the receipt is fetched.
    #[arg(long, default_value = "1")]
    pub confirmations: u64,
    /// Timeout for sending the transaction.
    #[arg(long, env = "ETH_TIMEOUT")]
    pub timeout: Option<u64>,
    /// Polling interval for transaction receipts (in seconds).
    #[arg(long, alias = "poll-interval", env = "ETH_POLL_INTERVAL")]
    pub poll_interval: Option<u64>,
    /// Ethereum options
    #[command(flatten)]
    pub eth: EthereumOpts,
}
/// Different sender kinds used by [`CastTxBuilder`].
pub enum SenderKind<'a> {
    /// An address without signer. Used for read-only calls and transactions sent through unlocked
    /// accounts.
    Address(Address),
    /// A reference to a signer.
    Signer(&'a WalletSigner),
    /// An owned signer.
    // Boxed, which keeps the enum itself small regardless of the signer's size.
    OwnedSigner(Box<WalletSigner>),
}
impl SenderKind<'_> {
    /// Returns the Ethereum address associated with this sender.
    pub fn address(&self) -> Address {
        match self {
            Self::Address(address) => *address,
            Self::Signer(signer) => signer.address(),
            Self::OwnedSigner(signer) => signer.address(),
        }
    }
    /// Resolves the sender from the wallet options.
    ///
    /// The explicit `from` field always wins and may therefore differ from the configured
    /// signer's address:
    /// - if `from` is set, that address is used as-is;
    /// - otherwise, if a signer can be built, the signer (and thus its address) is used;
    /// - otherwise, the zero address is used.
    pub async fn from_wallet_opts(opts: WalletOpts) -> Result<Self> {
        match opts.from {
            Some(from) => Ok(from.into()),
            // Signer construction errors are deliberately swallowed: we fall back to the
            // zero address for read-only use, exactly like the original behavior.
            None => match opts.signer().await {
                Ok(signer) => Ok(Self::OwnedSigner(Box::new(signer))),
                Err(_) => Ok(Address::ZERO.into()),
            },
        }
    }
    /// Returns the underlying signer, if this sender carries one.
    pub fn as_signer(&self) -> Option<&WalletSigner> {
        match self {
            Self::Address(_) => None,
            Self::Signer(signer) => Some(*signer),
            Self::OwnedSigner(signer) => Some(signer.as_ref()),
        }
    }
}
// Convenience conversions so plain addresses and signers can be passed anywhere an
// `impl Into<SenderKind>` is expected (e.g. `CastTxBuilder::build`).
impl From<Address> for SenderKind<'_> {
    fn from(addr: Address) -> Self {
        Self::Address(addr)
    }
}
impl<'a> From<&'a WalletSigner> for SenderKind<'a> {
    // Borrowed signers keep their lifetime; no ownership is taken.
    fn from(signer: &'a WalletSigner) -> Self {
        Self::Signer(signer)
    }
}
impl From<WalletSigner> for SenderKind<'_> {
    // Owned signers are boxed and stored in the enum.
    fn from(signer: WalletSigner) -> Self {
        Self::OwnedSigner(Box::new(signer))
    }
}
/// Prevents a misconfigured hwlib from sending a transaction that defies user-specified --from.
///
/// Returns an error only when a sender was explicitly specified AND it differs from the
/// address derived from the hardware wallet's HD path.
pub fn validate_from_address(
    specified_from: Option<Address>,
    signer_address: Address,
) -> Result<()> {
    match specified_from {
        // An explicitly requested sender that differs from the signer is always an error.
        Some(requested) if requested != signer_address => eyre::bail!(
            "\
The specified sender via CLI/env vars does not match the sender configured via
the hardware wallet's HD Path.
Please use the `--hd-path <PATH>` parameter to specify the BIP32 Path which
corresponds to the sender, or let foundry automatically detect it by not specifying any sender address."
        ),
        // No sender specified, or it matches the signer: nothing to validate.
        _ => Ok(()),
    }
}
/// Initial state.
// `CastTxBuilder` is a typestate builder (see its docs): [InitState] -> [ToState] ->
// [InputState]. These marker structs carry the data accumulated at each step.
#[derive(Debug)]
pub struct InitState;
/// State with known [TxKind].
#[derive(Debug)]
pub struct ToState {
    // Resolved recipient, if any. `None` is only valid for deployments or value-less
    // EIP-7702 transactions (enforced in `with_code_sig_and_args`).
    to: Option<Address>,
}
/// State with known input for the transaction.
#[derive(Debug)]
pub struct InputState {
    // `Call(to)` or `Create`, derived from `ToState::to`.
    kind: TxKind,
    // Raw calldata (or init code + encoded constructor args for deployments).
    input: Vec<u8>,
    // Parsed function signature, if the input was built from one; returned alongside the
    // built transaction so callers can decode outputs.
    func: Option<Function>,
}
/// Thin provider wrapper used by `cast` to broadcast transactions and fetch/format receipts.
pub struct CastTxSender<P> {
    provider: P,
}
impl<P: Provider<AnyNetwork>> CastTxSender<P> {
    /// Creates a new Cast instance responsible for sending transactions.
    pub fn new(provider: P) -> Self {
        Self { provider }
    }
    /// Sends a transaction and waits for receipt synchronously
    /// (via `send_transaction_sync`, i.e. a single blocking RPC instead of send + poll).
    pub async fn send_sync(&self, tx: WithOtherFields<TransactionRequest>) -> Result<String> {
        let mut receipt: TransactionReceiptWithRevertReason =
            self.provider.send_transaction_sync(tx).await?.into();
        // Allow to fail silently
        let _ = receipt.update_revert_reason(&self.provider).await;
        self.format_receipt(receipt, None)
    }
    /// Sends a transaction to the specified address
    ///
    /// # Example
    ///
    /// ```
    /// use cast::tx::CastTxSender;
    /// use alloy_primitives::{Address, U256, Bytes};
    /// use alloy_serde::WithOtherFields;
    /// use alloy_rpc_types::{TransactionRequest};
    /// use alloy_provider::{RootProvider, ProviderBuilder, network::AnyNetwork};
    /// use std::str::FromStr;
    /// use alloy_sol_types::{sol, SolCall};
    ///
    /// sol!(
    /// function greet(string greeting) public;
    /// );
    ///
    /// # async fn foo() -> eyre::Result<()> {
    /// let provider = ProviderBuilder::<_,_, AnyNetwork>::default().connect("http://localhost:8545").await?;
    /// let from = Address::from_str("0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045")?;
    /// let to = Address::from_str("0xB3C95ff08316fb2F2e3E52Ee82F8e7b605Aa1304")?;
    /// let greeting = greetCall { greeting: "hello".to_string() }.abi_encode();
    /// let bytes = Bytes::from_iter(greeting.iter());
    /// let gas = U256::from_str("200000").unwrap();
    /// let value = U256::from_str("1").unwrap();
    /// let nonce = U256::from_str("1").unwrap();
    /// let tx = TransactionRequest::default().to(to).input(bytes.into()).from(from);
    /// let tx = WithOtherFields::new(tx);
    /// let cast = CastTxSender::new(provider);
    /// let data = cast.send(tx).await?;
    /// println!("{:#?}", data);
    /// # Ok(())
    /// # }
    /// ```
    pub async fn send(
        &self,
        tx: WithOtherFields<TransactionRequest>,
    ) -> Result<PendingTransactionBuilder<AnyNetwork>> {
        let res = self.provider.send_transaction(tx).await?;
        Ok(res)
    }
    /// Sends a raw RLP-encoded transaction via `eth_sendRawTransaction`.
    ///
    /// Used for transaction types that the standard Alloy network stack doesn't understand
    /// (e.g., Tempo transactions).
    pub async fn send_raw(&self, raw_tx: &[u8]) -> Result<PendingTransactionBuilder<AnyNetwork>> {
        let res = self.provider.send_raw_transaction(raw_tx).await?;
        Ok(res)
    }
    /// Fetches the receipt for `tx_hash`, optionally extracting the single `field`.
    ///
    /// If the receipt is not yet available it is polled for with `confs` required
    /// confirmations and an optional `timeout` (in seconds) — unless `cast_async` is set,
    /// in which case a missing receipt is an immediate error.
    ///
    /// # Example
    ///
    /// ```
    /// use alloy_provider::{ProviderBuilder, RootProvider, network::AnyNetwork};
    /// use cast::tx::CastTxSender;
    ///
    /// async fn foo() -> eyre::Result<()> {
    ///     let provider =
    ///         ProviderBuilder::<_, _, AnyNetwork>::default().connect("http://localhost:8545").await?;
    ///     let cast = CastTxSender::new(provider);
    ///     let tx_hash = "0xf8d1713ea15a81482958fb7ddf884baee8d3bcc478c5f2f604e008dc788ee4fc";
    ///     let receipt = cast.receipt(tx_hash.to_string(), None, 1, None, false).await?;
    ///     println!("{}", receipt);
    /// # Ok(())
    /// # }
    /// ```
    pub async fn receipt(
        &self,
        tx_hash: String,
        field: Option<String>,
        confs: u64,
        timeout: Option<u64>,
        cast_async: bool,
    ) -> Result<String> {
        let tx_hash = TxHash::from_str(&tx_hash).wrap_err("invalid tx hash")?;
        let mut receipt: TransactionReceiptWithRevertReason =
            match self.provider.get_transaction_receipt(tx_hash).await? {
                Some(r) => r,
                None => {
                    // if the async flag is provided, immediately exit if no tx is found, otherwise
                    // try to poll for it
                    if cast_async {
                        eyre::bail!("tx not found: {:?}", tx_hash)
                    } else {
                        PendingTransactionBuilder::new(self.provider.root().clone(), tx_hash)
                            .with_required_confirmations(confs)
                            .with_timeout(timeout.map(Duration::from_secs))
                            .get_receipt()
                            .await?
                    }
                }
            }
            .into();
        // Allow to fail silently
        let _ = receipt.update_revert_reason(&self.provider).await;
        self.format_receipt(receipt, field)
    }
    /// Helper method to format transaction receipts consistently.
    ///
    /// Precedence: a specific `field` if requested, otherwise JSON (round-tripped through
    /// `to_value` to sort object keys) when the shell is in JSON mode, otherwise the
    /// pretty-printed receipt.
    fn format_receipt(
        &self,
        receipt: TransactionReceiptWithRevertReason,
        field: Option<String>,
    ) -> Result<String> {
        Ok(if let Some(ref field) = field {
            get_pretty_tx_receipt_attr(&receipt, field)
                .ok_or_else(|| eyre::eyre!("invalid receipt field: {}", field))?
        } else if shell::is_json() {
            // to_value first to sort json object keys
            serde_json::to_value(&receipt)?.to_string()
        } else {
            receipt.pretty()
        })
    }
}
/// Builder type constructing [TransactionRequest] from cast send/mktx inputs.
///
/// It is implemented as a stateful builder with expected state transition of [InitState] ->
/// [ToState] -> [InputState].
#[derive(Debug)]
pub struct CastTxBuilder<P, S> {
    provider: P,
    /// The request being assembled; extra (non-standard) fields live in `tx.other`.
    tx: WithOtherFields<TransactionRequest>,
    /// Whether the transaction should be sent as a legacy transaction.
    legacy: bool,
    /// Whether this is an EIP-4844 blob transaction (`--blob`).
    blob: bool,
    /// EIP-7702 authorization inputs (`--auth`), resolved/signed at build time.
    auth: Vec<CliAuthorizationList>,
    chain: Chain,
    /// Used to resolve function signatures when constructing calldata.
    etherscan_api_key: Option<String>,
    /// Access-list request: `None` = not requested, `Some(None)` = create via provider,
    /// `Some(Some(list))` = user-supplied list (see `_build`).
    access_list: Option<Option<AccessList>>,
    /// Typestate marker carrying the data accumulated so far.
    state: S,
}
impl<P: Provider<AnyNetwork>> CastTxBuilder<P, InitState> {
    /// Creates a new instance of [CastTxBuilder] filling transaction with fields present in
    /// provided [TransactionOpts].
    pub async fn new(provider: P, tx_opts: TransactionOpts, config: &Config) -> Result<Self> {
        let mut tx = WithOtherFields::<TransactionRequest>::default();
        let chain = utils::get_chain(config.chain, &provider).await?;
        let etherscan_api_key = config.get_etherscan_api_key(Some(chain));
        // mark it as legacy if requested or the chain is legacy and no 7702 is provided.
        let legacy = tx_opts.legacy || (chain.is_legacy() && tx_opts.auth.is_empty());
        if let Some(gas_limit) = tx_opts.gas_limit {
            tx.set_gas_limit(gas_limit.to());
        }
        if let Some(value) = tx_opts.value {
            tx.set_value(value);
        }
        // `--gas-price` maps to the legacy `gasPrice` field on legacy txs and to
        // `maxFeePerGas` on EIP-1559 txs.
        if let Some(gas_price) = tx_opts.gas_price {
            if legacy {
                tx.set_gas_price(gas_price.to());
            } else {
                tx.set_max_fee_per_gas(gas_price.to());
            }
        }
        // Priority fee only exists on EIP-1559 transactions.
        if !legacy && let Some(priority_fee) = tx_opts.priority_gas_price {
            tx.set_max_priority_fee_per_gas(priority_fee.to());
        }
        if let Some(max_blob_fee) = tx_opts.blob_gas_price {
            tx.set_max_fee_per_blob_gas(max_blob_fee.to())
        }
        if let Some(nonce) = tx_opts.nonce {
            tx.set_nonce(nonce.to());
        }
        // Set Tempo fee token if provided
        // (stored in the extra-fields map, as it is not a standard tx field).
        if let Some(fee_token) = tx_opts.tempo.fee_token {
            tx.other.insert("feeToken".to_string(), serde_json::to_value(fee_token).unwrap());
        }
        // Set Tempo sequence (nonce) key if provided.
        if let Some(nonce_key) = tx_opts.tempo.sequence_key {
            tx.other.insert("nonceKey".to_string(), serde_json::to_value(nonce_key).unwrap());
        }
        Ok(Self {
            provider,
            tx,
            legacy,
            blob: tx_opts.blob,
            chain,
            etherscan_api_key,
            auth: tx_opts.auth,
            access_list: tx_opts.access_list,
            state: InitState,
        })
    }
    /// Sets [TxKind] for this builder and changes state to [ToState].
    ///
    /// ENS names are resolved against the provider; `None` is preserved (deploy / 7702 case).
    pub async fn with_to(self, to: Option<NameOrAddress>) -> Result<CastTxBuilder<P, ToState>> {
        let to = if let Some(to) = to { Some(to.resolve(&self.provider).await?) } else { None };
        Ok(CastTxBuilder {
            provider: self.provider,
            tx: self.tx,
            legacy: self.legacy,
            blob: self.blob,
            chain: self.chain,
            etherscan_api_key: self.etherscan_api_key,
            auth: self.auth,
            access_list: self.access_list,
            state: ToState { to },
        })
    }
}
impl<P: Provider<AnyNetwork>> CastTxBuilder<P, ToState> {
    /// Accepts user-provided code, sig and args params and constructs calldata for the transaction.
    /// If code is present, input will be set to code + encoded constructor arguments. If no code is
    /// present, input is set to just provided arguments.
    pub async fn with_code_sig_and_args(
        self,
        code: Option<String>,
        sig: Option<String>,
        args: Vec<String>,
    ) -> Result<CastTxBuilder<P, InputState>> {
        // When a signature is given, ABI-encode the arguments (the signature may be resolved
        // with the help of the Etherscan API key).
        let (mut args, func) = if let Some(sig) = sig {
            parse_function_args(
                &sig,
                args,
                self.state.to,
                self.chain,
                &self.provider,
                self.etherscan_api_key.as_deref(),
            )
            .await?
        } else {
            (Vec::new(), None)
        };
        let input = if let Some(code) = &code {
            // Deployment: init code followed by the encoded constructor arguments.
            let mut code = hex::decode(code)?;
            code.append(&mut args);
            code
        } else {
            args
        };
        if self.state.to.is_none() && code.is_none() {
            let has_value = self.tx.value.is_some_and(|v| !v.is_zero());
            let has_auth = !self.auth.is_empty();
            // We only allow user to omit the recipient address if transaction is an EIP-7702 tx
            // without a value.
            if !has_auth || has_value {
                eyre::bail!("Must specify a recipient address or contract code to deploy");
            }
        }
        Ok(CastTxBuilder {
            provider: self.provider,
            tx: self.tx,
            legacy: self.legacy,
            blob: self.blob,
            chain: self.chain,
            etherscan_api_key: self.etherscan_api_key,
            auth: self.auth,
            access_list: self.access_list,
            state: InputState { kind: self.state.to.into(), input, func },
        })
    }
}
impl<P: Provider<AnyNetwork>> CastTxBuilder<P, InputState> {
    /// Builds a [FoundryTransactionRequest] and fills missing fields. Returns a transaction which
    /// is ready to be broadcasted.
    pub async fn build(
        self,
        sender: impl Into<SenderKind<'_>>,
    ) -> Result<(FoundryTransactionRequest, Option<Function>)> {
        let (tx, func) = self._build(sender, true, false).await?;
        Ok((FoundryTransactionRequest::new(tx), func))
    }
    /// Builds [TransactionRequest] without filling missing fields. Used for read-only calls such as
    /// eth_call, eth_estimateGas, etc
    pub async fn build_raw(
        self,
        sender: impl Into<SenderKind<'_>>,
    ) -> Result<(WithOtherFields<TransactionRequest>, Option<Function>)> {
        self._build(sender, false, false).await
    }
    /// Builds an unsigned RLP-encoded raw transaction.
    ///
    /// Returns the hex encoded string representation of the transaction.
    pub async fn build_unsigned_raw(self, from: Address) -> Result<String> {
        let (tx, _) = self._build(SenderKind::Address(from), true, true).await?;
        let ftx = FoundryTransactionRequest::new(tx);
        let tx = ftx.build_unsigned()?;
        // Encode the signing payload for every known typed-tx variant; anything else is
        // rejected explicitly.
        Ok(hex::encode_prefixed(match tx {
            FoundryTypedTx::Legacy(t) => t.encoded_for_signing(),
            FoundryTypedTx::Eip1559(t) => t.encoded_for_signing(),
            FoundryTypedTx::Eip2930(t) => t.encoded_for_signing(),
            FoundryTypedTx::Eip4844(t) => t.encoded_for_signing(),
            FoundryTypedTx::Eip7702(t) => t.encoded_for_signing(),
            FoundryTypedTx::Tempo(t) => t.encoded_for_signing(),
            _ => eyre::bail!(
                "Cannot generate unsigned transaction for transaction: unknown transaction type"
            ),
        }))
    }
    /// Returns whether this builder will produce a Tempo transaction.
    pub fn is_tempo(&self) -> bool {
        // TODO: Replace this with `FoundryTransactionRequest::is_tempo`
        self.tx.other.contains_key("feeToken") || self.tx.other.contains_key("nonceKey")
    }
    /// Shared implementation for `build`/`build_raw`/`build_unsigned_raw`.
    ///
    /// `fill` fetches and fills missing nonce/fee/gas fields from the provider; `unsigned`
    /// indicates an unsigned raw tx is being produced, in which case address-based `--auth`
    /// entries (which would require signing) are rejected.
    async fn _build(
        mut self,
        sender: impl Into<SenderKind<'_>>,
        fill: bool,
        unsigned: bool,
    ) -> Result<(WithOtherFields<TransactionRequest>, Option<Function>)> {
        let sender = sender.into();
        let from = sender.address();
        self.tx.set_kind(self.state.kind);
        // we set both fields to the same value because some nodes only accept the legacy `data` field: <https://github.com/foundry-rs/foundry/issues/7764#issuecomment-2210453249>
        self.tx.set_input_kind(self.state.input.clone(), TransactionInputKind::Both);
        self.tx.set_from(from);
        self.tx.set_chain_id(self.chain.id());
        // The nonce is always determined (it is needed for 7702 authorizations below) but is
        // only persisted on the request when filling.
        let tx_nonce = if let Some(nonce) = self.tx.nonce {
            nonce
        } else {
            let nonce = self.provider.get_transaction_count(from).await?;
            if fill {
                self.tx.nonce = Some(nonce);
            }
            nonce
        };
        if !unsigned {
            self.resolve_auth(sender, tx_nonce).await?;
        } else if !self.auth.is_empty() {
            // Unsigned raw txs cannot sign authorizations, so only pre-signed ones are accepted.
            let mut signed_auths = Vec::with_capacity(self.auth.len());
            for auth in std::mem::take(&mut self.auth) {
                let CliAuthorizationList::Signed(signed_auth) = auth else {
                    eyre::bail!(
                        "SignedAuthorization needs to be provided for generating unsigned 7702 txs"
                    )
                };
                signed_auths.push(signed_auth);
            }
            self.tx.set_authorization_list(signed_auths);
        }
        if let Some(access_list) = match self.access_list.take() {
            None => None,
            // --access-list provided with no value, call the provider to create it
            Some(None) => Some(self.provider.create_access_list(&self.tx).await?.access_list),
            // Access list provided as a string, attempt to parse it
            Some(Some(access_list)) => Some(access_list),
        } {
            self.tx.set_access_list(access_list);
        }
        if !fill {
            return Ok((self.tx, self.state.func));
        }
        if self.legacy && self.tx.gas_price.is_none() {
            self.tx.gas_price = Some(self.provider.get_gas_price().await?);
        }
        if self.blob && self.tx.max_fee_per_blob_gas.is_none() {
            self.tx.max_fee_per_blob_gas = Some(self.provider.get_blob_base_fee().await?)
        }
        // Fill only the missing EIP-1559 fee fields; user-supplied values are preserved.
        if !self.legacy
            && (self.tx.max_fee_per_gas.is_none() || self.tx.max_priority_fee_per_gas.is_none())
        {
            let estimate = self.provider.estimate_eip1559_fees().await?;
            if self.tx.max_fee_per_gas.is_none() {
                self.tx.max_fee_per_gas = Some(estimate.max_fee_per_gas);
            }
            if self.tx.max_priority_fee_per_gas.is_none() {
                self.tx.max_priority_fee_per_gas = Some(estimate.max_priority_fee_per_gas);
            }
        }
        if self.tx.gas.is_none() {
            self.estimate_gas().await?;
        }
        Ok((self.tx, self.state.func))
    }
    /// Estimate tx gas from provider call. Tries to decode custom error if execution reverted.
    async fn estimate_gas(&mut self) -> Result<()> {
        match self.provider.estimate_gas(self.tx.clone()).await {
            Ok(estimated) => {
                self.tx.gas = Some(estimated);
                Ok(())
            }
            Err(err) => {
                if let TransportError::ErrorResp(payload) = &err {
                    // If execution reverted with code 3 during provider gas estimation then try
                    // to decode custom errors and append it to the error message.
                    if payload.code == 3
                        && let Some(data) = &payload.data
                        && let Ok(Some(decoded_error)) = decode_execution_revert(data).await
                    {
                        eyre::bail!("Failed to estimate gas: {}: {}", err, decoded_error)
                    }
                }
                eyre::bail!("Failed to estimate gas: {}", err)
            }
        }
    }
    /// Parses the passed --auth values and sets the authorization list on the transaction.
    async fn resolve_auth(&mut self, sender: SenderKind<'_>, tx_nonce: u64) -> Result<()> {
        if self.auth.is_empty() {
            return Ok(());
        }
        let auths = std::mem::take(&mut self.auth);
        // Validate that at most one address-based auth is provided (multiple addresses are
        // almost always unintended).
        let address_auth_count =
            auths.iter().filter(|a| matches!(a, CliAuthorizationList::Address(_))).count();
        if address_auth_count > 1 {
            eyre::bail!(
                "Multiple address-based authorizations provided. Only one address can be specified; \
                use pre-signed authorizations (hex-encoded) for multiple authorizations."
            );
        }
        let mut signed_auths = Vec::with_capacity(auths.len());
        for auth in auths {
            let signed_auth = match auth {
                CliAuthorizationList::Address(address) => {
                    let auth = Authorization {
                        chain_id: U256::from(self.chain.id()),
                        // The sender's own nonce is incremented before its authorization is
                        // validated (EIP-7702), hence `tx_nonce + 1` for self-signed auths.
                        nonce: tx_nonce + 1,
                        address,
                    };
                    let Some(signer) = sender.as_signer() else {
                        eyre::bail!("No signer available to sign authorization");
                    };
                    let signature = signer.sign_hash(&auth.signature_hash()).await?;
                    auth.into_signed(signature)
                }
                CliAuthorizationList::Signed(auth) => auth,
            };
            signed_auths.push(signed_auth);
        }
        self.tx.set_authorization_list(signed_auths);
        Ok(())
    }
}
impl<P, S> CastTxBuilder<P, S>
where
    P: Provider<AnyNetwork>,
{
    /// Attaches an EIP-4844 blob sidecar built from `blob_data`, if any was provided.
    ///
    /// When `blob_data` is `None` the builder is returned untouched.
    pub fn with_blob_data(mut self, blob_data: Option<Vec<u8>>) -> Result<Self> {
        if let Some(data) = blob_data {
            // Encode the raw bytes into blobs and set both the sidecar and the
            // corresponding versioned blob hashes on the request.
            let mut builder = SidecarBuilder::<SimpleCoder>::default();
            builder.ingest(&data);
            self.tx.set_blob_sidecar(builder.build()?);
            self.tx.populate_blob_hashes();
        }
        Ok(self)
    }
}
/// Helper function that tries to decode custom error name and inputs from error payload data.
///
/// Returns `Ok(None)` when the payload is too short to contain a selector or when the
/// selector is unknown to the signatures identifier.
async fn decode_execution_revert(data: &RawValue) -> Result<Option<String>> {
    let err_data = serde_json::from_str::<Bytes>(data.get())?;
    // The first 4 bytes of the revert data are the custom error selector.
    let Some(selector) = err_data.get(..4) else { return Ok(None) };
    if let Some(known_error) =
        // `unwrap` is safe: `get(..4)` yielded a slice of exactly 4 bytes.
        SignaturesIdentifier::new(false)?.identify_error(selector.try_into().unwrap()).await
    {
        let mut decoded_error = known_error.name.clone();
        // Best effort: append decoded inputs as `Name(arg1, arg2, ...)` when decoding succeeds.
        if !known_error.inputs.is_empty()
            && let Ok(error) = known_error.decode_error(&err_data)
        {
            write!(decoded_error, "({})", format_tokens(&error.body).format(", "))?;
        }
        return Ok(Some(decoded_error));
    }
    Ok(None)
}
/// Creates a provider with wallet for signing transactions locally.
///
/// The wallet is built from the CLI's Ethereum/wallet options; if `--poll-interval` was
/// given, the provider's receipt-polling interval is overridden accordingly.
pub(crate) async fn signing_provider(
    tx_opts: &SendTxOpts,
) -> eyre::Result<RetryProviderWithSigner> {
    let config = tx_opts.eth.load_config()?;
    let signer = tx_opts.eth.wallet.signer().await?;
    let wallet = alloy_network::EthereumWallet::from(signer);
    let provider = get_provider_builder(&config)?.build_with_wallet(wallet)?;
    if let Some(interval) = tx_opts.poll_interval {
        provider.client().set_poll_interval(Duration::from_secs(interval))
    }
    Ok(provider)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/debug.rs | crates/cast/src/debug.rs | use std::str::FromStr;
use alloy_chains::Chain;
use alloy_primitives::{Address, Bytes, map::HashMap};
use foundry_cli::utils::{TraceResult, print_traces};
use foundry_common::{ContractsByArtifact, compile::ProjectCompiler, shell};
use foundry_config::Config;
use foundry_debugger::Debugger;
use foundry_evm::traces::{
CallTraceDecoderBuilder, DebugTraceIdentifier,
debug::ContractSources,
identifier::{SignaturesIdentifier, TraceIdentifiers},
};
/// Labels the traces, conditionally prints them or opens the debugger.
///
/// `labels` entries are `<address>:<label>` pairs; entries that do not parse are silently
/// skipped. When `with_local_artifacts` is set, the local project is compiled so traces can
/// be decoded against local artifacts and sources. When `debug` is set the TUI debugger is
/// opened instead of printing; `decode_internal` additionally enables decoding of internal
/// function frames.
#[expect(clippy::too_many_arguments)]
pub(crate) async fn handle_traces(
    mut result: TraceResult,
    config: &Config,
    chain: Chain,
    contracts_bytecode: &HashMap<Address, Bytes>,
    labels: Vec<String>,
    with_local_artifacts: bool,
    debug: bool,
    decode_internal: bool,
    disable_label: bool,
    trace_depth: Option<usize>,
) -> eyre::Result<()> {
    // Optionally compile the local project to obtain artifacts and sources for decoding.
    let (known_contracts, mut sources) = if with_local_artifacts {
        let _ = sh_println!("Compiling project to generate artifacts");
        let project = config.project()?;
        let compiler = ProjectCompiler::new();
        let output = compiler.compile(&project)?;
        (
            Some(ContractsByArtifact::new(
                output.artifact_ids().map(|(id, artifact)| (id, artifact.clone().into())),
            )),
            ContractSources::from_project_output(&output, project.root(), None)?,
        )
    } else {
        (None, ContractSources::default())
    };
    // Parse `<address>:<label>` pairs. `split_once` splits only on the first `:` so labels
    // containing further colons are preserved instead of being silently truncated.
    let labels = labels.iter().filter_map(|label_str| {
        let (addr, label) = label_str.split_once(':')?;
        let address = Address::from_str(addr).ok()?;
        Some((address, label.to_string()))
    });
    let config_labels = config.labels.clone().into_iter();
    let mut builder = CallTraceDecoderBuilder::new()
        .with_labels(labels.chain(config_labels))
        .with_signature_identifier(SignaturesIdentifier::from_config(config)?)
        .with_label_disabled(disable_label);
    let mut identifier = TraceIdentifiers::new().with_external(config, Some(chain))?;
    if let Some(contracts) = &known_contracts {
        builder = builder.with_known_contracts(contracts);
        identifier = identifier.with_local_and_bytecodes(contracts, contracts_bytecode);
    }
    let mut decoder = builder.build();
    for (_, trace) in result.traces.as_deref_mut().unwrap_or_default() {
        decoder.identify(trace, &mut identifier);
    }
    if decode_internal || debug {
        // Both the debugger and internal decoding need compiled sources; merge in any sources
        // fetched by the external (e.g. Etherscan) identifier.
        if let Some(ref etherscan_identifier) = identifier.external {
            sources.merge(etherscan_identifier.get_compiled_contracts().await?);
        }
        if debug {
            let mut debugger = Debugger::builder()
                .traces(result.traces.expect("missing traces"))
                .decoder(&decoder)
                .sources(sources)
                .build();
            debugger.try_run_tui()?;
            return Ok(());
        }
        decoder.debug_identifier = Some(DebugTraceIdentifier::new(sources));
    }
    print_traces(
        &mut result,
        &decoder,
        shell::verbosity() > 0,
        shell::verbosity() > 4,
        trace_depth,
    )
    .await?;
    Ok(())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/opts.rs | crates/cast/src/opts.rs | use crate::cmd::{
access_list::AccessListArgs, artifact::ArtifactArgs, b2e_payload::B2EPayloadArgs,
bind::BindArgs, call::CallArgs, constructor_args::ConstructorArgsArgs, create2::Create2Args,
creation_code::CreationCodeArgs, da_estimate::DAEstimateArgs, erc20::Erc20Subcommand,
estimate::EstimateArgs, find_block::FindBlockArgs, interface::InterfaceArgs, logs::LogsArgs,
mktx::MakeTxArgs, rpc::RpcArgs, run::RunArgs, send::SendTxArgs, storage::StorageArgs,
txpool::TxPoolSubcommands, wallet::WalletSubcommands,
};
use alloy_ens::NameOrAddress;
use alloy_primitives::{Address, B256, Selector, U256};
use alloy_rpc_types::BlockId;
use clap::{ArgAction, Parser, Subcommand, ValueHint};
use eyre::Result;
use foundry_cli::opts::{EtherscanOpts, GlobalArgs, RpcOpts};
use foundry_common::version::{LONG_VERSION, SHORT_VERSION};
use std::{path::PathBuf, str::FromStr};
/// A Swiss Army knife for interacting with Ethereum applications from the command line.
#[derive(Parser)]
#[command(
name = "cast",
version = SHORT_VERSION,
long_version = LONG_VERSION,
after_help = "Find more information in the book: https://getfoundry.sh/cast/overview",
next_display_order = None,
)]
pub struct Cast {
/// Include the global arguments.
#[command(flatten)]
pub global: GlobalArgs,
#[command(subcommand)]
pub cmd: CastSubcommand,
}
#[derive(Subcommand)]
pub enum CastSubcommand {
/// Prints the maximum value of the given integer type.
#[command(visible_aliases = &["--max-int", "maxi"])]
MaxInt {
/// The integer type to get the maximum value of.
#[arg(default_value = "int256")]
r#type: String,
},
/// Prints the minimum value of the given integer type.
#[command(visible_aliases = &["--min-int", "mini"])]
MinInt {
/// The integer type to get the minimum value of.
#[arg(default_value = "int256")]
r#type: String,
},
/// Prints the maximum value of the given integer type.
#[command(visible_aliases = &["--max-uint", "maxu"])]
MaxUint {
/// The unsigned integer type to get the maximum value of.
#[arg(default_value = "uint256")]
r#type: String,
},
/// Prints the zero address.
#[command(visible_aliases = &["--address-zero", "az"])]
AddressZero,
/// Prints the zero hash.
#[command(visible_aliases = &["--hash-zero", "hz"])]
HashZero,
/// Convert UTF8 text to hex.
#[command(
visible_aliases = &[
"--from-ascii",
"--from-utf8",
"from-ascii",
"fu",
"fa"]
)]
FromUtf8 {
/// The text to convert.
text: Option<String>,
},
/// Concatenate hex strings.
#[command(visible_aliases = &["--concat-hex", "ch"])]
ConcatHex {
/// The data to concatenate.
data: Vec<String>,
},
/// Convert binary data into hex data.
#[command(visible_aliases = &["--from-bin", "from-binx", "fb"])]
FromBin,
/// Normalize the input to lowercase, 0x-prefixed hex.
///
/// The input can be:
/// - mixed case hex with or without 0x prefix
/// - 0x prefixed hex, concatenated with a ':'
/// - an absolute path to file
/// - @tag, where the tag is defined in an environment variable
#[command(visible_aliases = &["--to-hexdata", "thd", "2hd"])]
ToHexdata {
/// The input to normalize.
input: Option<String>,
},
/// Convert an address to a checksummed format (EIP-55).
#[command(
visible_aliases = &["--to-checksum-address",
"--to-checksum",
"to-checksum",
"ta",
"2a"]
)]
ToCheckSumAddress {
/// The address to convert.
address: Option<Address>,
/// EIP-155 chain ID to encode the address using EIP-1191.
chain_id: Option<u64>,
},
/// Convert hex data to an ASCII string.
#[command(visible_aliases = &["--to-ascii", "tas", "2as"])]
ToAscii {
/// The hex data to convert.
hexdata: Option<String>,
},
/// Convert hex data to a utf-8 string.
#[command(visible_aliases = &["--to-utf8", "tu8", "2u8"])]
ToUtf8 {
/// The hex data to convert.
hexdata: Option<String>,
},
/// Convert a fixed point number into an integer.
#[command(visible_aliases = &["--from-fix", "ff"])]
FromFixedPoint {
/// The number of decimals to use.
decimals: Option<String>,
/// The value to convert.
#[arg(allow_hyphen_values = true)]
value: Option<String>,
},
/// Right-pads hex data to 32 bytes.
#[command(visible_aliases = &["--to-bytes32", "tb", "2b"])]
ToBytes32 {
/// The hex data to convert.
bytes: Option<String>,
},
/// Pads hex data to a specified length.
#[command(visible_aliases = &["pd"])]
Pad {
/// The hex data to pad.
data: Option<String>,
/// Right-pad the data (instead of left-pad).
#[arg(long)]
right: bool,
/// Left-pad the data (default).
#[arg(long, conflicts_with = "right")]
left: bool,
/// Target length in bytes (default: 32).
#[arg(long, default_value = "32")]
len: usize,
},
/// Convert an integer into a fixed point number.
#[command(visible_aliases = &["--to-fix", "tf", "2f"])]
ToFixedPoint {
/// The number of decimals to use.
decimals: Option<String>,
/// The value to convert.
#[arg(allow_hyphen_values = true)]
value: Option<String>,
},
/// Convert a number to a hex-encoded uint256.
#[command(name = "to-uint256", visible_aliases = &["--to-uint256", "tu", "2u"])]
ToUint256 {
/// The value to convert.
value: Option<String>,
},
/// Convert a number to a hex-encoded int256.
#[command(name = "to-int256", visible_aliases = &["--to-int256", "ti", "2i"])]
ToInt256 {
/// The value to convert.
value: Option<String>,
},
/// Perform a left shifting operation
#[command(name = "shl")]
LeftShift {
/// The value to shift.
value: String,
/// The number of bits to shift.
bits: String,
/// The input base.
#[arg(long)]
base_in: Option<String>,
/// The output base.
#[arg(long, default_value = "16")]
base_out: String,
},
/// Perform a right shifting operation
#[command(name = "shr")]
RightShift {
/// The value to shift.
value: String,
/// The number of bits to shift.
bits: String,
/// The input base,
#[arg(long)]
base_in: Option<String>,
/// The output base,
#[arg(long, default_value = "16")]
base_out: String,
},
/// Convert an ETH amount into another unit (ether, gwei or wei).
///
/// Examples:
/// - 1ether wei
/// - "1 ether" wei
/// - 1ether
/// - 1 gwei
/// - 1gwei ether
#[command(visible_aliases = &["--to-unit", "tun", "2un"])]
ToUnit {
/// The value to convert.
value: Option<String>,
/// The unit to convert to (ether, gwei, wei).
#[arg(default_value = "wei")]
unit: String,
},
/// Convert a number from decimal to smallest unit with arbitrary decimals.
///
/// Examples:
/// - 1.0 6 (for USDC, result: 1000000)
/// - 2.5 12 (for 12 decimals token, result: 2500000000000)
/// - 1.23 3 (for 3 decimals token, result: 1230)
#[command(visible_aliases = &["--parse-units", "pun"])]
ParseUnits {
/// The value to convert.
value: Option<String>,
/// The unit to convert to.
#[arg(default_value = "18")]
unit: u8,
},
/// Format a number from smallest unit to decimal with arbitrary decimals.
///
/// Examples:
/// - 1000000 6 (for USDC, result: 1.0)
/// - 2500000000000 12 (for 12 decimals, result: 2.5)
/// - 1230 3 (for 3 decimals, result: 1.23)
#[command(visible_aliases = &["--format-units", "fun"])]
FormatUnits {
/// The value to format.
value: Option<String>,
/// The unit to format to.
#[arg(default_value = "18")]
unit: u8,
},
/// Convert an ETH amount to wei.
///
/// Consider using --to-unit.
#[command(visible_aliases = &["--to-wei", "tw", "2w"])]
ToWei {
/// The value to convert.
#[arg(allow_hyphen_values = true)]
value: Option<String>,
/// The unit to convert from (ether, gwei, wei).
#[arg(default_value = "eth")]
unit: String,
},
/// Convert wei into an ETH amount.
///
/// Consider using --to-unit.
#[command(visible_aliases = &["--from-wei", "fw"])]
FromWei {
/// The value to convert.
#[arg(allow_hyphen_values = true)]
value: Option<String>,
/// The unit to convert from (ether, gwei, wei).
#[arg(default_value = "eth")]
unit: String,
},
/// RLP encodes hex data, or an array of hex data.
///
/// Accepts a hex-encoded string, or an array of hex-encoded strings.
/// Can be arbitrarily recursive.
///
/// Examples:
/// - `cast to-rlp "[]"` -> `0xc0`
/// - `cast to-rlp "0x22"` -> `0x22`
/// - `cast to-rlp "[\"0x61\"]"` -> `0xc161`
/// - `cast to-rlp "[\"0xf1\", \"f2\"]"` -> `0xc481f181f2`
#[command(visible_aliases = &["--to-rlp"])]
ToRlp {
/// The value to convert.
///
/// This is a hex-encoded string, or an array of hex-encoded strings.
/// Can be arbitrarily recursive.
value: Option<String>,
},
/// Decodes RLP hex-encoded data.
#[command(visible_aliases = &["--from-rlp"])]
FromRlp {
/// The RLP hex-encoded data.
value: Option<String>,
/// Decode the RLP data as int
#[arg(long, alias = "int")]
as_int: bool,
},
/// Converts a number of one base to another
#[command(visible_aliases = &["--to-hex", "th", "2h"])]
ToHex(ToBaseArgs),
/// Converts a number of one base to decimal
#[command(visible_aliases = &["--to-dec", "td", "2d"])]
ToDec(ToBaseArgs),
/// Converts a number of one base to another
#[command(
visible_aliases = &["--to-base",
"--to-radix",
"to-radix",
"tr",
"2r"]
)]
ToBase {
#[command(flatten)]
base: ToBaseArgs,
/// The output base.
#[arg(value_name = "BASE")]
base_out: Option<String>,
},
/// Create an access list for a transaction.
#[command(visible_aliases = &["ac", "acl"])]
AccessList(AccessListArgs),
/// Get logs by signature or topic.
#[command(visible_alias = "l")]
Logs(LogsArgs),
/// Get information about a block.
#[command(visible_alias = "bl")]
Block {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
block: Option<BlockId>,
/// If specified, only get the given field of the block.
#[arg(short, long = "field", aliases = ["fields"], num_args = 0.., action = ArgAction::Append, value_delimiter = ',')]
fields: Vec<String>,
/// Print the raw RLP encoded block header.
#[arg(long, conflicts_with = "fields")]
raw: bool,
#[arg(long, env = "CAST_FULL_BLOCK")]
full: bool,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the latest block number.
#[command(visible_alias = "bn")]
BlockNumber {
/// The hash or tag to query. If not specified, the latest number is returned.
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Perform a call on an account without publishing a transaction.
#[command(visible_alias = "c")]
Call(CallArgs),
/// ABI-encode a function with arguments.
#[command(name = "calldata", visible_alias = "cd")]
CalldataEncode {
/// The function signature in the format `<name>(<in-types>)(<out-types>)`
sig: String,
/// The arguments to encode.
#[arg(allow_hyphen_values = true)]
args: Vec<String>,
// Path to file containing arguments to encode.
#[arg(long, value_name = "PATH")]
file: Option<PathBuf>,
},
/// Get the symbolic name of the current chain.
Chain {
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the Ethereum chain ID.
#[command(visible_aliases = &["ci", "cid"])]
ChainId {
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the current client version.
#[command(visible_alias = "cl")]
Client {
#[command(flatten)]
rpc: RpcOpts,
},
/// Compute the contract address from a given nonce and deployer address.
#[command(visible_alias = "ca")]
ComputeAddress {
/// The deployer address.
address: Option<Address>,
/// The nonce of the deployer address.
#[arg(
long,
conflicts_with = "salt",
conflicts_with = "init_code",
conflicts_with = "init_code_hash"
)]
nonce: Option<u64>,
/// The salt for CREATE2 address computation.
#[arg(long, conflicts_with = "nonce")]
salt: Option<B256>,
/// The init code for CREATE2 address computation.
#[arg(
long,
requires = "salt",
conflicts_with = "init_code_hash",
conflicts_with = "nonce"
)]
init_code: Option<String>,
/// The init code hash for CREATE2 address computation.
#[arg(long, requires = "salt", conflicts_with = "init_code", conflicts_with = "nonce")]
init_code_hash: Option<B256>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Disassembles a hex-encoded bytecode into a human-readable representation.
#[command(visible_alias = "da")]
Disassemble {
/// The hex-encoded bytecode.
bytecode: Option<String>,
},
/// Build and sign a transaction.
#[command(name = "mktx", visible_alias = "m")]
MakeTx(MakeTxArgs),
/// Calculate the ENS namehash of a name.
#[command(visible_aliases = &["na", "nh"])]
Namehash { name: Option<String> },
/// Get information about a transaction.
#[command(visible_alias = "t")]
Tx {
/// The transaction hash.
tx_hash: Option<String>,
/// The sender of the transaction.
#[arg(long, value_parser = NameOrAddress::from_str)]
from: Option<NameOrAddress>,
/// Nonce of the transaction.
#[arg(long)]
nonce: Option<u64>,
/// If specified, only get the given field of the transaction. If "raw", the RLP encoded
/// transaction will be printed.
field: Option<String>,
/// Print the raw RLP encoded transaction.
#[arg(long, conflicts_with = "field")]
raw: bool,
#[command(flatten)]
rpc: RpcOpts,
/// If specified, the transaction will be converted to a TransactionRequest JSON format.
#[arg(long)]
to_request: bool,
},
/// Get the transaction receipt for a transaction.
#[command(visible_alias = "re")]
Receipt {
/// The transaction hash.
tx_hash: String,
/// If specified, only get the given field of the transaction.
field: Option<String>,
/// The number of confirmations until the receipt is fetched
#[arg(long, default_value = "1")]
confirmations: u64,
/// Exit immediately if the transaction was not found.
#[arg(id = "async", long = "async", env = "CAST_ASYNC", alias = "cast-async")]
cast_async: bool,
#[command(flatten)]
rpc: RpcOpts,
},
/// Sign and publish a transaction.
#[command(name = "send", visible_alias = "s")]
SendTx(SendTxArgs),
/// Publish a raw transaction to the network.
#[command(name = "publish", visible_alias = "p")]
PublishTx {
/// The raw transaction
raw_tx: String,
/// Only print the transaction hash and exit immediately.
#[arg(id = "async", long = "async", env = "CAST_ASYNC", alias = "cast-async")]
cast_async: bool,
#[command(flatten)]
rpc: RpcOpts,
},
/// Estimate the gas cost of a transaction.
#[command(visible_alias = "e")]
Estimate(EstimateArgs),
/// Decode ABI-encoded input data.
///
/// Similar to `abi-decode --input`, but function selector MUST be prefixed in `calldata`
/// string
#[command(visible_aliases = &["calldata-decode", "--calldata-decode", "cdd"])]
DecodeCalldata {
/// The function signature in the format `<name>(<in-types>)(<out-types>)`.
sig: String,
/// The ABI-encoded calldata.
#[arg(required_unless_present = "file", index = 2)]
calldata: Option<String>,
/// Load ABI-encoded calldata from a file instead.
#[arg(long = "file", short = 'f', conflicts_with = "calldata")]
file: Option<PathBuf>,
},
/// Decode ABI-encoded string.
///
/// Similar to `calldata-decode --input`, but the function argument is a `string`
#[command(visible_aliases = &["string-decode", "--string-decode", "sd"])]
DecodeString {
/// The ABI-encoded string.
data: String,
},
/// Decode event data.
#[command(visible_aliases = &["event-decode", "--event-decode", "ed"])]
DecodeEvent {
/// The event signature. If none provided then tries to decode from local cache or <https://api.openchain.xyz>.
#[arg(long, visible_alias = "event-sig")]
sig: Option<String>,
/// The event data to decode.
data: String,
},
/// Decode custom error data.
#[command(visible_aliases = &["error-decode", "--error-decode", "erd"])]
DecodeError {
/// The error signature. If none provided then tries to decode from local cache or <https://api.openchain.xyz>.
#[arg(long, visible_alias = "error-sig")]
sig: Option<String>,
/// The error data to decode.
data: String,
},
/// Decode ABI-encoded input or output data.
///
/// Defaults to decoding output data. To decode input data pass --input.
///
/// When passing `--input`, function selector must NOT be prefixed in `calldata` string
#[command(name = "decode-abi", visible_aliases = &["abi-decode", "--abi-decode", "ad"])]
DecodeAbi {
/// The function signature in the format `<name>(<in-types>)(<out-types>)`.
sig: String,
/// The ABI-encoded calldata.
calldata: String,
/// Whether to decode the input or output data.
#[arg(long, short, help_heading = "Decode input data instead of output data")]
input: bool,
},
/// ABI encode the given function argument, excluding the selector.
#[command(visible_alias = "ae")]
AbiEncode {
/// The function signature.
sig: String,
/// Whether to use packed encoding.
#[arg(long)]
packed: bool,
/// The arguments of the function.
#[arg(allow_hyphen_values = true)]
args: Vec<String>,
},
/// ABI encode an event and its arguments to generate topics and data.
#[command(visible_alias = "aee")]
AbiEncodeEvent {
/// The event signature.
sig: String,
/// The arguments of the event.
#[arg(allow_hyphen_values = true)]
args: Vec<String>,
},
/// Compute the storage slot for an entry in a mapping.
#[command(visible_alias = "in")]
Index {
/// The mapping key type.
key_type: String,
/// The mapping key.
key: String,
/// The storage slot of the mapping.
slot_number: String,
},
/// Compute storage slots as specified by `ERC-7201: Namespaced Storage Layout`.
#[command(name = "index-erc7201", alias = "index-erc-7201", visible_aliases = &["index7201", "in7201"])]
IndexErc7201 {
/// The arbitrary identifier.
id: Option<String>,
/// The formula ID. Currently the only supported formula is `erc7201`.
#[arg(long, default_value = "erc7201")]
formula_id: String,
},
/// Fetch the EIP-1967 implementation for a contract
/// Can read from the implementation slot or the beacon slot.
#[command(visible_alias = "impl")]
Implementation {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
/// Fetch the implementation from the beacon slot.
///
/// If not specified, the implementation slot is used.
#[arg(long)]
beacon: bool,
/// The address for which the implementation will be fetched.
#[arg(value_parser = NameOrAddress::from_str)]
who: NameOrAddress,
#[command(flatten)]
rpc: RpcOpts,
},
/// Fetch the EIP-1967 admin account
#[command(visible_alias = "adm")]
Admin {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
/// The address from which the admin account will be fetched.
#[arg(value_parser = NameOrAddress::from_str)]
who: NameOrAddress,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the function signatures for the given selector from <https://openchain.xyz>.
#[command(name = "4byte", visible_aliases = &["4", "4b"])]
FourByte {
/// The function selector.
selector: Option<Selector>,
},
/// Decode ABI-encoded calldata using <https://openchain.xyz>.
#[command(name = "4byte-calldata", aliases = &["4byte-decode", "4d", "4bd"], visible_aliases = &["4c", "4bc"])]
FourByteCalldata {
/// The ABI-encoded calldata.
calldata: Option<String>,
},
/// Get the event signature for a given topic 0 from <https://openchain.xyz>.
#[command(name = "4byte-event", visible_aliases = &["4e", "4be", "topic0-event", "t0e"])]
FourByteEvent {
/// Topic 0
#[arg(value_name = "TOPIC_0")]
topic: Option<B256>,
},
/// Upload the given signatures to <https://openchain.xyz>.
///
/// Example inputs:
/// - "transfer(address,uint256)"
/// - "function transfer(address,uint256)"
/// - "function transfer(address,uint256)" "event Transfer(address,address,uint256)"
/// - "./out/Contract.sol/Contract.json"
#[command(visible_aliases = &["ups"])]
UploadSignature {
/// The signatures to upload.
///
/// Prefix with 'function', 'event', or 'error'. Defaults to function if no prefix given.
/// Can also take paths to contract artifact JSON.
signatures: Vec<String>,
},
/// Pretty print calldata.
///
/// Tries to decode the calldata using <https://openchain.xyz> unless --offline is passed.
#[command(visible_alias = "pc")]
PrettyCalldata {
/// The calldata.
calldata: Option<String>,
/// Skip the <https://openchain.xyz> lookup.
#[arg(long, short)]
offline: bool,
},
/// Get the timestamp of a block.
#[command(visible_alias = "a")]
Age {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the balance of an account in wei.
#[command(visible_alias = "b")]
Balance {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
/// The account to query.
#[arg(value_parser = NameOrAddress::from_str)]
who: NameOrAddress,
/// Format the balance in ether.
#[arg(long, short)]
ether: bool,
#[command(flatten)]
rpc: RpcOpts,
/// erc20 address to query, with the method `balanceOf(address) return (uint256)`, alias
/// with '--erc721'
#[arg(long, alias = "erc721")]
erc20: Option<Address>,
},
/// Get the basefee of a block.
#[command(visible_aliases = &["ba", "fee", "basefee"])]
BaseFee {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the runtime bytecode of a contract.
#[command(visible_alias = "co")]
Code {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
/// The contract address.
#[arg(value_parser = NameOrAddress::from_str)]
who: NameOrAddress,
/// Disassemble bytecodes.
#[arg(long, short)]
disassemble: bool,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the runtime bytecode size of a contract.
#[command(visible_alias = "cs")]
Codesize {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
/// The contract address.
#[arg(value_parser = NameOrAddress::from_str)]
who: NameOrAddress,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the current gas price.
#[command(visible_alias = "g")]
GasPrice {
#[command(flatten)]
rpc: RpcOpts,
},
/// Generate event signatures from event string.
#[command(visible_alias = "se")]
SigEvent {
/// The event string.
event_string: Option<String>,
},
/// Hash arbitrary data using Keccak-256.
#[command(visible_aliases = &["k", "keccak256"])]
Keccak {
/// The data to hash.
data: Option<String>,
},
/// Hash a message according to EIP-191.
#[command(visible_aliases = &["--hash-message", "hm"])]
HashMessage {
/// The message to hash.
message: Option<String>,
},
/// Perform an ENS lookup.
#[command(visible_alias = "rn")]
ResolveName {
/// The name to lookup.
who: Option<String>,
/// Perform a reverse lookup to verify that the name is correct.
#[arg(long)]
verify: bool,
#[command(flatten)]
rpc: RpcOpts,
},
/// Perform an ENS reverse lookup.
#[command(visible_alias = "la")]
LookupAddress {
/// The account to perform the lookup for.
who: Option<Address>,
/// Perform a normal lookup to verify that the address is correct.
#[arg(long)]
verify: bool,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the raw value of a contract's storage slot.
#[command(visible_alias = "st")]
Storage(StorageArgs),
/// Generate a storage proof for a given storage slot.
#[command(visible_alias = "pr")]
Proof {
/// The contract address.
#[arg(value_parser = NameOrAddress::from_str)]
address: NameOrAddress,
/// The storage slot numbers (hex or decimal).
#[arg(value_parser = parse_slot)]
slots: Vec<B256>,
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the nonce for an account.
#[command(visible_alias = "n")]
Nonce {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
/// The address to get the nonce for.
#[arg(value_parser = NameOrAddress::from_str)]
who: NameOrAddress,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the codehash for an account.
#[command()]
Codehash {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
/// The address to get the codehash for.
#[arg(value_parser = NameOrAddress::from_str)]
who: NameOrAddress,
/// The storage slot numbers (hex or decimal).
#[arg(value_parser = parse_slot)]
slots: Vec<B256>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the storage root for an account.
#[command(visible_alias = "sr")]
StorageRoot {
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
/// The address to get the storage root for.
#[arg(value_parser = NameOrAddress::from_str)]
who: NameOrAddress,
/// The storage slot numbers (hex or decimal).
#[arg(value_parser = parse_slot)]
slots: Vec<B256>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Get the source code of a contract from a block explorer.
#[command(visible_aliases = &["et", "src"])]
Source {
/// The contract's address.
address: String,
/// Whether to flatten the source code.
#[arg(long, short)]
flatten: bool,
/// The output directory/file to expand source tree into.
#[arg(short, value_hint = ValueHint::DirPath, alias = "path")]
directory: Option<PathBuf>,
#[command(flatten)]
etherscan: EtherscanOpts,
/// Alternative explorer API URL to use that adheres to the Etherscan API. If not provided,
/// defaults to Etherscan.
#[arg(long, env = "EXPLORER_API_URL")]
explorer_api_url: Option<String>,
/// Alternative explorer browser URL.
#[arg(long, env = "EXPLORER_URL")]
explorer_url: Option<String>,
},
/// Wallet management utilities.
#[command(visible_alias = "w")]
Wallet {
#[command(subcommand)]
command: WalletSubcommands,
},
/// Download a contract creation code from Etherscan and RPC.
#[command(visible_alias = "cc")]
CreationCode(CreationCodeArgs),
/// Generate an artifact file, that can be used to deploy a contract locally.
#[command(visible_alias = "ar")]
Artifact(ArtifactArgs),
/// Display constructor arguments used for the contract initialization.
#[command(visible_alias = "cra")]
ConstructorArgs(ConstructorArgsArgs),
/// Generate a Solidity interface from a given ABI.
///
/// Currently does not support ABI encoder v2.
#[command(visible_alias = "i")]
Interface(InterfaceArgs),
/// Generate a rust binding from a given ABI.
#[command(visible_alias = "bi")]
Bind(BindArgs),
/// Convert Beacon payload to execution payload.
#[command(visible_alias = "b2e")]
B2EPayload(B2EPayloadArgs),
/// Get the selector for a function.
#[command(visible_alias = "si")]
Sig {
/// The function signature, e.g. transfer(address,uint256).
sig: Option<String>,
/// Optimize signature to contain provided amount of leading zeroes in selector.
optimize: Option<usize>,
},
/// Generate a deterministic contract address using CREATE2.
#[command(visible_alias = "c2")]
Create2(Create2Args),
/// Get the block number closest to the provided timestamp.
#[command(visible_alias = "f")]
FindBlock(FindBlockArgs),
/// Generate shell completions script.
#[command(visible_alias = "com")]
Completions {
#[arg(value_enum)]
shell: foundry_cli::clap::Shell,
},
/// Runs a published transaction in a local environment and prints the trace.
#[command(visible_alias = "r")]
Run(RunArgs),
/// Perform a raw JSON-RPC request.
#[command(visible_alias = "rp")]
Rpc(RpcArgs),
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/rlp_converter.rs | crates/cast/src/rlp_converter.rs | use alloy_primitives::{U256, hex};
use alloy_rlp::{Buf, Decodable, Encodable, Header};
use eyre::Context;
use serde_json::Value;
use std::fmt;
/// Arbitrary nested data.
///
/// - `Item::Array(vec![])` is equivalent to `[]`.
/// - `Item::Array(vec![Item::Data(vec![])])` is equivalent to `[""]` or `[null]`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Item {
Data(Vec<u8>),
Array(Vec<Self>),
}
impl Encodable for Item {
fn encode(&self, out: &mut dyn alloy_rlp::BufMut) {
match self {
Self::Array(arr) => arr.encode(out),
Self::Data(data) => <[u8]>::encode(data, out),
}
}
}
impl Decodable for Item {
fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
let h = Header::decode(buf)?;
if buf.len() < h.payload_length {
return Err(alloy_rlp::Error::InputTooShort);
}
let mut d = &buf[..h.payload_length];
let r = if h.list {
let view = &mut d;
let mut v = Vec::new();
while !view.is_empty() {
v.push(Self::decode(view)?);
}
Ok(Self::Array(v))
} else {
Ok(Self::Data(d.to_vec()))
};
buf.advance(h.payload_length);
r
}
}
impl Item {
pub(crate) fn value_to_item(value: &Value) -> eyre::Result<Self> {
match value {
Value::Null => Ok(Self::Data(vec![])),
Value::Bool(_) => {
eyre::bail!("RLP input can not contain booleans")
}
Value::Number(n) => {
Ok(Self::Data(n.to_string().parse::<U256>()?.to_be_bytes_trimmed_vec()))
}
Value::String(s) => Ok(Self::Data(hex::decode(s).wrap_err("Could not decode hex")?)),
Value::Array(values) => values.iter().map(Self::value_to_item).collect(),
Value::Object(_) => {
eyre::bail!("RLP input can not contain objects")
}
}
}
}
impl FromIterator<Self> for Item {
fn from_iter<T: IntoIterator<Item = Self>>(iter: T) -> Self {
Self::Array(Vec::from_iter(iter))
}
}
// Display as hex values
impl fmt::Display for Item {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Data(dat) => {
write!(f, "\"0x{}\"", hex::encode(dat))?;
}
Self::Array(items) => {
f.write_str("[")?;
for (i, item) in items.iter().enumerate() {
if i > 0 {
f.write_str(",")?;
}
fmt::Display::fmt(item, f)?;
}
f.write_str("]")?;
}
};
Ok(())
}
}
#[cfg(test)]
mod test {
use crate::rlp_converter::Item;
use alloy_primitives::hex;
use alloy_rlp::{Bytes, Decodable};
use serde_json::Result as JsonResult;
// https://en.wikipedia.org/wiki/Set-theoretic_definition_of_natural_numbers
fn array_von_neuman() -> Item {
Item::Array(vec![
Item::Array(vec![]),
Item::Array(vec![Item::Array(vec![])]),
Item::Array(vec![Item::Array(vec![]), Item::Array(vec![Item::Array(vec![])])]),
])
}
#[test]
#[expect(clippy::disallowed_macros)]
fn encode_decode_test() -> alloy_rlp::Result<()> {
let parameters = vec![
(1, b"\xc0".to_vec(), Item::Array(vec![])),
(2, b"\xc1\x80".to_vec(), Item::Array(vec![Item::Data(vec![])])),
(3, b"\xc4\x83dog".to_vec(), Item::Array(vec![Item::Data(vec![0x64, 0x6f, 0x67])])),
(
4,
b"\xc5\xc4\x83dog".to_vec(),
Item::Array(vec![Item::Array(vec![Item::Data(vec![0x64, 0x6f, 0x67])])]),
),
(
5,
b"\xc8\x83dog\x83cat".to_vec(),
Item::Array(vec![
Item::Data(vec![0x64, 0x6f, 0x67]),
Item::Data(vec![0x63, 0x61, 0x74]),
]),
),
(6, b"\xc7\xc0\xc1\xc0\xc3\xc0\xc1\xc0".to_vec(), array_von_neuman()),
(
7,
b"\xcd\x83\x6c\x6f\x6c\xc3\xc2\xc1\xc0\xc4\x83\x6f\x6c\x6f".to_vec(),
Item::Array(vec![
Item::Data(vec![b'\x6c', b'\x6f', b'\x6c']),
Item::Array(vec![Item::Array(vec![Item::Array(vec![Item::Array(vec![])])])]),
Item::Array(vec![Item::Data(vec![b'\x6f', b'\x6c', b'\x6f'])]),
]),
),
];
for params in parameters {
let encoded = alloy_rlp::encode(¶ms.2);
assert_eq!(Item::decode(&mut &encoded[..])?, params.2);
let decoded = Item::decode(&mut ¶ms.1[..])?;
assert_eq!(alloy_rlp::encode(&decoded), params.1);
println!("case {} validated", params.0)
}
Ok(())
}
#[test]
#[expect(clippy::disallowed_macros)]
fn deserialize_from_str_test_hex() -> JsonResult<()> {
let parameters = vec![
(1, "[\"\"]", Item::Array(vec![Item::Data(vec![])])),
(2, "[\"0x646f67\"]", Item::Array(vec![Item::Data(vec![0x64, 0x6f, 0x67])])),
(
3,
"[[\"646f67\"]]",
Item::Array(vec![Item::Array(vec![Item::Data(vec![0x64, 0x6f, 0x67])])]),
),
(
4,
"[\"646f67\",\"0x636174\"]",
Item::Array(vec![
Item::Data(vec![0x64, 0x6f, 0x67]),
Item::Data(vec![0x63, 0x61, 0x74]),
]),
),
(6, "[[],[[]],[[],[[]]]]", array_von_neuman()),
];
for params in parameters {
let val = serde_json::from_str(params.1)?;
let item = Item::value_to_item(&val).unwrap();
assert_eq!(item, params.2);
println!("case {} validated", params.0);
}
Ok(())
}
#[test]
fn rlp_data() {
// <https://github.com/foundry-rs/foundry/issues/9197>
let hex_val_rlp = hex!("820002");
let item = Item::decode(&mut &hex_val_rlp[..]).unwrap();
let data = hex!("0002");
let encoded = alloy_rlp::encode(&data[..]);
let decoded: Bytes = alloy_rlp::decode_exact(&encoded[..]).unwrap();
assert_eq!(Item::Data(decoded.to_vec()), item);
let hex_val_rlp = hex!("00");
let item = Item::decode(&mut &hex_val_rlp[..]).unwrap();
let data = hex!("00");
let encoded = alloy_rlp::encode(&data[..]);
let decoded: Bytes = alloy_rlp::decode_exact(&encoded[..]).unwrap();
assert_eq!(Item::Data(decoded.to_vec()), item);
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/da_estimate.rs | crates/cast/src/cmd/da_estimate.rs | //! Estimates the data availability size of a block for opstack.
use alloy_consensus::BlockHeader;
use alloy_network::eip2718::Encodable2718;
use alloy_provider::Provider;
use alloy_rpc_types::BlockId;
use clap::Parser;
use foundry_cli::{
opts::RpcOpts,
utils::{self, LoadConfig},
};
use foundry_primitives::FoundryTxEnvelope;
/// CLI arguments for `cast da-estimate`.
#[derive(Debug, Parser)]
pub struct DAEstimateArgs {
    /// The block to estimate the data availability size for.
    pub block: BlockId,
    // Shared RPC options (endpoint, auth, ...), flattened into this command's flags.
    #[command(flatten)]
    pub rpc: RpcOpts,
}
impl DAEstimateArgs {
    /// Fetches the requested block over RPC and prints an estimate of its data
    /// availability size: the sum of `op_alloy_flz::tx_estimated_size_fjord`
    /// over the EIP-2718 encoding of every transaction in the block.
    pub async fn run(self) -> eyre::Result<()> {
        let Self { block, rpc } = self;
        let config = rpc.load_config()?;
        let provider = utils::get_provider(&config)?;

        let block = provider
            .get_block(block)
            .full()
            .await?
            .ok_or_else(|| eyre::eyre!("Block not found"))?;

        let block_number = block.header.number();
        let tx_count = block.transactions.len();

        // Accumulate the estimated DA footprint of each transaction.
        let da_estimate = block.into_transactions_iter().try_fold(0u64, |acc, tx| {
            // convert into FoundryTxEnvelope to support all foundry tx types
            let tx = FoundryTxEnvelope::try_from(tx)?;
            Ok::<_, eyre::Report>(acc + op_alloy_flz::tx_estimated_size_fjord(&tx.encoded_2718()))
        })?;

        sh_println!(
            "Estimated data availability size for block {block_number} with {tx_count} transactions: {da_estimate}"
        )?;

        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/creation_code.rs | crates/cast/src/cmd/creation_code.rs | use super::interface::load_abi_from_file;
use crate::SimpleCast;
use alloy_consensus::Transaction;
use alloy_primitives::{Address, Bytes};
use alloy_provider::{Provider, ext::TraceApi};
use alloy_rpc_types::trace::parity::{Action, CreateAction, CreateOutput, TraceOutput};
use clap::Parser;
use eyre::{OptionExt, Result, eyre};
use foundry_block_explorers::Client;
use foundry_cli::{
opts::{EtherscanOpts, RpcOpts},
utils::{self, LoadConfig, fetch_abi_from_etherscan},
};
use foundry_common::provider::RetryProvider;
use foundry_config::Config;
// Wires `CreationCodeArgs` into figment so its `etherscan` and `rpc` options
// participate in foundry's layered configuration resolution.
foundry_config::impl_figment_convert!(CreationCodeArgs, etherscan, rpc);
/// CLI arguments for `cast creation-code`.
#[derive(Parser)]
pub struct CreationCodeArgs {
    /// An Ethereum address, for which the bytecode will be fetched.
    contract: Address,
    /// Path to file containing the contract's JSON ABI. It's necessary if the target contract is
    /// not verified on Etherscan.
    #[arg(long)]
    abi_path: Option<String>,
    /// Disassemble bytecodes into individual opcodes.
    #[arg(long)]
    disassemble: bool,
    /// Return creation bytecode without constructor arguments appended.
    #[arg(long, conflicts_with = "only_args")]
    without_args: bool,
    /// Return only constructor arguments.
    #[arg(long)]
    only_args: bool,
    // Block explorer and RPC option groups, flattened into this command's flags.
    #[command(flatten)]
    etherscan: EtherscanOpts,
    #[command(flatten)]
    rpc: RpcOpts,
}
impl CreationCodeArgs {
pub async fn run(self) -> Result<()> {
let mut config = self.load_config()?;
let Self { contract, disassemble, without_args, only_args, abi_path, etherscan: _, rpc: _ } =
self;
let provider = utils::get_provider(&config)?;
let chain = provider.get_chain_id().await?;
config.chain = Some(chain.into());
let bytecode = fetch_creation_code_from_etherscan(contract, &config, provider).await?;
let bytecode = parse_code_output(
bytecode,
contract,
&config,
abi_path.as_deref(),
without_args,
only_args,
)
.await?;
if disassemble {
let _ = sh_println!("{}", SimpleCast::disassemble(&bytecode)?);
} else {
let _ = sh_println!("{bytecode}");
}
Ok(())
}
}
/// Parses the creation bytecode and returns one of the following:
/// - The complete bytecode
/// - The bytecode without constructor arguments
/// - Only the constructor arguments
///
/// `without_args` and `only_args` are mutually exclusive (enforced by clap);
/// when both are false the bytecode is returned unchanged and no ABI lookup
/// is performed.
///
/// # Errors
///
/// Fails when no ABI can be resolved, when `only_args` is requested but the
/// contract has no constructor (or no constructor arguments), or when the
/// creation code is shorter than the encoded constructor arguments.
pub async fn parse_code_output(
    bytecode: Bytes,
    contract: Address,
    config: &Config,
    abi_path: Option<&str>,
    without_args: bool,
    only_args: bool,
) -> Result<Bytes> {
    if !without_args && !only_args {
        return Ok(bytecode);
    }

    // Resolve the ABI from a local file when provided, otherwise from Etherscan.
    let abi = if let Some(abi_path) = abi_path {
        load_abi_from_file(abi_path, None)?
    } else {
        fetch_abi_from_etherscan(contract, config).await?
    };
    let (abi, _) = abi.into_iter().next().ok_or_eyre("No ABI found.")?;

    let Some(constructor) = abi.constructor else {
        if only_args {
            return Err(eyre!("No constructor found."));
        }
        return Ok(bytecode);
    };

    if constructor.inputs.is_empty() {
        if only_args {
            return Err(eyre!("No constructor arguments found."));
        }
        return Ok(bytecode);
    }

    // NOTE: assumes every constructor argument occupies exactly one 32-byte
    // ABI word; this holds for static types but underestimates dynamic ones.
    let args_size = constructor.inputs.len() * 32;
    // Guard against underflow: slicing with an out-of-range split point would
    // otherwise panic when the creation code is shorter than `args_size`.
    let split = bytecode
        .len()
        .checked_sub(args_size)
        .ok_or_else(|| eyre!("Creation code is shorter than the encoded constructor arguments."))?;

    let bytecode = if without_args {
        Bytes::from(bytecode[..split].to_vec())
    } else {
        // `only_args`: the two flags conflict in clap, so this is the only
        // remaining case.
        Bytes::from(bytecode[split..].to_vec())
    };
    Ok(bytecode)
}
/// Fetches the creation code of a contract from Etherscan and RPC.
///
/// The creation transaction hash is looked up via the block explorer client.
/// For direct EOA deployments the transaction input *is* the creation code;
/// for factory/CREATE2 deployments the creation code is recovered from the
/// transaction's `trace_transaction` create traces instead.
pub async fn fetch_creation_code_from_etherscan(
    contract: Address,
    config: &Config,
    provider: RetryProvider,
) -> Result<Bytes> {
    let chain = config.chain.unwrap_or_default();
    // A missing API key degrades to unauthenticated (rate-limited) requests.
    let api_key = config.get_etherscan_api_key(Some(chain)).unwrap_or_default();
    let client = Client::new(chain, api_key)?;
    let creation_data = client.contract_creation_data(contract).await?;
    let creation_tx_hash = creation_data.transaction_hash;
    let tx_data = provider.get_transaction_by_hash(creation_tx_hash).await?;
    let tx_data = tx_data.ok_or_eyre("Could not find creation tx data.")?;
    let bytecode = if tx_data.to().is_none() {
        // Contract was created using a standard transaction
        tx_data.input().clone()
    } else {
        // Contract was created using a factory pattern or create2
        // Extract creation code from tx traces
        let mut creation_bytecode = None;
        let traces = provider.trace_transaction(creation_tx_hash).await.map_err(|e| {
            eyre!("Could not fetch traces for transaction {}: {}", creation_tx_hash, e)
        })?;
        // Scan all traces for the create that produced `contract`; keeps the
        // last matching create action's init code.
        for trace in traces {
            if let Some(TraceOutput::Create(CreateOutput { address, .. })) = trace.trace.result
                && address == contract
            {
                creation_bytecode = match trace.trace.action {
                    Action::Create(CreateAction { init, .. }) => Some(init),
                    _ => None,
                };
            }
        }
        creation_bytecode.ok_or_else(|| eyre!("Could not find contract creation trace."))?
    };
    Ok(bytecode)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/bind.rs | crates/cast/src/cmd/bind.rs | use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::opts::EtherscanOpts;
use std::path::PathBuf;
// Defaults for the generated bindings crate name/version. Retained so the
// (now removed) `cast bind` command still parses its historical flags.
const DEFAULT_CRATE_NAME: &str = "foundry-contracts";
const DEFAULT_CRATE_VERSION: &str = "0.0.1";
/// CLI arguments for `cast bind`.
#[derive(Clone, Debug, Parser)]
pub struct BindArgs {
    /// The contract address, or the path to an ABI Directory
    ///
    /// If an address is specified, then the ABI is fetched from Etherscan.
    path_or_address: String,
    /// Path to where bindings will be stored
    #[arg(
        short,
        long,
        value_hint = ValueHint::DirPath,
        value_name = "PATH"
    )]
    pub output_dir: Option<PathBuf>,
    /// The name of the Rust crate to generate.
    ///
    /// This should be a valid crates.io crate name. However, this is currently not validated by
    /// this command.
    #[arg(
        long,
        default_value = DEFAULT_CRATE_NAME,
        value_name = "NAME"
    )]
    crate_name: String,
    /// The version of the Rust crate to generate.
    ///
    /// This should be a standard semver version string. However, it is not currently validated by
    /// this command.
    #[arg(
        long,
        default_value = DEFAULT_CRATE_VERSION,
        value_name = "VERSION"
    )]
    crate_version: String,
    /// Generate bindings as separate files.
    #[arg(long)]
    separate_files: bool,
    // Block explorer options, flattened into this command's flags.
    #[command(flatten)]
    etherscan: EtherscanOpts,
}
impl BindArgs {
    /// `cast bind` is deprecated: this always returns an error pointing users
    /// at `cast source` + `forge bind`. The arguments are still parsed so old
    /// invocations fail with this message rather than a clap usage error.
    pub async fn run(self) -> Result<()> {
        Err(eyre::eyre!(
            "`cast bind` has been removed.\n\
            Please use `cast source` to create a Forge project from a block explorer source\n\
            and `forge bind` to generate the bindings to it instead."
        ))
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/estimate.rs | crates/cast/src/cmd/estimate.rs | use crate::tx::{CastTxBuilder, SenderKind};
use alloy_ens::NameOrAddress;
use alloy_primitives::U256;
use alloy_provider::Provider;
use alloy_rpc_types::BlockId;
use clap::Parser;
use eyre::Result;
use foundry_cli::{
opts::{RpcOpts, TransactionOpts},
utils::{self, LoadConfig, parse_ether_value},
};
use foundry_wallets::WalletOpts;
use std::str::FromStr;
/// CLI arguments for `cast estimate`.
#[derive(Debug, Parser)]
pub struct EstimateArgs {
    /// The destination of the transaction.
    #[arg(value_parser = NameOrAddress::from_str)]
    to: Option<NameOrAddress>,
    /// The signature of the function to call.
    sig: Option<String>,
    /// The arguments of the function to call.
    #[arg(allow_negative_numbers = true)]
    args: Vec<String>,
    /// The block height to query at.
    ///
    /// Can also be the tags earliest, finalized, safe, latest, or pending.
    #[arg(long, short = 'B')]
    block: Option<BlockId>,
    /// Calculate the cost of a transaction using the network gas price.
    ///
    /// If not specified the amount of gas will be estimated.
    #[arg(long)]
    cost: bool,
    // Wallet, subcommand, transaction, and RPC option groups.
    #[command(flatten)]
    wallet: WalletOpts,
    #[command(subcommand)]
    command: Option<EstimateSubcommands>,
    #[command(flatten)]
    tx: TransactionOpts,
    #[command(flatten)]
    rpc: RpcOpts,
}
// Subcommands accepted by `cast estimate`; currently only `--create` for
// estimating contract deployments.
#[derive(Debug, Parser)]
pub enum EstimateSubcommands {
    /// Estimate gas cost to deploy a smart contract
    #[command(name = "--create")]
    Create {
        /// The bytecode of contract
        code: String,
        /// The signature of the constructor
        sig: Option<String>,
        /// Constructor arguments
        #[arg(allow_negative_numbers = true)]
        args: Vec<String>,
        /// Ether to send in the transaction
        ///
        /// Either specified in wei, or as a string with a unit type:
        ///
        /// Examples: 1ether, 10gwei, 0.01ether
        #[arg(long, value_parser = parse_ether_value)]
        value: Option<U256>,
    },
}
impl EstimateArgs {
    /// Estimates the gas required for the call (or `--create` deployment) and
    /// prints either the gas amount or, with `--cost`, its total price in
    /// ether at the current network gas price.
    pub async fn run(self) -> Result<()> {
        let Self { to, mut sig, mut args, mut tx, block, cost, wallet, rpc, command } = self;
        let config = rpc.load_config()?;
        let provider = utils::get_provider(&config)?;
        let sender = SenderKind::from_wallet_opts(wallet).await?;

        // `--create` supplies deployment bytecode and swaps sig/args for the
        // constructor's signature and arguments.
        let code = match command {
            Some(EstimateSubcommands::Create {
                code,
                sig: create_sig,
                args: create_args,
                value,
            }) => {
                sig = create_sig;
                args = create_args;
                if let Some(value) = value {
                    tx.value = Some(value);
                }
                Some(code)
            }
            None => None,
        };

        let (tx, _) = CastTxBuilder::new(&provider, tx, &config)
            .await?
            .with_to(to)
            .await?
            .with_code_sig_and_args(code, sig, args)
            .await?
            .build_raw(sender)
            .await?;

        let gas = provider.estimate_gas(tx).block(block.unwrap_or_default()).await?;

        if cost {
            // Total cost in wei, converted to ether for display.
            let gas_price_wei = provider.get_gas_price().await?;
            let total_wei = gas_price_wei * gas as u128;
            sh_println!("{}", total_wei as f64 / 1e18)?;
        } else {
            sh_println!("{gas}")?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_estimate_value() {
        // `--value` should land in the flattened transaction options.
        let parsed = EstimateArgs::parse_from(["foundry-cli", "--value", "100"]);
        assert!(parsed.tx.value.is_some());
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/artifact.rs | crates/cast/src/cmd/artifact.rs | use super::{
creation_code::{fetch_creation_code_from_etherscan, parse_code_output},
interface::load_abi_from_file,
};
use alloy_primitives::Address;
use alloy_provider::Provider;
use clap::Parser;
use eyre::Result;
use foundry_cli::{
opts::{EtherscanOpts, RpcOpts},
utils::{self, LoadConfig, fetch_abi_from_etherscan},
};
use foundry_common::fs;
use serde_json::json;
use std::path::PathBuf;
// Wires `ArtifactArgs` into figment so its `etherscan` and `rpc` options
// participate in foundry's layered configuration resolution.
foundry_config::impl_figment_convert!(ArtifactArgs, etherscan, rpc);
/// CLI arguments for `cast artifact`.
#[derive(Parser)]
pub struct ArtifactArgs {
    /// An Ethereum address, for which the artifact will be produced.
    contract: Address,
    /// Path to file containing the contract's JSON ABI. It's necessary if the target contract is
    /// not verified on Etherscan.
    #[arg(long)]
    abi_path: Option<String>,
    /// The path to the output file.
    ///
    /// If not specified, the artifact will be output to stdout.
    #[arg(
        short,
        long,
        value_hint = clap::ValueHint::FilePath,
        value_name = "PATH",
    )]
    output: Option<PathBuf>,
    // Block explorer and RPC option groups, flattened into this command's flags.
    #[command(flatten)]
    etherscan: EtherscanOpts,
    #[command(flatten)]
    rpc: RpcOpts,
}
impl ArtifactArgs {
    /// Builds a minimal artifact (`abi` + creation `bytecode`) for `contract`
    /// and writes it to `--output`, or prints it to stdout.
    pub async fn run(self) -> Result<()> {
        let mut config = self.load_config()?;
        let Self { contract, output: output_location, abi_path, etherscan: _, rpc: _ } = self;
        let provider = utils::get_provider(&config)?;
        config.chain = Some(provider.get_chain_id().await?.into());

        // ABI comes from a local file when provided, otherwise from Etherscan.
        let abis = match abi_path {
            Some(ref path) => load_abi_from_file(path, None)?,
            None => fetch_abi_from_etherscan(contract, &config).await?,
        };
        let (abi, _) = abis.first().ok_or_else(|| eyre::eyre!("No ABI found"))?;

        // Creation code is fetched with constructor arguments stripped
        // (`without_args = true`).
        let creation_code = fetch_creation_code_from_etherscan(contract, &config, provider).await?;
        let creation_code =
            parse_code_output(creation_code, contract, &config, abi_path.as_deref(), true, false)
                .await?;

        let artifact = serde_json::to_string_pretty(&json!({
            "abi": abi,
            "bytecode": {
                "object": creation_code
            }
        }))?;

        match output_location {
            Some(loc) => {
                if let Some(parent) = loc.parent() {
                    fs::create_dir_all(parent)?;
                }
                fs::write(&loc, artifact)?;
                sh_println!("Saved artifact at {}", loc.display())?;
            }
            None => sh_println!("{artifact}")?,
        }
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/call.rs | crates/cast/src/cmd/call.rs | use super::run::fetch_contracts_bytecode_from_trace;
use crate::{
Cast,
debug::handle_traces,
traces::TraceKind,
tx::{CastTxBuilder, SenderKind},
};
use alloy_ens::NameOrAddress;
use alloy_primitives::{Address, B256, Bytes, TxKind, U256, map::HashMap};
use alloy_provider::Provider;
use alloy_rpc_types::{
BlockId, BlockNumberOrTag, BlockOverrides,
state::{StateOverride, StateOverridesBuilder},
};
use clap::Parser;
use eyre::Result;
use foundry_cli::{
opts::{ChainValueParser, RpcOpts, TransactionOpts},
utils::{self, TraceResult, parse_ether_value},
};
use foundry_common::shell;
use foundry_compilers::artifacts::EvmVersion;
use foundry_config::{
Chain, Config,
figment::{
self, Metadata, Profile,
value::{Dict, Map},
},
};
use foundry_evm::{
executors::TracingExecutor,
opts::EvmOpts,
traces::{InternalTraceMode, TraceMode},
};
use foundry_wallets::WalletOpts;
use itertools::Either;
use regex::Regex;
use revm::context::TransactionType;
use std::{str::FromStr, sync::LazyLock};
// matches override pattern <address>:<slot>:<value>
// e.g. 0x123:0x1:0x1234
// Compiled lazily, once; the three capture groups are the colon-separated fields.
static OVERRIDE_PATTERN: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^([^:]+):([^:]+):([^:]+)$").unwrap());
/// CLI arguments for `cast call`.
///
/// ## State Override Flags
///
/// The following flags can be used to override the state for the call:
///
/// * `--override-balance <address>:<balance>` - Override the balance of an account
/// * `--override-nonce <address>:<nonce>` - Override the nonce of an account
/// * `--override-code <address>:<code>` - Override the code of an account
/// * `--override-state <address>:<slot>:<value>` - Override a storage slot of an account
///
/// Multiple overrides can be specified for the same account. For example:
///
/// ```bash
/// cast call 0x... "transfer(address,uint256)" 0x... 100 \
///     --override-balance 0x123:0x1234 \
///     --override-nonce 0x123:1 \
///     --override-code 0x123:0x1234 \
///     --override-state 0x123:0x1:0x1234
///     --override-state-diff 0x123:0x1:0x1234
/// ```
#[derive(Debug, Parser)]
pub struct CallArgs {
    /// The destination of the transaction.
    #[arg(value_parser = NameOrAddress::from_str)]
    to: Option<NameOrAddress>,
    /// The signature of the function to call.
    sig: Option<String>,
    /// The arguments of the function to call.
    #[arg(allow_negative_numbers = true)]
    args: Vec<String>,
    /// Raw hex-encoded data for the transaction. Used instead of \[SIG\] and \[ARGS\].
    #[arg(
        long,
        conflicts_with_all = &["sig", "args"]
    )]
    data: Option<String>,
    /// Forks the remote rpc, executes the transaction locally and prints a trace
    #[arg(long, default_value_t = false)]
    trace: bool,
    /// Disables the labels in the traces.
    /// Can only be set with `--trace`.
    #[arg(long, default_value_t = false, requires = "trace")]
    disable_labels: bool,
    /// Opens an interactive debugger.
    /// Can only be used with `--trace`.
    #[arg(long, requires = "trace")]
    debug: bool,
    /// Identify internal functions in traces.
    ///
    /// This will trace internal functions and decode stack parameters.
    ///
    /// Parameters stored in memory (such as bytes or arrays) are currently decoded only when a
    /// single function is matched, similarly to `--debug`, for performance reasons.
    #[arg(long, requires = "trace")]
    decode_internal: bool,
    /// Labels to apply to the traces; format: `address:label`.
    /// Can only be used with `--trace`.
    #[arg(long, requires = "trace")]
    labels: Vec<String>,
    /// The EVM Version to use.
    /// Can only be used with `--trace`.
    #[arg(long, requires = "trace")]
    evm_version: Option<EvmVersion>,
    /// The block height to query at.
    ///
    /// Can also be the tags earliest, finalized, safe, latest, or pending.
    #[arg(long, short)]
    block: Option<BlockId>,
    // Optional `--create` subcommand plus flattened option groups.
    #[command(subcommand)]
    command: Option<CallSubcommands>,
    #[command(flatten)]
    tx: TransactionOpts,
    #[command(flatten)]
    rpc: RpcOpts,
    #[command(flatten)]
    wallet: WalletOpts,
    // Target chain; plain `//` comment on purpose so clap help output is unchanged.
    #[arg(
        short,
        long,
        alias = "chain-id",
        env = "CHAIN",
        value_parser = ChainValueParser::default(),
    )]
    pub chain: Option<Chain>,
    /// Use current project artifacts for trace decoding.
    #[arg(long, visible_alias = "la")]
    pub with_local_artifacts: bool,
    /// Override the accounts balance.
    /// Format: "address:balance,address:balance"
    #[arg(long = "override-balance", value_name = "ADDRESS:BALANCE", value_delimiter = ',')]
    pub balance_overrides: Option<Vec<String>>,
    /// Override the accounts nonce.
    /// Format: "address:nonce,address:nonce"
    #[arg(long = "override-nonce", value_name = "ADDRESS:NONCE", value_delimiter = ',')]
    pub nonce_overrides: Option<Vec<String>>,
    /// Override the accounts code.
    /// Format: "address:code,address:code"
    #[arg(long = "override-code", value_name = "ADDRESS:CODE", value_delimiter = ',')]
    pub code_overrides: Option<Vec<String>>,
    /// Override the accounts state and replace the current state entirely with the new one.
    /// Format: "address:slot:value,address:slot:value"
    #[arg(long = "override-state", value_name = "ADDRESS:SLOT:VALUE", value_delimiter = ',')]
    pub state_overrides: Option<Vec<String>>,
    /// Override the accounts state specific slots and preserve the rest of the state.
    /// Format: "address:slot:value,address:slot:value"
    #[arg(long = "override-state-diff", value_name = "ADDRESS:SLOT:VALUE", value_delimiter = ',')]
    pub state_diff_overrides: Option<Vec<String>>,
    /// Override the block timestamp.
    #[arg(long = "block.time", value_name = "TIME")]
    pub block_time: Option<u64>,
    /// Override the block number.
    #[arg(long = "block.number", value_name = "NUMBER")]
    pub block_number: Option<u64>,
}
// Subcommands accepted by `cast call`; currently only `--create` for
// simulating contract deployments.
#[derive(Debug, Parser)]
pub enum CallSubcommands {
    /// ignores the address field and simulates creating a contract
    #[command(name = "--create")]
    Create {
        /// Bytecode of contract.
        code: String,
        /// The signature of the constructor.
        sig: Option<String>,
        /// The arguments of the constructor.
        #[arg(allow_negative_numbers = true)]
        args: Vec<String>,
        /// Ether to send in the transaction.
        ///
        /// Either specified in wei, or as a string with a unit type.
        ///
        /// Examples: 1ether, 10gwei, 0.01ether
        #[arg(long, value_parser = parse_ether_value)]
        value: Option<U256>,
    },
}
impl CallArgs {
    /// Executes `cast call`: builds the transaction request and either
    /// simulates it locally with tracing (`--trace`) or issues an `eth_call`
    /// against the RPC endpoint, applying any state/block overrides.
    pub async fn run(self) -> Result<()> {
        // Merge CLI flags into the layered figment config before extracting
        // EVM options; `self` is a figment provider (see impl below).
        let figment = self.rpc.clone().into_figment(self.with_local_artifacts).merge(&self);
        let evm_opts = figment.extract::<EvmOpts>()?;
        let mut config = Config::from_provider(figment)?.sanitized();
        let state_overrides = self.get_state_overrides()?;
        let block_overrides = self.get_block_overrides()?;
        let Self {
            to,
            mut sig,
            mut args,
            mut tx,
            command,
            block,
            trace,
            evm_version,
            debug,
            decode_internal,
            labels,
            data,
            with_local_artifacts,
            disable_labels,
            wallet,
            ..
        } = self;
        // `--data` provides raw calldata in place of sig + args (they
        // conflict at the clap level).
        if let Some(data) = data {
            sig = Some(data);
        }
        let provider = utils::get_provider(&config)?;
        let sender = SenderKind::from_wallet_opts(wallet).await?;
        let from = sender.address();
        // `--create` simulates a deployment: constructor sig/args + bytecode.
        let code = if let Some(CallSubcommands::Create {
            code,
            sig: create_sig,
            args: create_args,
            value,
        }) = command
        {
            sig = create_sig;
            args = create_args;
            if let Some(value) = value {
                tx.value = Some(value);
            }
            Some(code)
        } else {
            None
        };
        let (tx, func) = CastTxBuilder::new(&provider, tx, &config)
            .await?
            .with_to(to)
            .await?
            .with_code_sig_and_args(code, sig, args)
            .await?
            .build_raw(sender)
            .await?;
        if trace {
            // NOTE: `self.block` is still accessible after the destructuring
            // above because `Option<BlockId>` is `Copy`.
            if let Some(BlockId::Number(BlockNumberOrTag::Number(block_number))) = self.block {
                // Override Config `fork_block_number` (if set) with CLI value.
                config.fork_block_number = Some(block_number);
            }
            let create2_deployer = evm_opts.create2_deployer;
            let (mut env, fork, chain, networks) =
                TracingExecutor::get_fork_material(&mut config, evm_opts).await?;
            // modify settings that usually set in eth_call
            env.evm_env.cfg_env.disable_block_gas_limit = true;
            env.evm_env.cfg_env.tx_gas_limit_cap = Some(u64::MAX);
            env.evm_env.block_env.gas_limit = u64::MAX;
            // Apply the block overrides.
            if let Some(block_overrides) = block_overrides {
                if let Some(number) = block_overrides.number {
                    env.evm_env.block_env.number = number.to();
                }
                if let Some(time) = block_overrides.time {
                    env.evm_env.block_env.timestamp = U256::from(time);
                }
            }
            let trace_mode = TraceMode::Call
                .with_debug(debug)
                .with_decode_internal(if decode_internal {
                    InternalTraceMode::Full
                } else {
                    InternalTraceMode::None
                })
                .with_state_changes(shell::verbosity() > 4);
            let mut executor = TracingExecutor::new(
                env,
                fork,
                evm_version,
                trace_mode,
                networks,
                create2_deployer,
                state_overrides,
            )?;
            let value = tx.value.unwrap_or_default();
            let input = tx.inner.input.into_input().unwrap_or_default();
            let tx_kind = tx.inner.to.expect("set by builder");
            let env_tx = &mut executor.env_mut().tx;
            // Set transaction options with --trace
            if let Some(gas_limit) = tx.inner.gas {
                env_tx.gas_limit = gas_limit;
            }
            if let Some(gas_price) = tx.inner.gas_price {
                env_tx.gas_price = gas_price;
            }
            if let Some(max_fee_per_gas) = tx.inner.max_fee_per_gas {
                env_tx.gas_price = max_fee_per_gas;
            }
            if let Some(max_priority_fee_per_gas) = tx.inner.max_priority_fee_per_gas {
                env_tx.gas_priority_fee = Some(max_priority_fee_per_gas);
            }
            if let Some(max_fee_per_blob_gas) = tx.inner.max_fee_per_blob_gas {
                env_tx.max_fee_per_blob_gas = max_fee_per_blob_gas;
            }
            if let Some(nonce) = tx.inner.nonce {
                env_tx.nonce = nonce;
            }
            if let Some(tx_type) = tx.inner.transaction_type {
                env_tx.tx_type = tx_type;
            }
            // An access list upgrades a legacy tx to EIP-2930.
            if let Some(access_list) = tx.inner.access_list {
                env_tx.access_list = access_list;
                if env_tx.tx_type == TransactionType::Legacy as u8 {
                    env_tx.tx_type = TransactionType::Eip2930 as u8;
                }
            }
            // An authorization list forces EIP-7702.
            if let Some(auth) = tx.inner.authorization_list {
                env_tx.authorization_list = auth.into_iter().map(Either::Left).collect();
                env_tx.tx_type = TransactionType::Eip7702 as u8;
            }
            let trace = match tx_kind {
                TxKind::Create => {
                    let deploy_result = executor.deploy(from, input, value, None);
                    TraceResult::try_from(deploy_result)?
                }
                TxKind::Call(to) => TraceResult::from_raw(
                    executor.transact_raw(from, to, input, value)?,
                    TraceKind::Execution,
                ),
            };
            let contracts_bytecode = fetch_contracts_bytecode_from_trace(&executor, &trace)?;
            handle_traces(
                trace,
                &config,
                chain,
                &contracts_bytecode,
                labels,
                with_local_artifacts,
                debug,
                decode_internal,
                disable_labels,
                None,
            )
            .await?;
            return Ok(());
        }
        let response = Cast::new(&provider)
            .call(&tx, func.as_ref(), block, state_overrides, block_overrides)
            .await?;
        // An empty return from a codeless address is usually a user error;
        // warn rather than fail.
        if response == "0x"
            && let Some(contract_address) = tx.to.and_then(|tx_kind| tx_kind.into_to())
        {
            let code = provider.get_code_at(contract_address).await?;
            if code.is_empty() {
                sh_warn!("Contract code is empty")?;
            }
        }
        sh_println!("{}", response)?;
        Ok(())
    }
    /// Parse state overrides from command line arguments.
    pub fn get_state_overrides(&self) -> eyre::Result<Option<StateOverride>> {
        // Early return if no override set - <https://github.com/foundry-rs/foundry/issues/10705>
        if [
            self.balance_overrides.as_ref(),
            self.nonce_overrides.as_ref(),
            self.code_overrides.as_ref(),
            self.state_overrides.as_ref(),
            self.state_diff_overrides.as_ref(),
        ]
        .iter()
        .all(Option::is_none)
        {
            return Ok(None);
        }
        let mut state_overrides_builder = StateOverridesBuilder::default();
        // Parse balance overrides
        for override_str in self.balance_overrides.iter().flatten() {
            let (addr, balance) = address_value_override(override_str)?;
            state_overrides_builder =
                state_overrides_builder.with_balance(addr.parse()?, balance.parse()?);
        }
        // Parse nonce overrides
        for override_str in self.nonce_overrides.iter().flatten() {
            let (addr, nonce) = address_value_override(override_str)?;
            state_overrides_builder =
                state_overrides_builder.with_nonce(addr.parse()?, nonce.parse()?);
        }
        // Parse code overrides
        for override_str in self.code_overrides.iter().flatten() {
            let (addr, code_str) = address_value_override(override_str)?;
            state_overrides_builder =
                state_overrides_builder.with_code(addr.parse()?, Bytes::from_str(code_str)?);
        }
        // Shared parser for <address>:<slot>:<value> lists, grouped by address.
        type StateOverrides = HashMap<Address, HashMap<B256, B256>>;
        let parse_state_overrides =
            |overrides: &Option<Vec<String>>| -> Result<StateOverrides, eyre::Report> {
                let mut state_overrides: StateOverrides = StateOverrides::default();
                overrides.iter().flatten().try_for_each(|s| -> Result<(), eyre::Report> {
                    let (addr, slot, value) = address_slot_value_override(s)?;
                    state_overrides.entry(addr).or_default().insert(slot.into(), value.into());
                    Ok(())
                })?;
                Ok(state_overrides)
            };
        // Parse and apply state overrides
        for (addr, entries) in parse_state_overrides(&self.state_overrides)? {
            state_overrides_builder = state_overrides_builder.with_state(addr, entries.into_iter());
        }
        // Parse and apply state diff overrides
        for (addr, entries) in parse_state_overrides(&self.state_diff_overrides)? {
            state_overrides_builder =
                state_overrides_builder.with_state_diff(addr, entries.into_iter())
        }
        Ok(Some(state_overrides_builder.build()))
    }
    /// Parse block overrides from command line arguments.
    pub fn get_block_overrides(&self) -> eyre::Result<Option<BlockOverrides>> {
        let mut overrides = BlockOverrides::default();
        if let Some(number) = self.block_number {
            overrides = overrides.with_number(U256::from(number));
        }
        if let Some(time) = self.block_time {
            overrides = overrides.with_time(time);
        }
        if overrides.is_empty() { Ok(None) } else { Ok(Some(overrides)) }
    }
}
impl figment::Provider for CallArgs {
fn metadata(&self) -> Metadata {
Metadata::named("CallArgs")
}
fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
let mut map = Map::new();
if let Some(evm_version) = self.evm_version {
map.insert("evm_version".into(), figment::value::Value::serialize(evm_version)?);
}
Ok(Map::from([(Config::selected_profile(), map)]))
}
}
/// Parse an override string in the format address:value.
///
/// Splits on the first `:`; both halves are returned verbatim for the caller
/// to parse further.
fn address_value_override(address_override: &str) -> Result<(&str, &str)> {
    match address_override.split_once(':') {
        Some(parts) => Ok(parts),
        None => {
            Err(eyre::eyre!("Invalid override {address_override}. Expected <address>:<value>"))
        }
    }
}
/// Parse an override string in the format address:slot:value.
///
/// The three colon-separated fields are parsed into an address and two U256s.
fn address_slot_value_override(address_override: &str) -> Result<(Address, U256, U256)> {
    let Some(captures) = OVERRIDE_PATTERN.captures(address_override) else {
        return Err(eyre::eyre!(
            "Invalid override {address_override}. Expected <address>:<slot>:<value>"
        ));
    };
    let address: Address = captures[1].parse()?;
    let slot: U256 = captures[2].parse()?;
    let value: U256 = captures[3].parse()?;
    Ok((address, slot, value))
}
#[cfg(test)]
mod tests {
use super::*;
use alloy_primitives::{U64, address, b256, fixed_bytes, hex};
#[test]
fn test_get_state_overrides() {
let call_args = CallArgs::parse_from([
"foundry-cli",
"--override-balance",
"0x0000000000000000000000000000000000000001:2",
"--override-nonce",
"0x0000000000000000000000000000000000000001:3",
"--override-code",
"0x0000000000000000000000000000000000000001:0x04",
"--override-state",
"0x0000000000000000000000000000000000000001:5:6",
"--override-state-diff",
"0x0000000000000000000000000000000000000001:7:8",
]);
let overrides = call_args.get_state_overrides().unwrap().unwrap();
let address = address!("0x0000000000000000000000000000000000000001");
if let Some(account_override) = overrides.get(&address) {
if let Some(balance) = account_override.balance {
assert_eq!(balance, U256::from(2));
}
if let Some(nonce) = account_override.nonce {
assert_eq!(nonce, 3);
}
if let Some(code) = &account_override.code {
assert_eq!(*code, Bytes::from([0x04]));
}
if let Some(state) = &account_override.state
&& let Some(value) = state.get(&b256!(
"0x0000000000000000000000000000000000000000000000000000000000000005"
))
{
assert_eq!(
*value,
b256!("0x0000000000000000000000000000000000000000000000000000000000000006")
);
}
if let Some(state_diff) = &account_override.state_diff
&& let Some(value) = state_diff.get(&b256!(
"0x0000000000000000000000000000000000000000000000000000000000000007"
))
{
assert_eq!(
*value,
b256!("0x0000000000000000000000000000000000000000000000000000000000000008")
);
}
}
}
#[test]
fn test_get_state_overrides_empty() {
let call_args = CallArgs::parse_from([""]);
let overrides = call_args.get_state_overrides().unwrap();
assert_eq!(overrides, None);
}
#[test]
fn test_get_block_overrides() {
let mut call_args = CallArgs::parse_from([""]);
call_args.block_number = Some(1);
call_args.block_time = Some(2);
let overrides = call_args.get_block_overrides().unwrap().unwrap();
assert_eq!(overrides.number, Some(U256::from(1)));
assert_eq!(overrides.time, Some(2));
}
#[test]
fn test_get_block_overrides_empty() {
let call_args = CallArgs::parse_from([""]);
let overrides = call_args.get_block_overrides().unwrap();
assert_eq!(overrides, None);
}
#[test]
fn test_address_value_override_success() {
let text = "0x0000000000000000000000000000000000000001:2";
let (address, value) = address_value_override(text).unwrap();
assert_eq!(address, "0x0000000000000000000000000000000000000001");
assert_eq!(value, "2");
}
#[test]
fn test_address_value_override_error() {
let text = "invalid_value";
let error = address_value_override(text).unwrap_err();
assert_eq!(error.to_string(), "Invalid override invalid_value. Expected <address>:<value>");
}
#[test]
fn test_address_slot_value_override_success() {
let text = "0x0000000000000000000000000000000000000001:2:3";
let (address, slot, value) = address_slot_value_override(text).unwrap();
assert_eq!(*address, fixed_bytes!("0x0000000000000000000000000000000000000001"));
assert_eq!(slot, U256::from(2));
assert_eq!(value, U256::from(3));
}
#[test]
fn test_address_slot_value_override_error() {
let text = "invalid_value";
let error = address_slot_value_override(text).unwrap_err();
assert_eq!(
error.to_string(),
"Invalid override invalid_value. Expected <address>:<slot>:<value>"
);
}
#[test]
fn can_parse_call_data() {
let data = hex::encode("hello");
let args = CallArgs::parse_from(["foundry-cli", "--data", data.as_str()]);
assert_eq!(args.data, Some(data));
let data = hex::encode_prefixed("hello");
let args = CallArgs::parse_from(["foundry-cli", "--data", data.as_str()]);
assert_eq!(args.data, Some(data));
}
#[test]
fn can_parse_state_overrides() {
let args = CallArgs::parse_from([
"foundry-cli",
"--override-balance",
"0x123:0x1234",
"--override-nonce",
"0x123:1",
"--override-code",
"0x123:0x1234",
"--override-state",
"0x123:0x1:0x1234",
]);
assert_eq!(args.balance_overrides, Some(vec!["0x123:0x1234".to_string()]));
assert_eq!(args.nonce_overrides, Some(vec!["0x123:1".to_string()]));
assert_eq!(args.code_overrides, Some(vec!["0x123:0x1234".to_string()]));
assert_eq!(args.state_overrides, Some(vec!["0x123:0x1:0x1234".to_string()]));
}
#[test]
fn can_parse_multiple_state_overrides() {
let args = CallArgs::parse_from([
"foundry-cli",
"--override-balance",
"0x123:0x1234",
"--override-balance",
"0x456:0x5678",
"--override-nonce",
"0x123:1",
"--override-nonce",
"0x456:2",
"--override-code",
"0x123:0x1234",
"--override-code",
"0x456:0x5678",
"--override-state",
"0x123:0x1:0x1234",
"--override-state",
"0x456:0x2:0x5678",
]);
assert_eq!(
args.balance_overrides,
Some(vec!["0x123:0x1234".to_string(), "0x456:0x5678".to_string()])
);
assert_eq!(args.nonce_overrides, Some(vec!["0x123:1".to_string(), "0x456:2".to_string()]));
assert_eq!(
args.code_overrides,
Some(vec!["0x123:0x1234".to_string(), "0x456:0x5678".to_string()])
);
assert_eq!(
args.state_overrides,
Some(vec!["0x123:0x1:0x1234".to_string(), "0x456:0x2:0x5678".to_string()])
);
}
#[test]
fn test_negative_args_with_flags() {
// Test that negative args work with flags
let args = CallArgs::parse_from([
"foundry-cli",
"--trace",
"0xDeaDBeeFcAfEbAbEfAcEfEeDcBaDbEeFcAfEbAbE",
"process(int256)",
"-999999",
"--debug",
]);
assert!(args.trace);
assert!(args.debug);
assert_eq!(args.args, vec!["-999999"]);
}
#[test]
fn test_transaction_opts_with_trace() {
// Test that transaction options are correctly parsed when using --trace
let args = CallArgs::parse_from([
"foundry-cli",
"--trace",
"--gas-limit",
"1000000",
"--gas-price",
"20000000000",
"--priority-gas-price",
"2000000000",
"--nonce",
"42",
"--value",
"1000000000000000000", // 1 ETH
"--blob-gas-price",
"10000000000",
"0xDeaDBeeFcAfEbAbEfAcEfEeDcBaDbEeFcAfEbAbE",
"balanceOf(address)",
"0x123456789abcdef123456789abcdef123456789a",
]);
assert!(args.trace);
assert_eq!(args.tx.gas_limit, Some(U256::from(1000000u32)));
assert_eq!(args.tx.gas_price, Some(U256::from(20000000000u64)));
assert_eq!(args.tx.priority_gas_price, Some(U256::from(2000000000u64)));
assert_eq!(args.tx.nonce, Some(U64::from(42)));
assert_eq!(args.tx.value, Some(U256::from(1000000000000000000u64)));
assert_eq!(args.tx.blob_gas_price, Some(U256::from(10000000000u64)));
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/interface.rs | crates/cast/src/cmd/interface.rs | use alloy_json_abi::{ContractObject, JsonAbi};
use alloy_primitives::Address;
use clap::Parser;
use eyre::{Context, Result};
use forge_fmt::FormatterConfig;
use foundry_cli::{
opts::EtherscanOpts,
utils::{LoadConfig, fetch_abi_from_etherscan},
};
use foundry_common::{
ContractsByArtifact,
compile::{PathOrContractInfo, ProjectCompiler},
find_target_path, fs, shell,
};
use foundry_config::load_config;
use itertools::Itertools;
use serde_json::Value;
use std::{
path::{Path, PathBuf},
str::FromStr,
};
/// CLI arguments for `cast interface`.
#[derive(Clone, Debug, Parser)]
pub struct InterfaceArgs {
/// The target contract, which can be one of:
/// - A file path to an ABI JSON file.
/// - A contract identifier in the form `<path>:<contractname>` or just `<contractname>`.
/// - An Ethereum address, for which the ABI will be fetched from Etherscan.
contract: String,
/// The name to use for the generated interface.
///
/// Only relevant when retrieving the ABI from a file.
#[arg(long, short)]
name: Option<String>,
/// Solidity pragma version.
#[arg(long, short, default_value = "^0.8.4", value_name = "VERSION")]
pragma: String,
/// The path to the output file.
///
/// If not specified, the interface will be output to stdout.
#[arg(
short,
long,
value_hint = clap::ValueHint::FilePath,
value_name = "PATH",
)]
output: Option<PathBuf>,
#[command(flatten)]
etherscan: EtherscanOpts,
}
impl InterfaceArgs {
pub async fn run(self) -> Result<()> {
let Self { contract, name, pragma, output: output_location, etherscan } = self;
// Determine if the target contract is an ABI file, a local contract or an Ethereum address.
let abis = if Path::new(&contract).is_file()
&& fs::read_to_string(&contract)
.ok()
.and_then(|content| serde_json::from_str::<Value>(&content).ok())
.is_some()
{
load_abi_from_file(&contract, name)?
} else {
match Address::from_str(&contract) {
Ok(address) => fetch_abi_from_etherscan(address, ðerscan.load_config()?).await?,
Err(_) => load_abi_from_artifact(&contract)?,
}
};
// Retrieve interfaces from the array of ABIs.
let interfaces = get_interfaces(abis)?;
// Print result or write to file.
let res = if shell::is_json() {
// Format as JSON.
interfaces.iter().map(|iface| &iface.json_abi).format("\n").to_string()
} else {
// Format as Solidity.
format!(
"// SPDX-License-Identifier: UNLICENSED\n\
pragma solidity {pragma};\n\n\
{}",
interfaces.iter().map(|iface| &iface.source).format("\n")
)
};
if let Some(loc) = output_location {
if let Some(parent) = loc.parent() {
fs::create_dir_all(parent)?;
}
fs::write(&loc, res)?;
sh_println!("Saved interface at {}", loc.display())?;
} else {
sh_print!("{res}")?;
}
Ok(())
}
}
struct InterfaceSource {
json_abi: String,
source: String,
}
/// Load the ABI from a file.
pub fn load_abi_from_file(path: &str, name: Option<String>) -> Result<Vec<(JsonAbi, String)>> {
let file = std::fs::read_to_string(path).wrap_err("unable to read abi file")?;
let obj: ContractObject = serde_json::from_str(&file)?;
let abi = obj.abi.ok_or_else(|| eyre::eyre!("could not find ABI in file {path}"))?;
let name = name.unwrap_or_else(|| "Interface".to_owned());
Ok(vec![(abi, name)])
}
/// Load the ABI from the artifact of a locally compiled contract.
fn load_abi_from_artifact(path_or_contract: &str) -> Result<Vec<(JsonAbi, String)>> {
let config = load_config()?;
let project = config.project()?;
let compiler = ProjectCompiler::new().quiet(true);
let contract = PathOrContractInfo::from_str(path_or_contract)?;
let target_path = find_target_path(&project, &contract)?;
let output = compiler.files([target_path.clone()]).compile(&project)?;
let contracts_by_artifact = ContractsByArtifact::from(output);
let maybe_abi = contracts_by_artifact
.find_abi_by_name_or_src_path(contract.name().unwrap_or(&target_path.to_string_lossy()));
let (abi, name) =
maybe_abi.as_ref().ok_or_else(|| eyre::eyre!("Failed to fetch lossless ABI"))?;
Ok(vec![(abi.clone(), contract.name().unwrap_or(name).to_string())])
}
/// Converts a vector of tuples containing the ABI and contract name into a vector of
/// `InterfaceSource` objects.
fn get_interfaces(abis: Vec<(JsonAbi, String)>) -> Result<Vec<InterfaceSource>> {
abis.into_iter()
.map(|(contract_abi, name)| {
let source = match forge_fmt::format(
&contract_abi.to_sol(&name, None),
FormatterConfig::default(),
)
.into_result()
{
Ok(generated_source) => generated_source,
Err(e) => {
sh_warn!("Failed to format interface for {name}: {e}")?;
contract_abi.to_sol(&name, None)
}
};
Ok(InterfaceSource { json_abi: serde_json::to_string_pretty(&contract_abi)?, source })
})
.collect()
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/send.rs | crates/cast/src/cmd/send.rs | use std::{path::PathBuf, str::FromStr, time::Duration};
use alloy_eips::Encodable2718;
use alloy_ens::NameOrAddress;
use alloy_network::{AnyNetwork, EthereumWallet, NetworkWallet};
use alloy_provider::{Provider, ProviderBuilder};
use alloy_rpc_types::TransactionRequest;
use alloy_serde::WithOtherFields;
use alloy_signer::Signer;
use clap::Parser;
use eyre::{Result, eyre};
use foundry_cli::{opts::TransactionOpts, utils, utils::LoadConfig};
use foundry_wallets::WalletSigner;
use crate::tx::{self, CastTxBuilder, CastTxSender, SendTxOpts};
/// CLI arguments for `cast send`.
#[derive(Debug, Parser)]
pub struct SendTxArgs {
/// The destination of the transaction.
///
/// If not provided, you must use cast send --create.
#[arg(value_parser = NameOrAddress::from_str)]
to: Option<NameOrAddress>,
/// The signature of the function to call.
sig: Option<String>,
/// The arguments of the function to call.
#[arg(allow_negative_numbers = true)]
args: Vec<String>,
/// Raw hex-encoded data for the transaction. Used instead of \[SIG\] and \[ARGS\].
#[arg(
long,
conflicts_with_all = &["sig", "args"]
)]
data: Option<String>,
#[command(flatten)]
send_tx: SendTxOpts,
#[command(subcommand)]
command: Option<SendTxSubcommands>,
/// Send via `eth_sendTransaction` using the `--from` argument or $ETH_FROM as sender
#[arg(long, requires = "from")]
unlocked: bool,
#[command(flatten)]
tx: TransactionOpts,
/// The path of blob data to be sent.
#[arg(
long,
value_name = "BLOB_DATA_PATH",
conflicts_with = "legacy",
requires = "blob",
help_heading = "Transaction options"
)]
path: Option<PathBuf>,
}
#[derive(Debug, Parser)]
pub enum SendTxSubcommands {
/// Use to deploy raw contract bytecode.
#[command(name = "--create")]
Create {
/// The bytecode of the contract to deploy.
code: String,
/// The signature of the function to call.
sig: Option<String>,
/// The arguments of the function to call.
#[arg(allow_negative_numbers = true)]
args: Vec<String>,
},
}
impl SendTxArgs {
pub async fn run(self) -> eyre::Result<()> {
let Self { to, mut sig, mut args, data, send_tx, tx, command, unlocked, path } = self;
let blob_data = if let Some(path) = path { Some(std::fs::read(path)?) } else { None };
if let Some(data) = data {
sig = Some(data);
}
let code = if let Some(SendTxSubcommands::Create {
code,
sig: constructor_sig,
args: constructor_args,
}) = command
{
// ensure we don't violate settings for transactions that can't be CREATE: 7702 and 4844
// which require mandatory target
if to.is_none() && !tx.auth.is_empty() {
return Err(eyre!(
"EIP-7702 transactions can't be CREATE transactions and require a destination address"
));
}
// ensure we don't violate settings for transactions that can't be CREATE: 7702 and 4844
// which require mandatory target
if to.is_none() && blob_data.is_some() {
return Err(eyre!(
"EIP-4844 transactions can't be CREATE transactions and require a destination address"
));
}
sig = constructor_sig;
args = constructor_args;
Some(code)
} else {
None
};
let config = send_tx.eth.load_config()?;
let provider = utils::get_provider(&config)?;
if let Some(interval) = send_tx.poll_interval {
provider.client().set_poll_interval(Duration::from_secs(interval))
}
let builder = CastTxBuilder::new(&provider, tx, &config)
.await?
.with_to(to)
.await?
.with_code_sig_and_args(code, sig, args)
.await?
.with_blob_data(blob_data)?;
let timeout = send_tx.timeout.unwrap_or(config.transaction_timeout);
// Check if this is a Tempo transaction - requires special handling for local signing
let is_tempo = builder.is_tempo();
// Tempo transactions with browser wallets are not supported
if is_tempo && send_tx.eth.wallet.browser {
return Err(eyre!("Tempo transactions are not supported with browser wallets."));
}
// Case 1:
// Default to sending via eth_sendTransaction if the --unlocked flag is passed.
// This should be the only way this RPC method is used as it requires a local node
// or remote RPC with unlocked accounts.
if unlocked && !send_tx.eth.wallet.browser {
// only check current chain id if it was specified in the config
if let Some(config_chain) = config.chain {
let current_chain_id = provider.get_chain_id().await?;
let config_chain_id = config_chain.id();
// switch chain if current chain id is not the same as the one specified in the
// config
if config_chain_id != current_chain_id {
sh_warn!("Switching to chain {}", config_chain)?;
provider
.raw_request::<_, ()>(
"wallet_switchEthereumChain".into(),
[serde_json::json!({
"chainId": format!("0x{:x}", config_chain_id),
})],
)
.await?;
}
}
let (tx, _) = builder.build(config.sender).await?;
cast_send(
provider,
tx.into_inner(),
send_tx.cast_async,
send_tx.sync,
send_tx.confirmations,
timeout,
)
.await
// Case 2:
// An option to use a local signer was provided.
// If we cannot successfully instantiate a local signer, then we will assume we don't have
// enough information to sign and we must bail.
} else {
// Retrieve the signer, and bail if it can't be constructed.
let signer = send_tx.eth.wallet.signer().await?;
let from = signer.address();
tx::validate_from_address(send_tx.eth.wallet.from, from)?;
// Browser wallets work differently as they sign and send the transaction in one step.
if send_tx.eth.wallet.browser
&& let WalletSigner::Browser(ref browser_signer) = signer
{
let (tx_request, _) = builder.build(from).await?;
let tx_hash = browser_signer
.send_transaction_via_browser(tx_request.into_inner().inner)
.await?;
if send_tx.cast_async {
sh_println!("{tx_hash:#x}")?;
} else {
let receipt = CastTxSender::new(&provider)
.receipt(
format!("{tx_hash:#x}"),
None,
send_tx.confirmations,
Some(timeout),
false,
)
.await?;
sh_println!("{receipt}")?;
}
return Ok(());
}
// Tempo transactions need to be signed locally and sent as raw transactions
// because EthereumWallet doesn't understand type 0x76
// TODO(onbjerg): All of this is a side effect of a few things, most notably that we do
// not use `FoundryNetwork` and `FoundryTransactionRequest` everywhere, which is
// downstream of the fact that we use `EthereumWallet` everywhere.
if is_tempo {
let (ftx, _) = builder.build(&signer).await?;
// Sign using NetworkWallet<FoundryNetwork>
let signed_tx = signer.sign_request(ftx).await?;
// Encode and send raw
let mut raw_tx = Vec::with_capacity(signed_tx.encode_2718_len());
signed_tx.encode_2718(&mut raw_tx);
let cast = CastTxSender::new(&provider);
let pending_tx = cast.send_raw(&raw_tx).await?;
let tx_hash = pending_tx.inner().tx_hash();
if send_tx.cast_async {
sh_println!("{tx_hash:#x}")?;
} else if send_tx.sync {
// For sync mode, we already have the hash, just wait for receipt
let receipt = cast
.receipt(
format!("{tx_hash:#x}"),
None,
send_tx.confirmations,
Some(timeout),
false,
)
.await?;
sh_println!("{receipt}")?;
} else {
let receipt = cast
.receipt(
format!("{tx_hash:#x}"),
None,
send_tx.confirmations,
Some(timeout),
false,
)
.await?;
sh_println!("{receipt}")?;
}
return Ok(());
}
let (tx_request, _) = builder.build(&signer).await?;
let wallet = EthereumWallet::from(signer);
let provider = ProviderBuilder::<_, _, AnyNetwork>::default()
.wallet(wallet)
.connect_provider(&provider);
cast_send(
provider,
tx_request.into_inner(),
send_tx.cast_async,
send_tx.sync,
send_tx.confirmations,
timeout,
)
.await
}
}
}
pub(crate) async fn cast_send<P: Provider<AnyNetwork>>(
provider: P,
tx: WithOtherFields<TransactionRequest>,
cast_async: bool,
sync: bool,
confs: u64,
timeout: u64,
) -> Result<()> {
let cast = CastTxSender::new(&provider);
if sync {
// Send transaction and wait for receipt synchronously
let receipt = cast.send_sync(tx).await?;
sh_println!("{receipt}")?;
} else {
let pending_tx = cast.send(tx).await?;
let tx_hash = pending_tx.inner().tx_hash();
if cast_async {
sh_println!("{tx_hash:#x}")?;
} else {
let receipt =
cast.receipt(format!("{tx_hash:#x}"), None, confs, Some(timeout), false).await?;
sh_println!("{receipt}")?;
}
}
Ok(())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/b2e_payload.rs | crates/cast/src/cmd/b2e_payload.rs | //! Command Line handler to convert Beacon block's execution payload to Execution format.
use std::path::PathBuf;
use alloy_rpc_types_beacon::payload::BeaconBlockData;
use clap::{Parser, builder::ValueParser};
use eyre::{Result, eyre};
use foundry_common::{fs, sh_print};
/// CLI arguments for `cast b2e-payload`, convert Beacon block's execution payload to Execution
/// format.
#[derive(Parser)]
pub struct B2EPayloadArgs {
/// Input data, it can be either a file path to JSON file or raw JSON string containing the
/// beacon block
#[arg(value_name = "INPUT", value_parser=ValueParser::new(parse_input_source), help = "File path to JSON file or raw JSON string containing the beacon block")]
pub input: InputSource,
}
impl B2EPayloadArgs {
pub async fn run(self) -> Result<()> {
let beacon_block_json = match self.input {
InputSource::Json(json) => json,
InputSource::File(path) => fs::read_to_string(&path)
.map_err(|e| eyre!("Failed to read JSON file '{}': {}", path.display(), e))?,
};
let beacon_block_data: BeaconBlockData = serde_json::from_str(&beacon_block_json)
.map_err(|e| eyre!("Failed to parse beacon block JSON: {}", e))?;
let execution_payload = beacon_block_data.execution_payload();
// Output raw execution payload
let output = serde_json::to_string(&execution_payload)
.map_err(|e| eyre!("Failed to serialize execution payload: {}", e))?;
sh_print!("{}", output)?;
Ok(())
}
}
/// Represents the different input sources for beacon block data
#[derive(Debug, Clone)]
pub enum InputSource {
/// Path to a JSON file containing beacon block data
File(PathBuf),
/// Raw JSON string containing beacon block data
Json(String),
}
fn parse_input_source(s: &str) -> Result<InputSource, String> {
// Try parsing as JSON first
if serde_json::from_str::<serde_json::Value>(s).is_ok() {
return Ok(InputSource::Json(s.to_string()));
}
// Otherwise treat as file path
Ok(InputSource::File(PathBuf::from(s)))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_input_source_json_object() {
let json_input = r#"{"execution_payload": {"block_hash": "0x123"}}"#;
let result = parse_input_source(json_input).unwrap();
match result {
InputSource::Json(json) => assert_eq!(json, json_input),
InputSource::File(_) => panic!("Expected JSON input, got File"),
}
}
#[test]
fn test_parse_input_source_json_array() {
let json_input = r#"[{"block": "data"}]"#;
let result = parse_input_source(json_input).unwrap();
match result {
InputSource::Json(json) => assert_eq!(json, json_input),
InputSource::File(_) => panic!("Expected JSON input, got File"),
}
}
#[test]
fn test_parse_input_source_file_path() {
let file_path =
"block-12225729-6ceadbf2a6adbbd64cbec33fdebbc582f25171cd30ac43f641cbe76ac7313ddf.json";
let result = parse_input_source(file_path).unwrap();
match result {
InputSource::File(path) => assert_eq!(path, PathBuf::from(file_path)),
InputSource::Json(_) => panic!("Expected File input, got JSON"),
}
}
#[test]
fn test_parse_input_source_malformed_but_not_json() {
let malformed = "not-json-{";
let result = parse_input_source(malformed).unwrap();
// Should be treated as file path since it's not valid JSON
match result {
InputSource::File(path) => assert_eq!(path, PathBuf::from(malformed)),
InputSource::Json(_) => panic!("Expected File input, got File"),
}
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/storage.rs | crates/cast/src/cmd/storage.rs | use crate::{Cast, opts::parse_slot};
use alloy_ens::NameOrAddress;
use alloy_network::AnyNetwork;
use alloy_primitives::{Address, B256, U256};
use alloy_provider::Provider;
use alloy_rpc_types::BlockId;
use clap::Parser;
use comfy_table::{Cell, Table, modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN};
use eyre::Result;
use foundry_block_explorers::Client;
use foundry_cli::{
opts::{BuildOpts, EtherscanOpts, RpcOpts},
utils,
utils::LoadConfig,
};
use foundry_common::{
abi::find_source,
compile::{ProjectCompiler, etherscan_project},
shell,
};
use foundry_compilers::{
Artifact, Project,
artifacts::{ConfigurableContractArtifact, Contract, StorageLayout},
compilers::{
Compiler,
solc::{Solc, SolcCompiler},
},
};
use foundry_config::{
Config,
figment::{self, Metadata, Profile, value::Dict},
impl_figment_convert_cast,
};
use semver::Version;
use serde::{Deserialize, Serialize};
use std::str::FromStr;
/// The minimum Solc version for outputting storage layouts.
///
/// <https://github.com/ethereum/solidity/blob/develop/Changelog.md#065-2020-04-06>
const MIN_SOLC: Version = Version::new(0, 6, 5);
/// CLI arguments for `cast storage`.
#[derive(Clone, Debug, Parser)]
pub struct StorageArgs {
/// The contract address.
#[arg(value_parser = NameOrAddress::from_str)]
address: NameOrAddress,
/// The storage slot number. If not provided, it gets the full storage layout.
#[arg(value_parser = parse_slot)]
base_slot: Option<B256>,
/// The storage offset from the base slot. If not provided, it is assumed to be zero.
#[arg(value_parser = str::parse::<U256>, default_value_t = U256::ZERO)]
offset: U256,
/// The known proxy address. If provided, the storage layout is retrieved from this address.
#[arg(long,value_parser = NameOrAddress::from_str)]
proxy: Option<NameOrAddress>,
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short)]
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
#[command(flatten)]
etherscan: EtherscanOpts,
#[command(flatten)]
build: BuildOpts,
/// Specify the solc version to compile with. Overrides detected version.
#[arg(long, value_parser = Version::parse)]
solc_version: Option<Version>,
}
impl_figment_convert_cast!(StorageArgs);
impl figment::Provider for StorageArgs {
fn metadata(&self) -> Metadata {
Metadata::named("StorageArgs")
}
fn data(&self) -> Result<figment::value::Map<Profile, Dict>, figment::Error> {
let mut map = self.build.data()?;
let dict = map.get_mut(&Config::selected_profile()).unwrap();
dict.extend(self.rpc.dict());
dict.extend(self.etherscan.dict());
Ok(map)
}
}
impl StorageArgs {
pub async fn run(self) -> Result<()> {
let config = self.load_config()?;
let Self { address, base_slot, offset, block, build, .. } = self;
let provider = utils::get_provider(&config)?;
let address = address.resolve(&provider).await?;
// Slot was provided, perform a simple RPC call
if let Some(slot) = base_slot {
let cast = Cast::new(provider);
sh_println!(
"{}",
cast.storage(
address,
(Into::<U256>::into(slot).saturating_add(offset)).into(),
block
)
.await?
)?;
return Ok(());
}
// No slot was provided
// Get deployed bytecode at given address
let address_code =
provider.get_code_at(address).block_id(block.unwrap_or_default()).await?;
if address_code.is_empty() {
eyre::bail!("Provided address has no deployed code and thus no storage");
}
// Check if we're in a forge project and if we can find the address' code
let mut project = build.project()?;
if project.paths.has_input_files() {
// Find in artifacts and pretty print
add_storage_layout_output(&mut project);
let out = ProjectCompiler::new().quiet(shell::is_json()).compile(&project)?;
let artifact = out.artifacts().find(|(_, artifact)| {
artifact.get_deployed_bytecode_bytes().is_some_and(|b| *b == address_code)
});
if let Some((_, artifact)) = artifact {
return fetch_and_print_storage(provider, address, block, artifact).await;
}
}
let chain = utils::get_chain(config.chain, &provider).await?;
let api_key = config.get_etherscan_api_key(Some(chain)).or_else(|| self.etherscan.key()).ok_or_else(|| {
eyre::eyre!("You must provide an Etherscan API key if you're fetching a remote contract's storage.")
})?;
let client = Client::new(chain, api_key)?;
let source = if let Some(proxy) = self.proxy {
find_source(client, proxy.resolve(&provider).await?).await?
} else {
find_source(client, address).await?
};
let metadata = source.items.first().unwrap();
if metadata.is_vyper() {
eyre::bail!("Contract at provided address is not a valid Solidity contract")
}
// Create or reuse a persistent cache for Etherscan sources; fall back to a temp dir
let root_path = if let Some(cache_root) =
foundry_config::Config::foundry_etherscan_chain_cache_dir(chain)
{
let sources_root = cache_root.join("sources");
let contract_root = sources_root.join(format!("{address}"));
if let Err(err) = std::fs::create_dir_all(&contract_root) {
sh_warn!("Could not create etherscan cache dir, falling back to temp: {err}")?;
tempfile::tempdir()?.path().to_path_buf()
} else {
contract_root
}
} else {
tempfile::tempdir()?.keep()
};
let mut project = etherscan_project(metadata, &root_path)?;
add_storage_layout_output(&mut project);
// Decide on compiler to use (user override -> metadata -> autodetect)
let meta_version = metadata.compiler_version()?;
let mut auto_detect = false;
let desired = if let Some(user_version) = self.solc_version {
if user_version < MIN_SOLC {
sh_warn!(
"The provided --solc-version is {user_version} while the minimum version for \
storage layouts is {MIN_SOLC} and as a result the output may be empty."
)?;
}
SolcCompiler::Specific(Solc::find_or_install(&user_version)?)
} else if meta_version < MIN_SOLC {
auto_detect = true;
SolcCompiler::AutoDetect
} else {
SolcCompiler::Specific(Solc::find_or_install(&meta_version)?)
};
project.compiler.solc = Some(desired);
// Compile
let mut out = ProjectCompiler::new().quiet(true).compile(&project)?;
let artifact = {
let (_, mut artifact) = out
.artifacts()
.find(|(name, _)| name == &metadata.contract_name)
.ok_or_else(|| eyre::eyre!("Could not find artifact"))?;
if auto_detect && is_storage_layout_empty(&artifact.storage_layout) {
// try recompiling with the minimum version
sh_warn!(
"The requested contract was compiled with {meta_version} while the minimum version \
for storage layouts is {MIN_SOLC} and as a result the output may be empty.",
)?;
let solc = Solc::find_or_install(&MIN_SOLC)?;
project.compiler.solc = Some(SolcCompiler::Specific(solc));
if let Ok(output) = ProjectCompiler::new().quiet(true).compile(&project) {
out = output;
let (_, new_artifact) = out
.artifacts()
.find(|(name, _)| name == &metadata.contract_name)
.ok_or_else(|| eyre::eyre!("Could not find artifact"))?;
artifact = new_artifact;
}
}
artifact
};
fetch_and_print_storage(provider, address, block, artifact).await
}
}
/// Represents the value of a storage slot `eth_getStorageAt` call.
#[derive(Clone, Debug, PartialEq, Eq)]
struct StorageValue {
/// The slot number.
slot: B256,
/// The value as returned by `eth_getStorageAt`.
raw_slot_value: B256,
}
impl StorageValue {
/// Returns the value of the storage slot, applying the offset if necessary.
fn value(&self, offset: i64, number_of_bytes: Option<usize>) -> B256 {
let offset = offset as usize;
let mut end = 32;
if let Some(number_of_bytes) = number_of_bytes {
end = offset + number_of_bytes;
if end > 32 {
end = 32;
}
}
// reverse range, because the value is stored in big endian
let raw_sliced_value = &self.raw_slot_value.as_slice()[32 - end..32 - offset];
// copy the raw sliced value as tail
let mut value = [0u8; 32];
value[32 - raw_sliced_value.len()..32].copy_from_slice(raw_sliced_value);
B256::from(value)
}
}
/// Represents the storage layout of a contract and its values.
#[derive(Clone, Debug, Serialize, Deserialize)]
struct StorageReport {
#[serde(flatten)]
layout: StorageLayout,
values: Vec<B256>,
}
async fn fetch_and_print_storage<P: Provider<AnyNetwork>>(
provider: P,
address: Address,
block: Option<BlockId>,
artifact: &ConfigurableContractArtifact,
) -> Result<()> {
if is_storage_layout_empty(&artifact.storage_layout) {
sh_warn!("Storage layout is empty.")?;
Ok(())
} else {
let layout = artifact.storage_layout.as_ref().unwrap().clone();
let values = fetch_storage_slots(provider, address, block, &layout).await?;
print_storage(layout, values)
}
}
async fn fetch_storage_slots<P: Provider<AnyNetwork>>(
provider: P,
address: Address,
block: Option<BlockId>,
layout: &StorageLayout,
) -> Result<Vec<StorageValue>> {
let requests = layout.storage.iter().map(|storage_slot| async {
let slot = B256::from(U256::from_str(&storage_slot.slot)?);
let raw_slot_value = provider
.get_storage_at(address, slot.into())
.block_id(block.unwrap_or_default())
.await?;
let value = StorageValue { slot, raw_slot_value: raw_slot_value.into() };
Ok(value)
});
futures::future::try_join_all(requests).await
}
fn print_storage(layout: StorageLayout, values: Vec<StorageValue>) -> Result<()> {
if shell::is_json() {
let values: Vec<_> = layout
.storage
.iter()
.zip(&values)
.map(|(slot, storage_value)| {
let storage_type = layout.types.get(&slot.storage_type);
storage_value.value(
slot.offset,
storage_type.and_then(|t| t.number_of_bytes.parse::<usize>().ok()),
)
})
.collect();
sh_println!(
"{}",
serde_json::to_string_pretty(&serde_json::to_value(StorageReport { layout, values })?)?
)?;
return Ok(());
}
let mut table = Table::new();
if shell::is_markdown() {
table.load_preset(ASCII_MARKDOWN);
} else {
table.apply_modifier(UTF8_ROUND_CORNERS);
}
table.set_header(vec![
Cell::new("Name"),
Cell::new("Type"),
Cell::new("Slot"),
Cell::new("Offset"),
Cell::new("Bytes"),
Cell::new("Value"),
Cell::new("Hex Value"),
Cell::new("Contract"),
]);
for (slot, storage_value) in layout.storage.into_iter().zip(values) {
let storage_type = layout.types.get(&slot.storage_type);
let value = storage_value
.value(slot.offset, storage_type.and_then(|t| t.number_of_bytes.parse::<usize>().ok()));
let converted_value = U256::from_be_bytes(value.0);
table.add_row([
slot.label.as_str(),
storage_type.map_or("?", |t| &t.label),
&slot.slot,
&slot.offset.to_string(),
storage_type.map_or("?", |t| &t.number_of_bytes),
&converted_value.to_string(),
&value.to_string(),
&slot.contract,
]);
}
sh_println!("\n{table}\n")?;
Ok(())
}
fn add_storage_layout_output<C: Compiler<CompilerContract = Contract>>(project: &mut Project<C>) {
project.artifacts.additional_values.storage_layout = true;
project.update_output_selection(|selection| {
selection.0.values_mut().for_each(|contract_selection| {
contract_selection
.values_mut()
.for_each(|selection| selection.push("storageLayout".to_string()))
});
})
}
fn is_storage_layout_empty(storage_layout: &Option<StorageLayout>) -> bool {
if let Some(s) = storage_layout { s.storage.is_empty() } else { true }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_storage_etherscan_api_key() {
let args =
StorageArgs::parse_from(["foundry-cli", "addr.eth", "--etherscan-api-key", "dummykey"]);
assert_eq!(args.etherscan.key(), Some("dummykey".to_string()));
unsafe {
std::env::set_var("ETHERSCAN_API_KEY", "FXY");
}
let config = args.load_config().unwrap();
unsafe {
std::env::remove_var("ETHERSCAN_API_KEY");
}
assert_eq!(config.etherscan_api_key, Some("dummykey".to_string()));
let key = config.get_etherscan_api_key(None).unwrap();
assert_eq!(key, "dummykey".to_string());
}
#[test]
fn parse_solc_version_arg() {
let args = StorageArgs::parse_from(["foundry-cli", "addr.eth", "--solc-version", "0.8.10"]);
assert_eq!(args.solc_version, Some(Version::parse("0.8.10").unwrap()));
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/access_list.rs | crates/cast/src/cmd/access_list.rs | use crate::{
Cast,
tx::{CastTxBuilder, SenderKind},
};
use alloy_ens::NameOrAddress;
use alloy_rpc_types::BlockId;
use clap::Parser;
use eyre::Result;
use foundry_cli::{
opts::{RpcOpts, TransactionOpts},
utils::{self, LoadConfig},
};
use foundry_wallets::WalletOpts;
use std::str::FromStr;
/// CLI arguments for `cast access-list`.
#[derive(Debug, Parser)]
pub struct AccessListArgs {
/// The destination of the transaction.
#[arg(
value_name = "TO",
value_parser = NameOrAddress::from_str
)]
to: Option<NameOrAddress>,
/// The signature of the function to call.
#[arg(value_name = "SIG")]
sig: Option<String>,
/// The arguments of the function to call.
#[arg(value_name = "ARGS", allow_negative_numbers = true)]
args: Vec<String>,
/// The block height to query at.
///
/// Can also be the tags earliest, finalized, safe, latest, or pending.
#[arg(long, short = 'B')]
block: Option<BlockId>,
#[command(flatten)]
tx: TransactionOpts,
#[command(flatten)]
rpc: RpcOpts,
#[command(flatten)]
wallet: WalletOpts,
}
impl AccessListArgs {
    /// Builds the transaction from the CLI options, asks the node for an
    /// access list for it, and prints the result.
    pub async fn run(self) -> Result<()> {
        let Self { to, sig, args, tx, rpc, wallet, block } = self;

        let config = rpc.load_config()?;
        let provider = utils::get_provider(&config)?;
        // The sender is derived from the wallet options (private key, hardware
        // wallet, etc.).
        let sender = SenderKind::from_wallet_opts(wallet).await?;

        // Resolve `to` (possibly an ENS name) and encode calldata from
        // the signature and args; `build_raw` finalizes the request.
        let (tx, _) = CastTxBuilder::new(&provider, tx, &config)
            .await?
            .with_to(to)
            .await?
            .with_code_sig_and_args(None, sig, args)
            .await?
            .build_raw(sender)
            .await?;

        let cast = Cast::new(&provider);
        let access_list: String = cast.access_list(&tx, block).await?;
        sh_println!("{access_list}")?;

        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/mod.rs | crates/cast/src/cmd/mod.rs | //! `cast` subcommands.
//!
//! All subcommands should respect the `foundry_config::Config`.
//! If a subcommand accepts values that are supported by the `Config`, then the subcommand should
//! implement `figment::Provider` which allows the subcommand to override the config's defaults, see
//! [`foundry_config::Config`].
pub mod access_list;
pub mod artifact;
pub mod b2e_payload;
pub mod bind;
pub mod call;
pub mod constructor_args;
pub mod create2;
pub mod creation_code;
pub mod da_estimate;
pub mod erc20;
pub mod estimate;
pub mod find_block;
pub mod interface;
pub mod logs;
pub mod mktx;
pub mod rpc;
pub mod run;
pub mod send;
pub mod storage;
pub mod txpool;
pub mod wallet;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/run.rs | crates/cast/src/cmd/run.rs | use crate::{debug::handle_traces, utils::apply_chain_and_block_specific_env_changes};
use alloy_consensus::Transaction;
use alloy_network::{AnyNetwork, TransactionResponse};
use alloy_primitives::{
Address, Bytes, U256,
map::{AddressSet, HashMap},
};
use alloy_provider::Provider;
use alloy_rpc_types::BlockTransactions;
use clap::Parser;
use eyre::{Result, WrapErr};
use foundry_cli::{
opts::{EtherscanOpts, RpcOpts},
utils::{TraceResult, init_progress},
};
use foundry_common::{SYSTEM_TRANSACTION_TYPE, is_impersonated_tx, is_known_system_sender, shell};
use foundry_compilers::artifacts::EvmVersion;
use foundry_config::{
Config,
figment::{
self, Metadata, Profile,
value::{Dict, Map},
},
};
use foundry_evm::{
Env,
core::env::AsEnvMut,
executors::{EvmError, Executor, TracingExecutor},
opts::EvmOpts,
traces::{InternalTraceMode, TraceMode, Traces},
utils::configure_tx_env,
};
use futures::TryFutureExt;
use revm::DatabaseRef;
/// CLI arguments for `cast run`.
#[derive(Clone, Debug, Parser)]
pub struct RunArgs {
/// The transaction hash.
tx_hash: String,
/// Opens the transaction in the debugger.
#[arg(long, short)]
debug: bool,
/// Whether to identify internal functions in traces.
#[arg(long)]
decode_internal: bool,
/// Defines the depth of a trace
#[arg(long)]
trace_depth: Option<usize>,
/// Print out opcode traces.
#[arg(long, short)]
trace_printer: bool,
/// Executes the transaction only with the state from the previous block.
///
/// May result in different results than the live execution!
#[arg(long)]
quick: bool,
/// Whether to replay system transactions.
#[arg(long, alias = "sys")]
replay_system_txes: bool,
/// Disables the labels in the traces.
#[arg(long, default_value_t = false)]
disable_labels: bool,
/// Label addresses in the trace.
///
/// Example: 0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045:vitalik.eth
#[arg(long, short)]
label: Vec<String>,
#[command(flatten)]
etherscan: EtherscanOpts,
#[command(flatten)]
rpc: RpcOpts,
/// The EVM version to use.
///
/// Overrides the version specified in the config.
#[arg(long)]
evm_version: Option<EvmVersion>,
/// Sets the number of assumed available compute units per second for this provider
///
/// default value: 330
///
/// See also, <https://docs.alchemy.com/reference/compute-units#what-are-cups-compute-units-per-second>
#[arg(long, alias = "cups", value_name = "CUPS")]
pub compute_units_per_second: Option<u64>,
/// Disables rate limiting for this node's provider.
///
/// default value: false
///
/// See also, <https://docs.alchemy.com/reference/compute-units#what-are-cups-compute-units-per-second>
#[arg(long, value_name = "NO_RATE_LIMITS", visible_alias = "no-rpc-rate-limit")]
pub no_rate_limit: bool,
/// Use current project artifacts for trace decoding.
#[arg(long, visible_alias = "la")]
pub with_local_artifacts: bool,
/// Disable block gas limit check.
#[arg(long)]
pub disable_block_gas_limit: bool,
/// Enable the tx gas limit checks as imposed by Osaka (EIP-7825).
#[arg(long)]
pub enable_tx_gas_limit: bool,
}
impl RunArgs {
/// Executes the transaction by replaying it
///
/// This replays the entire block the transaction was mined in unless `quick` is set to true
///
/// Note: This executes the transaction(s) as is: Cheatcodes are disabled
pub async fn run(self) -> Result<()> {
let figment = self.rpc.clone().into_figment(self.with_local_artifacts).merge(&self);
let evm_opts = figment.extract::<EvmOpts>()?;
let mut config = Config::from_provider(figment)?.sanitized();
let label = self.label;
let with_local_artifacts = self.with_local_artifacts;
let debug = self.debug;
let decode_internal = self.decode_internal;
let disable_labels = self.disable_labels;
let compute_units_per_second =
if self.no_rate_limit { Some(u64::MAX) } else { self.compute_units_per_second };
let provider = foundry_cli::utils::get_provider_builder(&config)?
.compute_units_per_second_opt(compute_units_per_second)
.build()?;
let tx_hash = self.tx_hash.parse().wrap_err("invalid tx hash")?;
let tx = provider
.get_transaction_by_hash(tx_hash)
.await
.wrap_err_with(|| format!("tx not found: {tx_hash:?}"))?
.ok_or_else(|| eyre::eyre!("tx not found: {:?}", tx_hash))?;
// check if the tx is a system transaction
if !self.replay_system_txes
&& (is_known_system_sender(tx.from())
|| tx.transaction_type() == Some(SYSTEM_TRANSACTION_TYPE))
{
return Err(eyre::eyre!(
"{:?} is a system transaction.\nReplaying system transactions is currently not supported.",
tx.tx_hash()
));
}
let tx_block_number =
tx.block_number.ok_or_else(|| eyre::eyre!("tx may still be pending: {:?}", tx_hash))?;
// we need to fork off the parent block
config.fork_block_number = Some(tx_block_number - 1);
let create2_deployer = evm_opts.create2_deployer;
let (block, (mut env, fork, chain, networks)) = tokio::try_join!(
// fetch the block the transaction was mined in
provider.get_block(tx_block_number.into()).full().into_future().map_err(Into::into),
TracingExecutor::get_fork_material(&mut config, evm_opts)
)?;
let mut evm_version = self.evm_version;
env.evm_env.cfg_env.disable_block_gas_limit = self.disable_block_gas_limit;
// By default do not enforce transaction gas limits imposed by Osaka (EIP-7825).
// Users can opt-in to enable these limits by setting `enable_tx_gas_limit` to true.
if !self.enable_tx_gas_limit {
env.evm_env.cfg_env.tx_gas_limit_cap = Some(u64::MAX);
}
env.evm_env.cfg_env.limit_contract_code_size = None;
env.evm_env.block_env.number = U256::from(tx_block_number);
if let Some(block) = &block {
env.evm_env.block_env.timestamp = U256::from(block.header.timestamp);
env.evm_env.block_env.beneficiary = block.header.beneficiary;
env.evm_env.block_env.difficulty = block.header.difficulty;
env.evm_env.block_env.prevrandao = Some(block.header.mix_hash.unwrap_or_default());
env.evm_env.block_env.basefee = block.header.base_fee_per_gas.unwrap_or_default();
env.evm_env.block_env.gas_limit = block.header.gas_limit;
// TODO: we need a smarter way to map the block to the corresponding evm_version for
// commonly used chains
if evm_version.is_none() {
// if the block has the excess_blob_gas field, we assume it's a Cancun block
if block.header.excess_blob_gas.is_some() {
evm_version = Some(EvmVersion::Prague);
}
}
apply_chain_and_block_specific_env_changes::<AnyNetwork>(
env.as_env_mut(),
block,
config.networks,
);
}
let trace_mode = TraceMode::Call
.with_debug(self.debug)
.with_decode_internal(if self.decode_internal {
InternalTraceMode::Full
} else {
InternalTraceMode::None
})
.with_state_changes(shell::verbosity() > 4);
let mut executor = TracingExecutor::new(
env.clone(),
fork,
evm_version,
trace_mode,
networks,
create2_deployer,
None,
)?;
let mut env = Env::new_with_spec_id(
env.evm_env.cfg_env.clone(),
env.evm_env.block_env.clone(),
env.tx.clone(),
executor.spec_id(),
);
// Set the state to the moment right before the transaction
if !self.quick {
if !shell::is_json() {
sh_println!("Executing previous transactions from the block.")?;
}
if let Some(block) = block {
let pb = init_progress(block.transactions.len() as u64, "tx");
pb.set_position(0);
let BlockTransactions::Full(ref txs) = block.transactions else {
return Err(eyre::eyre!("Could not get block txs"));
};
for (index, tx) in txs.iter().enumerate() {
// Replay system transactions only if running with `sys` option.
// System transactions such as on L2s don't contain any pricing info so it
// could cause reverts.
if !self.replay_system_txes
&& (is_known_system_sender(tx.from())
|| tx.transaction_type() == Some(SYSTEM_TRANSACTION_TYPE))
{
pb.set_position((index + 1) as u64);
continue;
}
if tx.tx_hash() == tx_hash {
break;
}
configure_tx_env(&mut env.as_env_mut(), &tx.inner);
env.evm_env.cfg_env.disable_balance_check = true;
if let Some(to) = Transaction::to(tx) {
trace!(tx=?tx.tx_hash(),?to, "executing previous call transaction");
executor.transact_with_env(env.clone()).wrap_err_with(|| {
format!(
"Failed to execute transaction: {:?} in block {}",
tx.tx_hash(),
env.evm_env.block_env.number
)
})?;
} else {
trace!(tx=?tx.tx_hash(), "executing previous create transaction");
if let Err(error) = executor.deploy_with_env(env.clone(), None) {
match error {
// Reverted transactions should be skipped
EvmError::Execution(_) => (),
error => {
return Err(error).wrap_err_with(|| {
format!(
"Failed to deploy transaction: {:?} in block {}",
tx.tx_hash(),
env.evm_env.block_env.number
)
});
}
}
}
}
pb.set_position((index + 1) as u64);
}
}
}
// Execute our transaction
let result = {
executor.set_trace_printer(self.trace_printer);
configure_tx_env(&mut env.as_env_mut(), &tx.inner);
if is_impersonated_tx(tx.inner.inner.inner()) {
env.evm_env.cfg_env.disable_balance_check = true;
}
if let Some(to) = Transaction::to(&tx) {
trace!(tx=?tx.tx_hash(), to=?to, "executing call transaction");
TraceResult::try_from(executor.transact_with_env(env))?
} else {
trace!(tx=?tx.tx_hash(), "executing create transaction");
TraceResult::try_from(executor.deploy_with_env(env, None))?
}
};
let contracts_bytecode = fetch_contracts_bytecode_from_trace(&executor, &result)?;
handle_traces(
result,
&config,
chain,
&contracts_bytecode,
label,
with_local_artifacts,
debug,
decode_internal,
disable_labels,
self.trace_depth,
)
.await?;
Ok(())
}
}
/// Collects the bytecode of every unique address (call targets and callers)
/// that appears in `result`'s traces, reading account code from the
/// executor's backend.
///
/// Addresses whose code lookup fails are skipped with a warning; addresses
/// with empty code are omitted from the returned map.
pub fn fetch_contracts_bytecode_from_trace(
    executor: &Executor,
    result: &TraceResult,
) -> Result<HashMap<Address, Bytes>> {
    let mut contracts_bytecode = HashMap::default();
    if let Some(ref traces) = result.traces {
        contracts_bytecode.extend(gather_trace_addresses(traces).filter_map(|addr| {
            // All relevant bytecodes should already be cached in the executor.
            let code = executor
                .backend()
                .basic_ref(addr)
                // Lookup failures are tolerated: warn and drop the address.
                .inspect_err(|e| _ = sh_warn!("Failed to fetch code for {addr}: {e}"))
                .ok()??
                .code?
                .bytes();
            if code.is_empty() {
                return None;
            }
            Some((addr, code))
        }));
    }
    Ok(contracts_bytecode)
}
/// Returns an iterator over the unique, non-zero addresses found in the
/// given traces, considering both the call target and the caller of each
/// trace node.
fn gather_trace_addresses(traces: &Traces) -> impl Iterator<Item = Address> {
    let mut seen = AddressSet::default();
    for (_, trace) in traces {
        for node in trace.arena.nodes() {
            for addr in [node.trace.address, node.trace.caller] {
                if !addr.is_zero() {
                    seen.insert(addr);
                }
            }
        }
    }
    seen.into_iter()
}
impl figment::Provider for RunArgs {
    fn metadata(&self) -> Metadata {
        Metadata::named("RunArgs")
    }

    /// Lets the CLI flags override the corresponding `Config` values
    /// (etherscan API key and EVM version) for the selected profile.
    fn data(&self) -> Result<Map<Profile, Dict>, figment::Error> {
        let mut map = Map::new();

        if let Some(api_key) = &self.etherscan.key {
            map.insert("etherscan_api_key".into(), api_key.as_str().into());
        }

        if let Some(evm_version) = self.evm_version {
            map.insert("evm_version".into(), figment::value::Value::serialize(evm_version)?);
        }

        Ok(Map::from([(Config::selected_profile(), map)]))
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/erc20.rs | crates/cast/src/cmd/erc20.rs | use std::str::FromStr;
use crate::{
cmd::send::cast_send,
format_uint_exp,
tx::{SendTxOpts, signing_provider},
};
use alloy_eips::BlockId;
use alloy_ens::NameOrAddress;
use alloy_primitives::U256;
use alloy_sol_types::sol;
use clap::Parser;
use foundry_cli::{
opts::RpcOpts,
utils::{LoadConfig, get_provider},
};
#[doc(hidden)]
pub use foundry_config::utils::*;
// ERC20 interface bindings (with RPC helpers via `#[sol(rpc)]`) used by the
// `cast erc20` subcommands below; includes optional mint/burn entry points
// for tokens that support them.
sol! {
    #[sol(rpc)]
    interface IERC20 {
        #[derive(Debug)]
        function name() external view returns (string);
        function symbol() external view returns (string);
        function decimals() external view returns (uint8);
        function totalSupply() external view returns (uint256);
        function balanceOf(address owner) external view returns (uint256);
        function transfer(address to, uint256 amount) external returns (bool);
        function approve(address spender, uint256 amount) external returns (bool);
        function allowance(address owner, address spender) external view returns (uint256);
        function mint(address to, uint256 amount) external;
        function burn(uint256 amount) external;
    }
}
/// Interact with ERC20 tokens.
#[derive(Debug, Parser, Clone)]
pub enum Erc20Subcommand {
/// Query ERC20 token balance.
#[command(visible_alias = "b")]
Balance {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The owner to query balance for.
#[arg(value_parser = NameOrAddress::from_str)]
owner: NameOrAddress,
/// The block height to query at.
#[arg(long, short = 'B')]
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Transfer ERC20 tokens.
#[command(visible_alias = "t")]
Transfer {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The recipient address.
#[arg(value_parser = NameOrAddress::from_str)]
to: NameOrAddress,
/// The amount to transfer.
amount: String,
#[command(flatten)]
send_tx: SendTxOpts,
},
/// Approve ERC20 token spending.
#[command(visible_alias = "a")]
Approve {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The spender address.
#[arg(value_parser = NameOrAddress::from_str)]
spender: NameOrAddress,
/// The amount to approve.
amount: String,
#[command(flatten)]
send_tx: SendTxOpts,
},
/// Query ERC20 token allowance.
#[command(visible_alias = "al")]
Allowance {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The owner address.
#[arg(value_parser = NameOrAddress::from_str)]
owner: NameOrAddress,
/// The spender address.
#[arg(value_parser = NameOrAddress::from_str)]
spender: NameOrAddress,
/// The block height to query at.
#[arg(long, short = 'B')]
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Query ERC20 token name.
#[command(visible_alias = "n")]
Name {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The block height to query at.
#[arg(long, short = 'B')]
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Query ERC20 token symbol.
#[command(visible_alias = "s")]
Symbol {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The block height to query at.
#[arg(long, short = 'B')]
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Query ERC20 token decimals.
#[command(visible_alias = "d")]
Decimals {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The block height to query at.
#[arg(long, short = 'B')]
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Query ERC20 token total supply.
#[command(visible_alias = "ts")]
TotalSupply {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The block height to query at.
#[arg(long, short = 'B')]
block: Option<BlockId>,
#[command(flatten)]
rpc: RpcOpts,
},
/// Mint ERC20 tokens (if the token supports minting).
#[command(visible_alias = "m")]
Mint {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The recipient address.
#[arg(value_parser = NameOrAddress::from_str)]
to: NameOrAddress,
/// The amount to mint.
amount: String,
#[command(flatten)]
send_tx: SendTxOpts,
},
/// Burn ERC20 tokens.
#[command(visible_alias = "bu")]
Burn {
/// The ERC20 token contract address.
#[arg(value_parser = NameOrAddress::from_str)]
token: NameOrAddress,
/// The amount to burn.
amount: String,
#[command(flatten)]
send_tx: SendTxOpts,
},
}
impl Erc20Subcommand {
fn rpc(&self) -> &RpcOpts {
match self {
Self::Allowance { rpc, .. } => rpc,
Self::Approve { send_tx, .. } => &send_tx.eth.rpc,
Self::Balance { rpc, .. } => rpc,
Self::Transfer { send_tx, .. } => &send_tx.eth.rpc,
Self::Name { rpc, .. } => rpc,
Self::Symbol { rpc, .. } => rpc,
Self::Decimals { rpc, .. } => rpc,
Self::TotalSupply { rpc, .. } => rpc,
Self::Mint { send_tx, .. } => &send_tx.eth.rpc,
Self::Burn { send_tx, .. } => &send_tx.eth.rpc,
}
}
pub async fn run(self) -> eyre::Result<()> {
let config = self.rpc().load_config()?;
match self {
// Read-only
Self::Allowance { token, owner, spender, block, .. } => {
let provider = get_provider(&config)?;
let token = token.resolve(&provider).await?;
let owner = owner.resolve(&provider).await?;
let spender = spender.resolve(&provider).await?;
let allowance = IERC20::new(token, &provider)
.allowance(owner, spender)
.block(block.unwrap_or_default())
.call()
.await?;
sh_println!("{}", format_uint_exp(allowance))?
}
Self::Balance { token, owner, block, .. } => {
let provider = get_provider(&config)?;
let token = token.resolve(&provider).await?;
let owner = owner.resolve(&provider).await?;
let balance = IERC20::new(token, &provider)
.balanceOf(owner)
.block(block.unwrap_or_default())
.call()
.await?;
sh_println!("{}", format_uint_exp(balance))?
}
Self::Name { token, block, .. } => {
let provider = get_provider(&config)?;
let token = token.resolve(&provider).await?;
let name = IERC20::new(token, &provider)
.name()
.block(block.unwrap_or_default())
.call()
.await?;
sh_println!("{}", name)?
}
Self::Symbol { token, block, .. } => {
let provider = get_provider(&config)?;
let token = token.resolve(&provider).await?;
let symbol = IERC20::new(token, &provider)
.symbol()
.block(block.unwrap_or_default())
.call()
.await?;
sh_println!("{}", symbol)?
}
Self::Decimals { token, block, .. } => {
let provider = get_provider(&config)?;
let token = token.resolve(&provider).await?;
let decimals = IERC20::new(token, &provider)
.decimals()
.block(block.unwrap_or_default())
.call()
.await?;
sh_println!("{}", decimals)?
}
Self::TotalSupply { token, block, .. } => {
let provider = get_provider(&config)?;
let token = token.resolve(&provider).await?;
let total_supply = IERC20::new(token, &provider)
.totalSupply()
.block(block.unwrap_or_default())
.call()
.await?;
sh_println!("{}", format_uint_exp(total_supply))?
}
// State-changing
Self::Transfer { token, to, amount, send_tx, .. } => {
let provider = signing_provider(&send_tx).await?;
let tx = IERC20::new(token.resolve(&provider).await?, &provider)
.transfer(to.resolve(&provider).await?, U256::from_str(&amount)?)
.into_transaction_request();
cast_send(
provider,
tx,
send_tx.cast_async,
send_tx.sync,
send_tx.confirmations,
send_tx.timeout.unwrap_or(config.transaction_timeout),
)
.await?
}
Self::Approve { token, spender, amount, send_tx, .. } => {
let provider = signing_provider(&send_tx).await?;
let tx = IERC20::new(token.resolve(&provider).await?, &provider)
.approve(spender.resolve(&provider).await?, U256::from_str(&amount)?)
.into_transaction_request();
cast_send(
provider,
tx,
send_tx.cast_async,
send_tx.sync,
send_tx.confirmations,
send_tx.timeout.unwrap_or(config.transaction_timeout),
)
.await?
}
Self::Mint { token, to, amount, send_tx, .. } => {
let provider = signing_provider(&send_tx).await?;
let tx = IERC20::new(token.resolve(&provider).await?, &provider)
.mint(to.resolve(&provider).await?, U256::from_str(&amount)?)
.into_transaction_request();
cast_send(
provider,
tx,
send_tx.cast_async,
send_tx.sync,
send_tx.confirmations,
send_tx.timeout.unwrap_or(config.transaction_timeout),
)
.await?
}
Self::Burn { token, amount, send_tx, .. } => {
let provider = signing_provider(&send_tx).await?;
let tx = IERC20::new(token.resolve(&provider).await?, &provider)
.burn(U256::from_str(&amount)?)
.into_transaction_request();
cast_send(
provider,
tx,
send_tx.cast_async,
send_tx.sync,
send_tx.confirmations,
send_tx.timeout.unwrap_or(config.transaction_timeout),
)
.await?
}
};
Ok(())
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/find_block.rs | crates/cast/src/cmd/find_block.rs | use crate::Cast;
use alloy_provider::Provider;
use clap::Parser;
use eyre::Result;
use foundry_cli::{
opts::RpcOpts,
utils::{self, LoadConfig},
};
use futures::join;
/// CLI arguments for `cast find-block`.
#[derive(Clone, Debug, Parser)]
pub struct FindBlockArgs {
    /// The UNIX timestamp to search for, in seconds.
    timestamp: u64,

    // RPC endpoint options.
    #[command(flatten)]
    rpc: RpcOpts,
}
impl FindBlockArgs {
    /// Finds the block whose timestamp is closest to the target timestamp by
    /// binary-searching over block numbers, then prints its number.
    ///
    /// Edge cases: if the chain tip is older than the target, the latest
    /// block is returned; if block 1 is newer than the target, block 1 is
    /// returned.
    pub async fn run(self) -> Result<()> {
        let Self { timestamp, rpc } = self;
        let ts_target = timestamp;
        let config = rpc.load_config()?;

        let provider = utils::get_provider(&config)?;
        let last_block_num = provider.get_block_number().await?;
        let cast_provider = Cast::new(provider);

        // Fetch the timestamps of the chain tip and of block 1 concurrently.
        let res = join!(cast_provider.timestamp(last_block_num), cast_provider.timestamp(1));
        let ts_block_latest: u64 = res.0?.to();
        let ts_block_1: u64 = res.1?.to();

        let block_num = if ts_block_latest < ts_target {
            // If the most recent block's timestamp is below the target, return it
            last_block_num
        } else if ts_block_1 > ts_target {
            // If the target timestamp is below block 1's timestamp, return that
            1
        } else {
            // Otherwise, find the block that is closest to the timestamp
            let mut low_block = 1_u64; // block 0 has a timestamp of 0: https://github.com/ethereum/go-ethereum/issues/17042#issuecomment-559414137
            let mut high_block = last_block_num;
            let mut matching_block = None;
            while high_block > low_block && matching_block.is_none() {
                // Get timestamp of middle block (this approach to avoids overflow)
                let high_minus_low_over_2 = high_block
                    .checked_sub(low_block)
                    .ok_or_else(|| eyre::eyre!("unexpected underflow"))
                    .unwrap()
                    .checked_div(2_u64)
                    .unwrap();
                let mid_block = high_block.checked_sub(high_minus_low_over_2).unwrap();
                let ts_mid_block = cast_provider.timestamp(mid_block).await?.to::<u64>();

                // Check if we've found a match or should keep searching
                if ts_mid_block == ts_target {
                    matching_block = Some(mid_block)
                } else if high_block.checked_sub(low_block).unwrap() == 1_u64 {
                    // The target timestamp is in between these blocks. This rounds to the
                    // highest block if timestamp is equidistant between blocks
                    let res = join!(
                        cast_provider.timestamp(high_block),
                        cast_provider.timestamp(low_block)
                    );
                    let ts_high: u64 = res.0.unwrap().to();
                    let ts_low: u64 = res.1.unwrap().to();
                    let high_diff = ts_high.checked_sub(ts_target).unwrap();
                    let low_diff = ts_target.checked_sub(ts_low).unwrap();
                    let is_low = low_diff < high_diff;
                    matching_block = if is_low { Some(low_block) } else { Some(high_block) }
                } else if ts_mid_block < ts_target {
                    // Mid block is too old: search the upper half.
                    low_block = mid_block;
                } else {
                    // Mid block is too new: search the lower half.
                    high_block = mid_block;
                }
            }
            matching_block.unwrap_or(low_block)
        };

        sh_println!("{block_num}")?;
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/create2.rs | crates/cast/src/cmd/create2.rs | use alloy_primitives::{Address, B256, U256, hex, keccak256};
use clap::Parser;
use eyre::{Result, WrapErr};
use rand::{RngCore, SeedableRng, rngs::StdRng};
use regex::RegexSetBuilder;
use std::{
sync::{
Arc,
atomic::{AtomicBool, Ordering},
},
time::Instant,
};
// https://etherscan.io/address/0x4e59b44847b379578588920ca78fbf26c0b4956c#code
const DEPLOYER: &str = "0x4e59b44847b379578588920ca78fbf26c0b4956c";
/// CLI arguments for `cast create2`.
#[derive(Clone, Debug, Parser)]
pub struct Create2Args {
/// Prefix for the contract address.
#[arg(
long,
short,
required_unless_present_any = &["ends_with", "matching", "salt"],
value_name = "HEX"
)]
starts_with: Option<String>,
/// Suffix for the contract address.
#[arg(long, short, value_name = "HEX")]
ends_with: Option<String>,
/// Sequence that the address has to match.
#[arg(long, short, value_name = "HEX")]
matching: Option<String>,
/// Case sensitive matching.
#[arg(short, long)]
case_sensitive: bool,
/// Address of the contract deployer.
#[arg(
short,
long,
default_value = DEPLOYER,
value_name = "ADDRESS"
)]
deployer: Address,
/// Salt to be used for the contract deployment. This option separate from the default salt
/// mining with filters.
#[arg(
long,
conflicts_with_all = [
"starts_with",
"ends_with",
"matching",
"case_sensitive",
"caller",
"seed",
"no_random"
],
value_name = "HEX"
)]
salt: Option<String>,
/// Init code of the contract to be deployed.
#[arg(short, long, value_name = "HEX")]
init_code: Option<String>,
/// Init code hash of the contract to be deployed.
#[arg(alias = "ch", long, value_name = "HASH", required_unless_present = "init_code")]
init_code_hash: Option<String>,
/// Number of threads to use. Specifying 0 defaults to the number of logical cores.
#[arg(global = true, long, short = 'j', visible_alias = "jobs")]
threads: Option<usize>,
/// Address of the caller. Used for the first 20 bytes of the salt.
#[arg(long, value_name = "ADDRESS")]
caller: Option<Address>,
/// The random number generator's seed, used to initialize the salt.
#[arg(long, value_name = "HEX")]
seed: Option<B256>,
/// Don't initialize the salt with a random value, and instead use the default value of 0.
#[arg(long, conflicts_with = "seed")]
no_random: bool,
}
/// The result of a `cast create2` run.
pub struct Create2Output {
    /// The computed CREATE2 contract address.
    pub address: Address,
    /// The salt that produces `address` for the given deployer and init code.
    pub salt: B256,
}
impl Create2Args {
pub fn run(self) -> Result<Create2Output> {
let Self {
starts_with,
ends_with,
matching,
case_sensitive,
deployer,
salt,
init_code,
init_code_hash,
threads,
caller,
seed,
no_random,
} = self;
let init_code_hash = if let Some(init_code_hash) = init_code_hash {
hex::FromHex::from_hex(init_code_hash)
} else if let Some(init_code) = init_code {
hex::decode(init_code).map(keccak256)
} else {
unreachable!();
}?;
if let Some(salt) = salt {
let salt = hex::FromHex::from_hex(salt)?;
let address = deployer.create2(salt, init_code_hash);
sh_println!("{address}")?;
return Ok(Create2Output { address, salt });
}
let mut regexs = vec![];
if let Some(matches) = matching {
if starts_with.is_some() || ends_with.is_some() {
eyre::bail!("Either use --matching or --starts/ends-with");
}
let matches = matches.trim_start_matches("0x");
if matches.len() != 40 {
eyre::bail!("Please provide a 40 characters long sequence for matching");
}
hex::decode(matches.replace('X', "0")).wrap_err("invalid matching hex provided")?;
// replacing X placeholders by . to match any character at these positions
regexs.push(matches.replace('X', "."));
}
if let Some(prefix) = starts_with {
regexs.push(format!(
r"^{}",
get_regex_hex_string(prefix).wrap_err("invalid prefix hex provided")?
));
}
if let Some(suffix) = ends_with {
regexs.push(format!(
r"{}$",
get_regex_hex_string(suffix).wrap_err("invalid suffix hex provided")?
))
}
debug_assert!(
regexs.iter().map(|p| p.len() - 1).sum::<usize>() <= 40,
"vanity patterns length exceeded. cannot be more than 40 characters",
);
let regex = RegexSetBuilder::new(regexs).case_insensitive(!case_sensitive).build()?;
let mut n_threads = threads.unwrap_or(0);
if n_threads == 0 {
n_threads = std::thread::available_parallelism().map_or(1, |n| n.get());
}
if cfg!(test) {
n_threads = n_threads.min(2);
}
let mut salt = B256::ZERO;
let remaining = if let Some(caller_address) = caller {
salt[..20].copy_from_slice(&caller_address.into_array());
&mut salt[20..]
} else {
&mut salt[..]
};
if !no_random {
let mut rng = match seed {
Some(seed) => StdRng::from_seed(seed.0),
None => StdRng::from_os_rng(),
};
rng.fill_bytes(remaining);
}
sh_println!("Configuration:")?;
sh_println!("Init code hash: {init_code_hash}")?;
sh_println!("Regex patterns: {:?}\n", regex.patterns())?;
sh_println!(
"Starting to generate deterministic contract address with {n_threads} threads..."
)?;
let mut handles = Vec::with_capacity(n_threads);
let found = Arc::new(AtomicBool::new(false));
let timer = Instant::now();
// Loops through all possible salts in parallel until a result is found.
// Each thread iterates over `(i..).step_by(n_threads)`.
for i in 0..n_threads {
// Create local copies for the thread.
let increment = n_threads;
let regex = regex.clone();
let regex_len = regex.patterns().len();
let found = Arc::clone(&found);
handles.push(std::thread::spawn(move || {
// Read the first bytes of the salt as a usize to be able to increment it.
struct B256Aligned(B256, [usize; 0]);
let mut salt = B256Aligned(salt, []);
// SAFETY: B256 is aligned to `usize`.
let salt_word = unsafe {
&mut *salt.0.as_mut_ptr().add(32 - usize::BITS as usize / 8).cast::<usize>()
};
// Important: add the thread index to the salt to avoid duplicate results.
*salt_word = salt_word.wrapping_add(i);
let mut checksum = [0; 42];
loop {
// Stop if a result was found in another thread.
if found.load(Ordering::Relaxed) {
break None;
}
// Calculate the `CREATE2` address.
#[expect(clippy::needless_borrows_for_generic_args)]
let addr = deployer.create2(&salt.0, &init_code_hash);
// Check if the regex matches the calculated address' checksum.
let _ = addr.to_checksum_raw(&mut checksum, None);
// SAFETY: stripping 2 ASCII bytes ("0x") off of an already valid UTF-8 string
// is safe.
let s = unsafe { std::str::from_utf8_unchecked(checksum.get_unchecked(2..)) };
if regex.matches(s).into_iter().count() == regex_len {
// Notify other threads that we found a result.
found.store(true, Ordering::Relaxed);
break Some((addr, salt.0));
}
// Increment the salt for the next iteration.
*salt_word = salt_word.wrapping_add(increment);
}
}));
}
let results = handles.into_iter().filter_map(|h| h.join().unwrap()).collect::<Vec<_>>();
let (address, salt) = results.into_iter().next().unwrap();
sh_println!("Successfully found contract address in {:?}", timer.elapsed())?;
sh_println!("Address: {address}")?;
sh_println!("Salt: {salt} ({})", U256::from_be_bytes(salt.0))?;
Ok(Create2Output { address, salt })
}
}
/// Validates that `s` (optionally `0x`-prefixed) is made of hex digits and
/// returns it without the prefix, suitable for use in a regex pattern.
fn get_regex_hex_string(s: String) -> Result<String> {
    let stripped = match s.strip_prefix("0x") {
        Some(rest) => rest,
        None => &s,
    };
    // `hex::decode` requires an even number of digits, so pad a trailing `0`
    // when the length is odd; the padding is for validation only and is not
    // part of the returned string.
    let width = stripped.len() + stripped.len() % 2;
    let padded = format!("{stripped:0<width$}");
    hex::decode(padded)?;
    Ok(stripped.to_string())
}
// Unit tests for the `cast create2` vanity-address miner. These actually mine,
// so patterns are kept short to stay fast.
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_primitives::{address, b256};
    use std::str::FromStr;
    // Prefix/suffix matching for even and odd pattern lengths, with and
    // without a `0x` prefix, plus rejection of non-hex patterns.
    #[test]
    fn basic_create2() {
        let mk_args = |args: &[&str]| {
            Create2Args::parse_from(["foundry-cli", "--init-code-hash=0x0000000000000000000000000000000000000000000000000000000000000000"].iter().chain(args))
        };
        // even hex chars
        let args = mk_args(&["--starts-with", "aa"]);
        let create2_out = args.run().unwrap();
        assert!(format!("{:x}", create2_out.address).starts_with("aa"));
        let args = mk_args(&["--ends-with", "bb"]);
        let create2_out = args.run().unwrap();
        assert!(format!("{:x}", create2_out.address).ends_with("bb"));
        // odd hex chars
        let args = mk_args(&["--starts-with", "aaa"]);
        let create2_out = args.run().unwrap();
        assert!(format!("{:x}", create2_out.address).starts_with("aaa"));
        let args = mk_args(&["--ends-with", "bbb"]);
        let create2_out = args.run().unwrap();
        assert!(format!("{:x}", create2_out.address).ends_with("bbb"));
        // even hex chars with 0x prefix
        let args = mk_args(&["--starts-with", "0xaa"]);
        let create2_out = args.run().unwrap();
        assert!(format!("{:x}", create2_out.address).starts_with("aa"));
        // odd hex chars with 0x prefix
        let args = mk_args(&["--starts-with", "0xaaa"]);
        let create2_out = args.run().unwrap();
        assert!(format!("{:x}", create2_out.address).starts_with("aaa"));
        // check fails on wrong chars
        let args = mk_args(&["--starts-with", "0xerr"]);
        let create2_out = args.run();
        assert!(create2_out.is_err());
        // check fails on wrong x prefixed string provided
        let args = mk_args(&["--starts-with", "x00"]);
        let create2_out = args.run();
        assert!(create2_out.is_err());
    }
    // `--matching` patterns use `X` as a wildcard nibble.
    #[test]
    fn matches_pattern() {
        let args = Create2Args::parse_from([
            "foundry-cli",
            "--init-code-hash=0x0000000000000000000000000000000000000000000000000000000000000000",
            "--matching=0xbbXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
        ]);
        let create2_out = args.run().unwrap();
        let address = create2_out.address;
        assert!(format!("{address:x}").starts_with("bb"));
    }
    // A fixed salt + deployer + init code must produce a deterministic address.
    #[test]
    fn create2_salt() {
        let args = Create2Args::parse_from([
            "foundry-cli",
            "--deployer=0x8ba1f109551bD432803012645Ac136ddd64DBA72",
            "--salt=0x7c5ea36004851c764c44143b1dcb59679b11c9a68e5f41497f6cf3d480715331",
            "--init-code=0x6394198df16000526103ff60206004601c335afa6040516060f3",
        ]);
        let create2_out = args.run().unwrap();
        let address = create2_out.address;
        assert_eq!(address, address!("0x533AE9D683B10C02EBDB05471642F85230071FC3"));
    }
    // Mining with raw init code: the result must round-trip through
    // `create2_from_code` with the reported salt.
    #[test]
    fn create2_init_code() {
        let init_code = "00";
        let args =
            Create2Args::parse_from(["foundry-cli", "--starts-with=cc", "--init-code", init_code]);
        let create2_out = args.run().unwrap();
        let address = create2_out.address;
        assert!(format!("{address:x}").starts_with("cc"));
        let salt = create2_out.salt;
        let deployer = Address::from_str(DEPLOYER).unwrap();
        assert_eq!(address, deployer.create2_from_code(salt, hex::decode(init_code).unwrap()));
    }
    // Same as above but starting from a precomputed init-code hash.
    #[test]
    fn create2_init_code_hash() {
        let init_code_hash = "bc36789e7a1e281436464229828f817d6612f7b477d66591ff96a9e064bcc98a";
        let args = Create2Args::parse_from([
            "foundry-cli",
            "--starts-with=dd",
            "--init-code-hash",
            init_code_hash,
        ]);
        let create2_out = args.run().unwrap();
        let address = create2_out.address;
        assert!(format!("{address:x}").starts_with("dd"));
        let salt = create2_out.salt;
        let deployer = Address::from_str(DEPLOYER).unwrap();
        assert_eq!(
            address,
            deployer
                .create2(salt, B256::from_slice(hex::decode(init_code_hash).unwrap().as_slice()))
        );
    }
    // `--caller` pins the first 20 bytes of the salt to the caller address.
    #[test]
    fn create2_caller() {
        let init_code_hash = "bc36789e7a1e281436464229828f817d6612f7b477d66591ff96a9e064bcc98a";
        let args = Create2Args::parse_from([
            "foundry-cli",
            "--starts-with=dd",
            "--init-code-hash",
            init_code_hash,
            "--caller=0x66f9664f97F2b50F62D13eA064982f936dE76657",
        ]);
        let create2_out = args.run().unwrap();
        let address = create2_out.address;
        let salt = create2_out.salt;
        assert!(format!("{address:x}").starts_with("dd"));
        assert!(format!("{salt:x}").starts_with("66f9664f97f2b50f62d13ea064982f936de76657"));
    }
    // With a fixed --seed and a single worker, mining is fully deterministic.
    #[test]
    fn deterministic_seed() {
        let args = Create2Args::parse_from([
            "foundry-cli",
            "--starts-with=0x00",
            "--init-code-hash=0x479d7e8f31234e208d704ba1a123c76385cea8a6981fd675b784fbd9cffb918d",
            "--seed=0x479d7e8f31234e208d704ba1a123c76385cea8a6981fd675b784fbd9cffb918d",
            "-j1",
        ]);
        let out = args.run().unwrap();
        assert_eq!(out.address, address!("0x00614b3D65ac4a09A376a264fE1aE5E5E12A6C43"));
        assert_eq!(
            out.salt,
            b256!("0x322113f523203e2c0eb00bbc8e69208b0eb0c8dad0eaac7b01d64ff016edb40d"),
        );
    }
    // `--no-random` disables the random starting salt, also deterministic.
    #[test]
    fn deterministic_output() {
        let args = Create2Args::parse_from([
            "foundry-cli",
            "--starts-with=0x00",
            "--init-code-hash=0x479d7e8f31234e208d704ba1a123c76385cea8a6981fd675b784fbd9cffb918d",
            "--no-random",
            "-j1",
        ]);
        let out = args.run().unwrap();
        assert_eq!(out.address, address!("0x00bF495b8b42fdFeb91c8bCEB42CA4eE7186AEd2"));
        assert_eq!(
            out.salt,
            b256!("0x000000000000000000000000000000000000000000000000df00000000000000"),
        );
    }
    // `-j0` must parse (thread count is clamped elsewhere at runtime).
    #[test]
    fn j0() {
        let args = Create2Args::try_parse_from([
            "foundry-cli",
            "--starts-with=00",
            "--init-code-hash",
            &B256::ZERO.to_string(),
            "-j0",
        ])
        .unwrap();
        assert_eq!(args.threads, Some(0));
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/rpc.rs | crates/cast/src/cmd/rpc.rs | use crate::Cast;
use clap::Parser;
use eyre::Result;
use foundry_cli::{opts::RpcOpts, utils, utils::LoadConfig};
use foundry_common::shell;
use itertools::Itertools;
/// CLI arguments for `cast rpc`.
// NOTE: the `///` comments below double as clap-generated help text, so their
// wording is user-facing; keep maintainer notes in `//` comments like this one.
#[derive(Clone, Debug, Parser)]
pub struct RpcArgs {
    /// RPC method name
    method: String,
    /// RPC parameters
    ///
    /// Interpreted as JSON:
    ///
    /// cast rpc eth_getBlockByNumber 0x123 false
    /// => {"method": "eth_getBlockByNumber", "params": ["0x123", false] ... }
    params: Vec<String>,
    /// Send raw JSON parameters
    ///
    /// The first param will be interpreted as a raw JSON array of params.
    /// If no params are given, stdin will be used. For example:
    ///
    /// cast rpc eth_getBlockByNumber '["0x123", false]' --raw
    /// => {"method": "eth_getBlockByNumber", "params": ["0x123", false] ... }
    #[arg(long, short = 'w')]
    raw: bool,
    // Shared RPC connection options (e.g. --rpc-url), flattened into this command.
    #[command(flatten)]
    rpc: RpcOpts,
}
impl RpcArgs {
    /// Executes the raw JSON-RPC call and prints the node's response.
    ///
    /// Parameter handling:
    /// - `--raw` with no params: reads a single JSON value from stdin.
    /// - `--raw` with params: joins all params with spaces and parses the
    ///   result as one JSON value (falling back to a JSON string).
    /// - default: each param is individually parsed as JSON (or kept as a
    ///   string) and wrapped in a JSON array.
    pub async fn run(self) -> Result<()> {
        let Self { raw, method, params, rpc } = self;
        let config = rpc.load_config()?;
        let provider = utils::get_provider(&config)?;
        let params = if raw {
            if params.is_empty() {
                // Pull exactly one JSON value from stdin; error on empty input.
                serde_json::Deserializer::from_reader(std::io::stdin())
                    .into_iter()
                    .next()
                    .transpose()?
                    .ok_or_else(|| eyre::format_err!("Empty JSON parameters"))?
            } else {
                value_or_string(params.into_iter().join(" "))
            }
        } else {
            serde_json::Value::Array(params.into_iter().map(value_or_string).collect())
        };
        let result = Cast::new(provider).rpc(&method, params).await?;
        if shell::is_json() {
            // Re-parse so the output can be pretty-printed as JSON.
            let result: serde_json::Value = serde_json::from_str(&result)?;
            sh_println!("{}", serde_json::to_string_pretty(&result)?)?;
        } else {
            sh_println!("{}", result)?;
        }
        Ok(())
    }
}
/// Attempts to parse `value` as JSON; if that fails, the text is wrapped in a
/// JSON string so it can still be sent as an RPC parameter.
fn value_or_string(value: String) -> serde_json::Value {
    match serde_json::from_str(&value) {
        Ok(parsed) => parsed,
        Err(_) => serde_json::Value::String(value),
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/constructor_args.rs | crates/cast/src/cmd/constructor_args.rs | use super::{creation_code::fetch_creation_code_from_etherscan, interface::load_abi_from_file};
use alloy_dyn_abi::DynSolType;
use alloy_primitives::{Address, Bytes};
use alloy_provider::Provider;
use clap::Parser;
use eyre::{OptionExt, Result, eyre};
use foundry_cli::{
opts::{EtherscanOpts, RpcOpts},
utils::{self, LoadConfig, fetch_abi_from_etherscan},
};
use foundry_config::Config;
foundry_config::impl_figment_convert!(ConstructorArgsArgs, etherscan, rpc);
/// CLI arguments for `cast creation-args`.
// `///` field comments are clap help text; maintainer notes stay in `//` lines.
#[derive(Parser)]
pub struct ConstructorArgsArgs {
    /// An Ethereum address, for which the bytecode will be fetched.
    contract: Address,
    /// Path to file containing the contract's JSON ABI. It's necessary if the target contract is
    /// not verified on Etherscan
    #[arg(long)]
    abi_path: Option<String>,
    // Etherscan API key / chain selection, flattened into this command.
    #[command(flatten)]
    etherscan: EtherscanOpts,
    // RPC endpoint options, flattened into this command.
    #[command(flatten)]
    rpc: RpcOpts,
}
impl ConstructorArgsArgs {
    /// Fetches the contract's creation bytecode, decodes the trailing
    /// constructor arguments against its ABI, and prints one line per argument.
    pub async fn run(self) -> Result<()> {
        let mut config = self.load_config()?;
        let Self { contract, abi_path, etherscan: _, rpc: _ } = self;
        let provider = utils::get_provider(&config)?;
        // Pin the config chain to whatever the RPC endpoint reports so the
        // Etherscan lookup targets the right network.
        config.chain = Some(provider.get_chain_id().await?.into());
        let bytecode = fetch_creation_code_from_etherscan(contract, &config, provider).await?;
        let args_arr = parse_constructor_args(bytecode, contract, &config, abi_path).await?;
        for arg in args_arr {
            // Print errors (e.g. broken pipe) are intentionally ignored here.
            let _ = sh_println!("{arg}");
        }
        Ok(())
    }
}
/// Fetches the constructor arguments values and types from the creation bytecode and ABI.
///
/// The ABI comes from `abi_path` when given, otherwise from Etherscan. The
/// constructor arguments are taken as the last `inputs.len() * 32` bytes of the
/// creation bytecode and decoded one 32-byte word per input.
//
// NOTE(review): this assumes every constructor argument occupies exactly one
// 32-byte head word. Dynamic ABI types (string/bytes/arrays) place offsets in
// the head and data in a tail, so decoding them here would fail or misreport —
// confirm whether only static-type constructors are expected.
async fn parse_constructor_args(
    bytecode: Bytes,
    contract: Address,
    config: &Config,
    abi_path: Option<String>,
) -> Result<Vec<String>> {
    let abi = if let Some(abi_path) = abi_path {
        load_abi_from_file(&abi_path, None)?
    } else {
        fetch_abi_from_etherscan(contract, config).await?
    };
    // Use the first ABI entry returned; the second tuple element is unused here.
    let abi = abi.into_iter().next().ok_or_eyre("No ABI found.")?;
    let (abi, _) = abi;
    let constructor = abi.constructor.ok_or_else(|| eyre!("No constructor found."))?;
    if constructor.inputs.is_empty() {
        return Err(eyre!("No constructor arguments found."));
    }
    let args_size = constructor.inputs.len() * 32;
    // Guard against bytecode shorter than the expected argument region.
    if bytecode.len() < args_size {
        return Err(eyre!(
            "Invalid creation bytecode length: have {} bytes, need at least {} for {} constructor inputs",
            bytecode.len(),
            args_size,
            constructor.inputs.len()
        ));
    }
    let args_bytes = Bytes::from(bytecode[bytecode.len() - args_size..].to_vec());
    // Decode each 32-byte word with the matching input's declared type.
    let display_args: Vec<String> = args_bytes
        .chunks(32)
        .enumerate()
        .map(|(i, arg)| format_arg(&constructor.inputs[i].ty, arg))
        .collect::<Result<Vec<_>>>()?;
    Ok(display_args)
}
/// Decodes one 32-byte constructor-argument word as Solidity type `ty` and
/// renders both the raw bytes and the decoded value.
fn format_arg(ty: &str, arg: &[u8]) -> Result<String> {
    let parsed_ty: DynSolType = ty.parse()?;
    let value = parsed_ty.abi_decode(arg)?;
    Ok(format!("{} → {value:?}", Bytes::from(arg.to_vec())))
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/txpool.rs | crates/cast/src/cmd/txpool.rs | use alloy_primitives::Address;
use alloy_provider::ext::TxPoolApi;
use clap::Parser;
use foundry_cli::{
opts::RpcOpts,
utils::{self, LoadConfig},
};
/// CLI arguments for `cast tx-pool`.
// Each variant maps 1:1 onto a `txpool_*` RPC endpoint; the `///` comments are
// clap help text and therefore user-facing.
#[derive(Debug, Parser, Clone)]
pub enum TxPoolSubcommands {
    /// Fetches the content of the transaction pool.
    Content {
        #[command(flatten)]
        args: RpcOpts,
    },
    /// Fetches the content of the transaction pool filtered by a specific address.
    ContentFrom {
        /// The Signer to filter the transactions by.
        #[arg(short, long)]
        from: Address,
        #[command(flatten)]
        args: RpcOpts,
    },
    /// Fetches a textual summary of each transaction in the pool.
    Inspect {
        #[command(flatten)]
        args: RpcOpts,
    },
    /// Fetches the current status of the transaction pool.
    Status {
        #[command(flatten)]
        args: RpcOpts,
    },
}
impl TxPoolSubcommands {
pub async fn run(self) -> eyre::Result<()> {
match self {
Self::Content { args } => {
let config = args.load_config()?;
let provider = utils::get_provider(&config)?;
let content = provider.txpool_content().await?;
sh_println!("{}", serde_json::to_string_pretty(&content)?)?;
}
Self::ContentFrom { from, args } => {
let config = args.load_config()?;
let provider = utils::get_provider(&config)?;
let content = provider.txpool_content_from(from).await?;
sh_println!("{}", serde_json::to_string_pretty(&content)?)?;
}
Self::Inspect { args } => {
let config = args.load_config()?;
let provider = utils::get_provider(&config)?;
let inspect = provider.txpool_inspect().await?;
sh_println!("{}", serde_json::to_string_pretty(&inspect)?)?;
}
Self::Status { args } => {
let config = args.load_config()?;
let provider = utils::get_provider(&config)?;
let status = provider.txpool_status().await?;
sh_println!("{}", serde_json::to_string_pretty(&status)?)?;
}
};
Ok(())
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/logs.rs | crates/cast/src/cmd/logs.rs | use crate::Cast;
use alloy_dyn_abi::{DynSolType, DynSolValue, Specifier};
use alloy_ens::NameOrAddress;
use alloy_json_abi::Event;
use alloy_network::AnyNetwork;
use alloy_primitives::{Address, B256, hex::FromHex};
use alloy_rpc_types::{BlockId, BlockNumberOrTag, Filter, FilterBlockOption, FilterSet, Topic};
use clap::Parser;
use eyre::Result;
use foundry_cli::{
opts::RpcOpts,
utils::{self, LoadConfig},
};
use itertools::Itertools;
use std::{io, str::FromStr};
/// CLI arguments for `cast logs`.
// The `///` field comments are clap help text (user-facing); notes for
// maintainers stay in `//` comments.
#[derive(Debug, Parser)]
pub struct LogsArgs {
    /// The block height to start query at.
    ///
    /// Can also be the tags earliest, finalized, safe, latest, or pending.
    #[arg(long)]
    from_block: Option<BlockId>,
    /// The block height to stop query at.
    ///
    /// Can also be the tags earliest, finalized, safe, latest, or pending.
    #[arg(long)]
    to_block: Option<BlockId>,
    /// The contract address to filter on.
    // May be an ENS name; resolved to an address in `run`.
    #[arg(long, value_parser = NameOrAddress::from_str)]
    address: Option<Vec<NameOrAddress>>,
    /// The signature of the event to filter logs by which will be converted to the first topic or
    /// a topic to filter on.
    #[arg(value_name = "SIG_OR_TOPIC")]
    sig_or_topic: Option<String>,
    /// If used with a signature, the indexed fields of the event to filter by. Otherwise, the
    /// remaining topics of the filter.
    #[arg(value_name = "TOPICS_OR_ARGS")]
    topics_or_args: Vec<String>,
    /// If the RPC type and endpoints supports `eth_subscribe` stream logs instead of printing and
    /// exiting. Will continue until interrupted or TO_BLOCK is reached.
    #[arg(long)]
    subscribe: bool,
    /// Number of blocks to query in each chunk when the provider has range limits.
    /// Defaults to 10000 blocks per chunk.
    #[arg(long, default_value_t = 10000)]
    query_size: u64,
    // Shared RPC connection options, flattened into this command.
    #[command(flatten)]
    rpc: RpcOpts,
}
impl LogsArgs {
    /// Builds a log filter from the CLI options and either fetches matching
    /// logs in chunks or streams them via `eth_subscribe`.
    pub async fn run(self) -> Result<()> {
        let Self {
            from_block,
            to_block,
            address,
            sig_or_topic,
            topics_or_args,
            subscribe,
            query_size,
            rpc,
        } = self;
        let config = rpc.load_config()?;
        let provider = utils::get_provider(&config)?;
        let cast = Cast::new(&provider);
        // Resolve any ENS names to concrete addresses concurrently.
        let addresses = match address {
            Some(addresses) => Some(
                futures::future::try_join_all(addresses.into_iter().map(|address| {
                    let provider = provider.clone();
                    async move { address.resolve(&provider).await }
                }))
                .await?,
            ),
            None => None,
        };
        // Normalize block tags/ids to concrete block numbers; defaults are
        // earliest..latest.
        let from_block =
            cast.convert_block_number(Some(from_block.unwrap_or_else(BlockId::earliest))).await?;
        let to_block =
            cast.convert_block_number(Some(to_block.unwrap_or_else(BlockId::latest))).await?;
        let filter = build_filter(from_block, to_block, addresses, sig_or_topic, topics_or_args)?;
        if !subscribe {
            // One-shot query, split into `query_size`-block chunks for
            // providers with range limits.
            let logs = cast.filter_logs_chunked(filter, query_size).await?;
            sh_println!("{logs}")?;
            return Ok(());
        }
        // FIXME: this is a hotfix for <https://github.com/foundry-rs/foundry/issues/7682>
        // currently the alloy `eth_subscribe` impl does not work with all transports, so we use
        // the builtin transport here for now
        let url = config.get_rpc_url_or_localhost_http()?;
        let provider = alloy_provider::ProviderBuilder::<_, _, AnyNetwork>::default()
            .connect(url.as_ref())
            .await?;
        let cast = Cast::new(&provider);
        // Stream matching logs to stdout until interrupted or to_block is hit.
        let mut stdout = io::stdout();
        cast.subscribe(filter, &mut stdout).await?;
        Ok(())
    }
}
/// Builds a Filter by first trying to parse the `sig_or_topic` as an event signature. If
/// successful, `topics_or_args` is parsed as indexed inputs and converted to topics. Otherwise,
/// `sig_or_topic` is prepended to `topics_or_args` and used as raw topics.
fn build_filter(
    from_block: Option<BlockNumberOrTag>,
    to_block: Option<BlockNumberOrTag>,
    address: Option<Vec<Address>>,
    sig_or_topic: Option<String>,
    topics_or_args: Vec<String>,
) -> Result<Filter, eyre::Error> {
    let base = if let Some(sig_or_topic) = sig_or_topic {
        // An event signature takes precedence; anything that does not parse
        // as one is treated as a raw topic hash.
        if let Ok(event) = foundry_common::abi::get_event(sig_or_topic.as_str()) {
            build_filter_event_sig(event, topics_or_args)?
        } else {
            let mut topics = vec![sig_or_topic];
            topics.extend(topics_or_args);
            build_filter_topics(topics)?
        }
    } else {
        Filter::default()
    };
    // Constrain the filter to the requested block range and addresses.
    let mut filter = base.select(FilterBlockOption::Range { from_block, to_block });
    if let Some(addrs) = address {
        filter = filter.address(addrs);
    }
    Ok(filter)
}
/// Creates a [Filter] from the given event signature and arguments.
///
/// Arguments are matched positionally against the event's *indexed* inputs;
/// an empty-string argument leaves that topic position as a wildcard.
fn build_filter_event_sig(event: Event, args: Vec<String>) -> Result<Filter, eyre::Error> {
    let args = args.iter().map(|arg| arg.as_str()).collect::<Vec<_>>();
    // Match the args to indexed inputs. Enumerate so that the ordering can be restored
    // when merging the inputs with arguments and without arguments
    let (with_args, without_args): (Vec<_>, Vec<_>) = event
        .inputs
        .iter()
        .zip(args)
        .filter(|(input, _)| input.indexed)
        .map(|(input, arg)| {
            let kind = input.resolve()?;
            Ok((kind, arg))
        })
        .collect::<Result<Vec<(DynSolType, &str)>>>()?
        .into_iter()
        .enumerate()
        .partition(|(_, (_, arg))| !arg.is_empty());
    // Only parse the inputs with arguments
    let indexed_tokens = with_args
        .iter()
        .map(|(_, (kind, arg))| kind.coerce_str(arg))
        .collect::<Result<Vec<DynSolValue>, _>>()?;
    // Merge the inputs restoring the original ordering
    let mut topics = with_args
        .into_iter()
        .zip(indexed_tokens)
        .map(|((i, _), t)| (i, Some(t)))
        .chain(without_args.into_iter().map(|(i, _)| (i, None)))
        .sorted_by(|(i1, _), (i2, _)| i1.cmp(i2))
        .map(|(_, token)| {
            token
                .map(|token| Topic::from(B256::from_slice(token.abi_encode().as_slice())))
                .unwrap_or(Topic::default())
        })
        .collect::<Vec<Topic>>();
    // Pad to three indexed-topic slots (Solidity allows at most 3 indexed
    // parameters); topic0 is always the event selector.
    topics.resize(3, Topic::default());
    let filter = Filter::new()
        .event_signature(event.selector())
        .topic1(topics[0].clone())
        .topic2(topics[1].clone())
        .topic3(topics[2].clone());
    Ok(filter)
}
/// Creates a [Filter] from raw topic hashes.
fn build_filter_topics(topics: Vec<String>) -> Result<Filter, eyre::Error> {
let mut topics = topics
.into_iter()
.map(|topic| {
if topic.is_empty() {
Ok(Topic::default())
} else {
Ok(Topic::from(B256::from_hex(topic.as_str())?))
}
})
.collect::<Result<Vec<FilterSet<_>>>>()?;
topics.resize(4, Topic::default());
let filter = Filter::new()
.event_signature(topics[0].clone())
.topic1(topics[1].clone())
.topic2(topics[2].clone())
.topic3(topics[3].clone());
Ok(filter)
}
// Unit tests for filter construction: block ranges, signatures vs. raw topics,
// wildcard (empty-string) positions, and error cases.
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_primitives::{U160, U256};
    use alloy_rpc_types::ValueOrArray;
    const ADDRESS: &str = "0x4D1A2e2bB4F88F0250f26Ffff098B0b30B26BF38";
    const TRANSFER_SIG: &str = "Transfer(address indexed,address indexed,uint256)";
    // keccak256 of the ERC-20 Transfer event signature.
    const TRANSFER_TOPIC: &str =
        "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef";
    #[test]
    fn test_build_filter_basic() {
        let from_block = Some(BlockNumberOrTag::from(1337));
        let to_block = Some(BlockNumberOrTag::Latest);
        let address = Address::from_str(ADDRESS).ok();
        let expected = Filter {
            block_option: FilterBlockOption::Range { from_block, to_block },
            address: ValueOrArray::Value(address.unwrap()).into(),
            topics: [vec![].into(), vec![].into(), vec![].into(), vec![].into()],
        };
        let filter =
            build_filter(from_block, to_block, address.map(|addr| vec![addr]), None, vec![])
                .unwrap();
        assert_eq!(filter, expected)
    }
    #[test]
    fn test_build_filter_sig() {
        let expected = Filter {
            block_option: FilterBlockOption::Range { from_block: None, to_block: None },
            address: vec![].into(),
            topics: [
                B256::from_str(TRANSFER_TOPIC).unwrap().into(),
                vec![].into(),
                vec![].into(),
                vec![].into(),
            ],
        };
        let filter =
            build_filter(None, None, None, Some(TRANSFER_SIG.to_string()), vec![]).unwrap();
        assert_eq!(filter, expected)
    }
    #[test]
    fn test_build_filter_mismatch() {
        let expected = Filter {
            block_option: FilterBlockOption::Range { from_block: None, to_block: None },
            address: vec![].into(),
            topics: [
                B256::from_str(TRANSFER_TOPIC).unwrap().into(),
                vec![].into(),
                vec![].into(),
                vec![].into(),
            ],
        };
        let filter = build_filter(
            None,
            None,
            None,
            Some("Swap(address indexed from, address indexed to, uint256 value)".to_string()), // Change signature, should result in error
            vec![],
        )
        .unwrap();
        assert_ne!(filter, expected)
    }
    #[test]
    fn test_build_filter_sig_with_arguments() {
        let addr = Address::from_str(ADDRESS).unwrap();
        // Left-pad the 20-byte address to a 32-byte topic word.
        let addr = U256::from(U160::from_be_bytes(addr.0.0));
        let expected = Filter {
            block_option: FilterBlockOption::Range { from_block: None, to_block: None },
            address: vec![].into(),
            topics: [
                B256::from_str(TRANSFER_TOPIC).unwrap().into(),
                addr.into(),
                vec![].into(),
                vec![].into(),
            ],
        };
        let filter = build_filter(
            None,
            None,
            None,
            Some(TRANSFER_SIG.to_string()),
            vec![ADDRESS.to_string()],
        )
        .unwrap();
        assert_eq!(filter, expected)
    }
    // An empty-string argument skips its indexed slot (wildcard).
    #[test]
    fn test_build_filter_sig_with_skipped_arguments() {
        let addr = Address::from_str(ADDRESS).unwrap();
        let addr = U256::from(U160::from_be_bytes(addr.0.0));
        let expected = Filter {
            block_option: FilterBlockOption::Range { from_block: None, to_block: None },
            address: vec![].into(),
            topics: [
                vec![B256::from_str(TRANSFER_TOPIC).unwrap()].into(),
                vec![].into(),
                addr.into(),
                vec![].into(),
            ],
        };
        let filter = build_filter(
            None,
            None,
            None,
            Some(TRANSFER_SIG.to_string()),
            vec![String::new(), ADDRESS.to_string()],
        )
        .unwrap();
        assert_eq!(filter, expected)
    }
    #[test]
    fn test_build_filter_with_topics() {
        let expected = Filter {
            block_option: FilterBlockOption::Range { from_block: None, to_block: None },
            address: vec![].into(),
            topics: [
                vec![B256::from_str(TRANSFER_TOPIC).unwrap()].into(),
                vec![B256::from_str(TRANSFER_TOPIC).unwrap()].into(),
                vec![].into(),
                vec![].into(),
            ],
        };
        let filter = build_filter(
            None,
            None,
            None,
            Some(TRANSFER_TOPIC.to_string()),
            vec![TRANSFER_TOPIC.to_string()],
        )
        .unwrap();
        assert_eq!(filter, expected)
    }
    #[test]
    fn test_build_filter_with_skipped_topic() {
        let expected = Filter {
            block_option: FilterBlockOption::Range { from_block: None, to_block: None },
            address: vec![].into(),
            topics: [
                vec![B256::from_str(TRANSFER_TOPIC).unwrap()].into(),
                vec![].into(),
                vec![B256::from_str(TRANSFER_TOPIC).unwrap()].into(),
                vec![].into(),
            ],
        };
        let filter = build_filter(
            None,
            None,
            None,
            Some(TRANSFER_TOPIC.to_string()),
            vec![String::new(), TRANSFER_TOPIC.to_string()],
        )
        .unwrap();
        assert_eq!(filter, expected)
    }
    #[test]
    fn test_build_filter_with_multiple_addresses() {
        let expected = Filter {
            block_option: FilterBlockOption::Range { from_block: None, to_block: None },
            address: vec![Address::ZERO, ADDRESS.parse().unwrap()].into(),
            topics: [
                vec![TRANSFER_TOPIC.parse().unwrap()].into(),
                vec![].into(),
                vec![].into(),
                vec![].into(),
            ],
        };
        let filter = build_filter(
            None,
            None,
            Some(vec![Address::ZERO, ADDRESS.parse().unwrap()]),
            Some(TRANSFER_TOPIC.to_string()),
            vec![],
        )
        .unwrap();
        assert_eq!(filter, expected)
    }
    // Error-path tests: malformed arguments and topics must surface parse errors.
    #[test]
    fn test_build_filter_sig_with_mismatched_argument() {
        let err = build_filter(
            None,
            None,
            None,
            Some(TRANSFER_SIG.to_string()),
            vec!["1234".to_string()],
        )
        .err()
        .unwrap()
        .to_string()
        .to_lowercase();
        assert_eq!(err, "parser error:\n1234\n^\ninvalid string length");
    }
    #[test]
    fn test_build_filter_with_invalid_sig_or_topic() {
        let err = build_filter(None, None, None, Some("asdasdasd".to_string()), vec![])
            .err()
            .unwrap()
            .to_string()
            .to_lowercase();
        assert_eq!(err, "odd number of digits");
    }
    #[test]
    fn test_build_filter_with_invalid_sig_or_topic_hex() {
        let err = build_filter(None, None, None, Some(ADDRESS.to_string()), vec![])
            .err()
            .unwrap()
            .to_string()
            .to_lowercase();
        assert_eq!(err, "invalid string length");
    }
    #[test]
    fn test_build_filter_with_invalid_topic() {
        let err = build_filter(
            None,
            None,
            None,
            Some(TRANSFER_TOPIC.to_string()),
            vec!["1234".to_string()],
        )
        .err()
        .unwrap()
        .to_string()
        .to_lowercase();
        assert_eq!(err, "invalid string length");
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/mktx.rs | crates/cast/src/cmd/mktx.rs | use crate::tx::{self, CastTxBuilder};
use alloy_eips::Encodable2718;
use alloy_ens::NameOrAddress;
use alloy_network::{EthereumWallet, NetworkWallet, TransactionBuilder};
use alloy_primitives::{Address, hex};
use alloy_provider::Provider;
use alloy_signer::Signer;
use clap::Parser;
use eyre::Result;
use foundry_cli::{
opts::{EthereumOpts, TransactionOpts},
utils::{LoadConfig, get_provider},
};
use std::{path::PathBuf, str::FromStr};
/// CLI arguments for `cast mktx`.
// `///` field comments are clap help text; keep maintainer notes in `//` lines.
#[derive(Debug, Parser)]
pub struct MakeTxArgs {
    /// The destination of the transaction.
    ///
    /// If not provided, you must use `cast mktx --create`.
    #[arg(value_parser = NameOrAddress::from_str)]
    to: Option<NameOrAddress>,
    /// The signature of the function to call.
    sig: Option<String>,
    /// The arguments of the function to call.
    #[arg(allow_negative_numbers = true)]
    args: Vec<String>,
    // Only the `--create` subcommand exists; see MakeTxSubcommands.
    #[command(subcommand)]
    command: Option<MakeTxSubcommands>,
    #[command(flatten)]
    tx: TransactionOpts,
    /// The path of blob data to be sent.
    // Blob txs (EIP-4844) cannot be legacy and require the --blob flag.
    #[arg(
        long,
        value_name = "BLOB_DATA_PATH",
        conflicts_with = "legacy",
        requires = "blob",
        help_heading = "Transaction options"
    )]
    path: Option<PathBuf>,
    #[command(flatten)]
    eth: EthereumOpts,
    /// Generate a raw RLP-encoded unsigned transaction.
    ///
    /// Relaxes the wallet requirement.
    #[arg(long)]
    raw_unsigned: bool,
    /// Call `eth_signTransaction` using the `--from` argument or $ETH_FROM as sender
    // Mutually exclusive with --raw-unsigned; needs the node to hold the key.
    #[arg(long, requires = "from", conflicts_with = "raw_unsigned")]
    ethsign: bool,
}
// Subcommands for `cast mktx`; `--create` builds a contract-deployment tx,
// reusing the sig/args slots for the constructor.
#[derive(Debug, Parser)]
pub enum MakeTxSubcommands {
    /// Use to deploy raw contract bytecode.
    #[command(name = "--create")]
    Create {
        /// The initialization bytecode of the contract to deploy.
        code: String,
        /// The signature of the constructor.
        sig: Option<String>,
        /// The constructor arguments.
        #[arg(allow_negative_numbers = true)]
        args: Vec<String>,
    },
}
impl MakeTxArgs {
    /// Builds the transaction and prints it in one of four forms:
    /// raw unsigned RLP (`--raw-unsigned`), node-signed (`--ethsign`),
    /// Tempo-network signed, or locally signed (default).
    pub async fn run(self) -> Result<()> {
        let Self { to, mut sig, mut args, command, tx, path, eth, raw_unsigned, ethsign } = self;
        let blob_data = if let Some(path) = path { Some(std::fs::read(path)?) } else { None };
        // `--create` repurposes sig/args as the constructor signature/args.
        let code = if let Some(MakeTxSubcommands::Create {
            code,
            sig: constructor_sig,
            args: constructor_args,
        }) = command
        {
            sig = constructor_sig;
            args = constructor_args;
            Some(code)
        } else {
            None
        };
        let config = eth.load_config()?;
        let provider = get_provider(&config)?;
        let tx_builder = CastTxBuilder::new(&provider, tx.clone(), &config)
            .await?
            .with_to(to)
            .await?
            .with_code_sig_and_args(code, sig, args)
            .await?
            .with_blob_data(blob_data)?;
        if raw_unsigned {
            // Build unsigned raw tx
            // Check if nonce is provided when --from is not specified
            // See: <https://github.com/foundry-rs/foundry/issues/11110>
            if eth.wallet.from.is_none() && tx.nonce.is_none() {
                eyre::bail!(
                    "Missing required parameters for raw unsigned transaction. When --from is not provided, you must specify: --nonce"
                );
            }
            // Use zero address as placeholder for unsigned transactions
            let from = eth.wallet.from.unwrap_or(Address::ZERO);
            let raw_tx = tx_builder.build_unsigned_raw(from).await?;
            sh_println!("{raw_tx}")?;
            return Ok(());
        }
        let is_tempo = tx_builder.is_tempo();
        if ethsign {
            // Use "eth_signTransaction" to sign the transaction only works if the node/RPC has
            // unlocked accounts.
            let (tx, _) = tx_builder.build(config.sender).await?;
            let signed_tx = provider.sign_transaction(tx.into_inner()).await?;
            sh_println!("{signed_tx}")?;
            return Ok(());
        }
        // Default to using the local signer.
        // Get the signer from the wallet, and fail if it can't be constructed.
        let signer = eth.wallet.signer().await?;
        let from = signer.address();
        // Reject a --from that disagrees with the signer's actual address.
        tx::validate_from_address(eth.wallet.from, from)?;
        // Handle Tempo transactions separately
        // TODO(onbjerg): All of this is a side effect of a few things, most notably that we do
        // not use `FoundryNetwork` and `FoundryTransactionRequest` everywhere, which is
        // downstream of the fact that we use `EthereumWallet` everywhere.
        if is_tempo {
            let (ftx, _) = tx_builder.build(&signer).await?;
            // Sign using NetworkWallet<FoundryNetwork>
            let signed_tx = signer.sign_request(ftx).await?;
            // Encode as 2718
            let mut raw_tx = Vec::with_capacity(signed_tx.encode_2718_len());
            signed_tx.encode_2718(&mut raw_tx);
            let signed_tx_hex = hex::encode(&raw_tx);
            sh_println!("0x{signed_tx_hex}")?;
            return Ok(());
        }
        // Standard path: sign locally and print the EIP-2718 encoded tx.
        let (tx, _) = tx_builder.build(&signer).await?;
        let tx = tx.into_inner().build(&EthereumWallet::new(signer)).await?;
        let signed_tx = hex::encode(tx.encoded_2718());
        sh_println!("0x{signed_tx}")?;
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/wallet/vanity.rs | crates/cast/src/cmd/wallet/vanity.rs | use alloy_primitives::{Address, hex};
use alloy_signer::{k256::ecdsa::SigningKey, utils::secret_key_to_address};
use alloy_signer_local::PrivateKeySigner;
use clap::Parser;
use eyre::Result;
use foundry_common::sh_println;
use itertools::Either;
use rayon::iter::{self, ParallelIterator};
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::{
fs,
path::{Path, PathBuf},
time::Instant,
};
/// Type alias for the result of [generate_wallet].
pub type GeneratedWallet = (SigningKey, Address);
/// CLI arguments for `cast wallet vanity`.
// `///` field comments are clap help text; maintainer notes go in `//` lines.
#[derive(Clone, Debug, Parser)]
pub struct VanityArgs {
    /// Prefix regex pattern or hex string.
    // At least one of --starts-with / --ends-with must be given.
    #[arg(long, value_name = "PATTERN", required_unless_present = "ends_with")]
    pub starts_with: Option<String>,
    /// Suffix regex pattern or hex string.
    #[arg(long, value_name = "PATTERN")]
    pub ends_with: Option<String>,
    // 2^64-1 is max possible nonce per [eip-2681](https://eips.ethereum.org/EIPS/eip-2681).
    /// Generate a vanity contract address created by the generated keypair with the specified
    /// nonce.
    #[arg(long)]
    pub nonce: Option<u64>,
    /// Path to save the generated vanity contract address to.
    ///
    /// If provided, the generated vanity addresses will appended to a JSON array in the specified
    /// file.
    #[arg(
        long,
        value_hint = clap::ValueHint::FilePath,
        value_name = "PATH",
    )]
    pub save_path: Option<PathBuf>,
}
/// WalletData contains address and private_key information for a wallet.
#[derive(Serialize, Deserialize)]
struct WalletData {
    // Checksummed address string (EIP-55).
    address: String,
    // `0x`-prefixed hex-encoded private key.
    private_key: String,
}
/// Wallets is a collection of WalletData.
// Serialized as `{"wallets": [...]}` in the save file.
#[derive(Default, Serialize, Deserialize)]
struct Wallets {
    wallets: Vec<WalletData>,
}
impl WalletData {
    /// Builds a serializable record from `wallet`: the EIP-55 checksummed
    /// address and the raw private key hex-encoded with a `0x` prefix.
    pub fn new(wallet: &PrivateKeySigner) -> Self {
        let address = wallet.address().to_checksum(None);
        let private_key = format!("0x{}", hex::encode(wallet.credential().to_bytes()));
        Self { address, private_key }
    }
}
impl VanityArgs {
    /// Mines a vanity wallet matching the prefix/suffix patterns, optionally
    /// matching the contract address created at `nonce` instead, optionally
    /// persisting the result, and prints the key pair.
    ///
    /// Each pattern is parsed either as an exact hex string (fast byte
    /// comparison) or a regex; the four-way match below dispatches to a
    /// specialized matcher for every combination.
    pub fn run(self) -> Result<PrivateKeySigner> {
        let Self { starts_with, ends_with, nonce, save_path } = self;
        let mut left_exact_hex = None;
        let mut left_regex = None;
        if let Some(prefix) = starts_with {
            match parse_pattern(&prefix, true)? {
                Either::Left(left) => left_exact_hex = Some(left),
                Either::Right(re) => left_regex = Some(re),
            }
        }
        let mut right_exact_hex = None;
        let mut right_regex = None;
        if let Some(suffix) = ends_with {
            match parse_pattern(&suffix, false)? {
                Either::Left(right) => right_exact_hex = Some(right),
                Either::Right(re) => right_regex = Some(re),
            }
        }
        // Dispatch helper: each matcher type is monomorphized, so the search
        // call is repeated per arm via this macro instead of a trait object.
        macro_rules! find_vanity {
            ($m:ident, $nonce:ident) => {
                if let Some(nonce) = $nonce {
                    find_vanity_address_with_nonce($m, nonce)
                } else {
                    find_vanity_address($m)
                }
            };
        }
        sh_println!("Starting to generate vanity address...")?;
        let timer = Instant::now();
        let wallet = match (left_exact_hex, left_regex, right_exact_hex, right_regex) {
            (Some(left), _, Some(right), _) => {
                let matcher = HexMatcher { left, right };
                find_vanity!(matcher, nonce)
            }
            (Some(left), _, _, Some(right)) => {
                let matcher = LeftExactRightRegexMatcher { left, right };
                find_vanity!(matcher, nonce)
            }
            (_, Some(left), _, Some(right)) => {
                let matcher = RegexMatcher { left, right };
                find_vanity!(matcher, nonce)
            }
            (_, Some(left), Some(right), _) => {
                let matcher = LeftRegexRightExactMatcher { left, right };
                find_vanity!(matcher, nonce)
            }
            (Some(left), None, None, None) => {
                let matcher = LeftHexMatcher { left };
                find_vanity!(matcher, nonce)
            }
            (None, None, Some(right), None) => {
                let matcher = RightHexMatcher { right };
                find_vanity!(matcher, nonce)
            }
            (None, Some(re), None, None) => {
                let matcher = SingleRegexMatcher { re };
                find_vanity!(matcher, nonce)
            }
            (None, None, None, Some(re)) => {
                let matcher = SingleRegexMatcher { re };
                find_vanity!(matcher, nonce)
            }
            // clap guarantees at least one of the two patterns is present.
            _ => unreachable!(),
        }
        .expect("failed to generate vanity wallet");
        // If a save path is provided, save the generated vanity wallet to the specified path.
        if let Some(save_path) = save_path {
            save_wallet_to_file(&wallet, &save_path)?;
        }
        sh_println!(
            "Successfully found vanity address in {:.3} seconds.{}{}\nAddress: {}\nPrivate Key: 0x{}",
            timer.elapsed().as_secs_f64(),
            if nonce.is_some() { "\nContract address: " } else { "" },
            if let Some(nonce_val) = nonce {
                wallet.address().create(nonce_val).to_checksum(None)
            } else {
                String::new()
            },
            wallet.address().to_checksum(None),
            hex::encode(wallet.credential().to_bytes()),
        )?;
        Ok(wallet)
    }
}
/// Saves the specified `wallet` to a 'vanity_addresses.json' file at the given `save_path`.
/// If the file exists, the wallet data is appended to the existing content;
/// otherwise, a new file is created.
fn save_wallet_to_file(wallet: &PrivateKeySigner, path: &Path) -> Result<()> {
let mut wallets = if path.exists() {
let data = fs::read_to_string(path)?;
serde_json::from_str::<Wallets>(&data).unwrap_or_default()
} else {
Wallets::default()
};
wallets.wallets.push(WalletData::new(wallet));
fs::write(path, serde_json::to_string_pretty(&wallets)?)?;
Ok(())
}
/// Generates random wallets until `matcher` matches the wallet address, returning the wallet.
///
/// Draws candidates from [`wallet_generator`]; `find_any` is presumably
/// rayon's parallel search (all workers stop on the first hit) — confirm
/// against this file's `iter` import.
pub fn find_vanity_address<T: VanityMatcher>(matcher: T) -> Option<PrivateKeySigner> {
    wallet_generator().find_any(create_matcher(matcher)).map(|(key, _)| key.into())
}
/// Generates random wallets until `matcher` matches the contract address created at `nonce`,
/// returning the wallet.
///
/// The candidate address checked is `addr.create(nonce)` (the CREATE contract
/// address), not the EOA address itself; see [`create_nonce_matcher`].
pub fn find_vanity_address_with_nonce<T: VanityMatcher>(
    matcher: T,
    nonce: u64,
) -> Option<PrivateKeySigner> {
    wallet_generator().find_any(create_nonce_matcher(matcher, nonce)).map(|(key, _)| key.into())
}
/// Creates a matcher function, which takes a reference to a [GeneratedWallet]
/// and reports whether `matcher` accepts the wallet's address.
#[inline]
pub fn create_matcher<T: VanityMatcher>(matcher: T) -> impl Fn(&GeneratedWallet) -> bool {
    move |generated: &GeneratedWallet| {
        let (_, address) = generated;
        matcher.is_match(address)
    }
}
/// Creates a contract address matcher function that uses the specified nonce.
/// The returned function takes a reference to a [GeneratedWallet] and reports
/// whether the CREATE contract address derived with `nonce` satisfies `matcher`.
#[inline]
pub fn create_nonce_matcher<T: VanityMatcher>(
    matcher: T,
    nonce: u64,
) -> impl Fn(&GeneratedWallet) -> bool {
    move |generated: &GeneratedWallet| {
        let (_, address) = generated;
        // Match against the contract address this account would deploy at `nonce`.
        matcher.is_match(&address.create(nonce))
    }
}
/// Returns an infinite parallel iterator which yields a [GeneratedWallet].
///
/// NOTE(review): `iter::repeat` here is presumably rayon's parallel `repeat`
/// (callers use `find_any`), which is what makes the vanity search run across
/// all cores — confirm against the file's imports.
#[inline]
pub fn wallet_generator() -> iter::Map<iter::Repeat<()>, impl Fn(()) -> GeneratedWallet> {
    iter::repeat(()).map(|()| generate_wallet())
}
/// Generates a random K-256 signing key and derives its Ethereum address.
pub fn generate_wallet() -> GeneratedWallet {
    let mut rng = rand_08::thread_rng();
    let signing_key = SigningKey::random(&mut rng);
    let derived_address = secret_key_to_address(&signing_key);
    (signing_key, derived_address)
}
/// A trait to match vanity addresses.
///
/// Implementors decide whether a candidate address satisfies the user's
/// prefix/suffix constraints; `Send + Sync` lets matching run on worker threads.
pub trait VanityMatcher: Send + Sync {
    /// Returns `true` if `addr` satisfies this matcher's pattern.
    fn is_match(&self, addr: &Address) -> bool;
}
/// Matches start and end hex.
pub struct HexMatcher {
    pub left: Vec<u8>,
    pub right: Vec<u8>,
}

impl VanityMatcher for HexMatcher {
    /// True when the raw address bytes begin with `left` and end with `right`.
    #[inline]
    fn is_match(&self, addr: &Address) -> bool {
        let raw = addr.as_slice();
        if !raw.starts_with(&self.left) {
            return false;
        }
        raw.ends_with(&self.right)
    }
}
/// Matches only start hex.
pub struct LeftHexMatcher {
    pub left: Vec<u8>,
}

impl VanityMatcher for LeftHexMatcher {
    /// True when the raw address bytes begin with `left`.
    #[inline]
    fn is_match(&self, addr: &Address) -> bool {
        addr.as_slice().starts_with(&self.left)
    }
}
/// Matches only end hex.
pub struct RightHexMatcher {
    pub right: Vec<u8>,
}

impl VanityMatcher for RightHexMatcher {
    /// True when the raw address bytes end with `right`.
    #[inline]
    fn is_match(&self, addr: &Address) -> bool {
        addr.as_slice().ends_with(&self.right)
    }
}
/// Matches start hex and end regex.
pub struct LeftExactRightRegexMatcher {
    pub left: Vec<u8>,
    pub right: Regex,
}

impl VanityMatcher for LeftExactRightRegexMatcher {
    /// Checks the cheap exact-byte prefix first, then the regex against the
    /// lowercase hex rendering of the address.
    #[inline]
    fn is_match(&self, addr: &Address) -> bool {
        let raw = addr.as_slice();
        if !raw.starts_with(&self.left) {
            return false;
        }
        self.right.is_match(&hex::encode(raw))
    }
}
/// Matches start regex and end hex.
pub struct LeftRegexRightExactMatcher {
    pub left: Regex,
    pub right: Vec<u8>,
}

impl VanityMatcher for LeftRegexRightExactMatcher {
    /// Checks the cheap exact-byte suffix first, then the regex against the
    /// lowercase hex rendering of the address.
    #[inline]
    fn is_match(&self, addr: &Address) -> bool {
        let raw = addr.as_slice();
        if !raw.ends_with(&self.right) {
            return false;
        }
        self.left.is_match(&hex::encode(raw))
    }
}
/// Matches a single regex.
pub struct SingleRegexMatcher {
    pub re: Regex,
}

impl VanityMatcher for SingleRegexMatcher {
    /// Runs the regex against the lowercase hex rendering of the address.
    #[inline]
    fn is_match(&self, addr: &Address) -> bool {
        let encoded = hex::encode(addr);
        self.re.is_match(&encoded)
    }
}
/// Matches start and end regex.
pub struct RegexMatcher {
    pub left: Regex,
    pub right: Regex,
}

impl VanityMatcher for RegexMatcher {
    /// Both regexes must accept the lowercase hex rendering of the address.
    #[inline]
    fn is_match(&self, addr: &Address) -> bool {
        let encoded = hex::encode(addr);
        if !self.left.is_match(&encoded) {
            return false;
        }
        self.right.is_match(&encoded)
    }
}
/// Parses a user-supplied vanity pattern.
///
/// Valid hex becomes an exact byte matcher; anything else is compiled into a
/// regex anchored to the start (`^`) or end (`$`) of the hex-encoded address,
/// depending on `is_start`.
fn parse_pattern(pattern: &str, is_start: bool) -> Result<Either<Vec<u8>, Regex>> {
    if let Ok(decoded) = hex::decode(pattern) {
        // An address is exactly 20 bytes, so a hex prefix/suffix of up to 20
        // bytes is allowed (`> 20` rejects). Bug fix: the message previously
        // said "less than 20 bytes", contradicting the accepted range.
        if decoded.len() > 20 {
            return Err(eyre::eyre!("Hex pattern must be at most 20 bytes"));
        }
        Ok(Either::Left(decoded))
    } else {
        // Not valid hex (e.g. odd length or non-hex chars): treat as a regex.
        let (prefix, suffix) = if is_start { ("^", "") } else { ("", "$") };
        Ok(Either::Right(Regex::new(&format!("{prefix}{pattern}{suffix}"))?))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // A single-byte prefix ("00") needs ~256 attempts on average, so these
    // tests perform real mining but stay fast.
    #[test]
    fn find_simple_vanity_start() {
        let args: VanityArgs = VanityArgs::parse_from(["foundry-cli", "--starts-with", "00"]);
        let wallet = args.run().unwrap();
        let addr = wallet.address();
        let addr = format!("{addr:x}");
        assert!(addr.starts_with("00"));
    }
    // "9" has odd length so it cannot decode as hex — exercises the regex path.
    #[test]
    fn find_simple_vanity_start2() {
        let args: VanityArgs = VanityArgs::parse_from(["foundry-cli", "--starts-with", "9"]);
        let wallet = args.run().unwrap();
        let addr = wallet.address();
        let addr = format!("{addr:x}");
        assert!(addr.starts_with('9'));
    }
    #[test]
    fn find_simple_vanity_end() {
        let args: VanityArgs = VanityArgs::parse_from(["foundry-cli", "--ends-with", "00"]);
        let wallet = args.run().unwrap();
        let addr = wallet.address();
        let addr = format!("{addr:x}");
        assert!(addr.ends_with("00"));
    }
    // Verifies --save-path writes a parseable wallets file containing the new entry.
    #[test]
    fn save_path() {
        let tmp = tempfile::NamedTempFile::new().unwrap();
        let args: VanityArgs = VanityArgs::parse_from([
            "foundry-cli",
            "--starts-with",
            "00",
            "--save-path",
            tmp.path().to_str().unwrap(),
        ]);
        args.run().unwrap();
        assert!(tmp.path().exists());
        let s = fs::read_to_string(tmp.path()).unwrap();
        let wallets: Wallets = serde_json::from_str(&s).unwrap();
        assert!(!wallets.wallets.is_empty());
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/wallet/list.rs | crates/cast/src/cmd/wallet/list.rs | use clap::Parser;
use eyre::Result;
use std::env;
use foundry_common::{fs, sh_err, sh_println};
use foundry_config::Config;
use foundry_wallets::wallet_multi::MultiWalletOptsBuilder;
/// CLI arguments for `cast wallet list`.
//
// NOTE(review): only `ledger`, `trezor` and `all` belong to the `hw-wallets`
// arg group, so `--max-senders` (which `requires = "hw-wallets"`) cannot be
// combined with `--aws`/`--gcp`/`--turnkey` alone — confirm that's intended.
// (Regular `//` comments only: `///` doc comments become clap help text.)
#[derive(Clone, Debug, Parser)]
pub struct ListArgs {
    /// List all the accounts in the keystore directory.
    /// Default keystore directory is used if no path provided.
    #[arg(long, default_missing_value = "", num_args(0..=1))]
    dir: Option<String>,
    /// List accounts from a Ledger hardware wallet.
    #[arg(long, short, group = "hw-wallets")]
    ledger: bool,
    /// List accounts from a Trezor hardware wallet.
    #[arg(long, short, group = "hw-wallets")]
    trezor: bool,
    /// List accounts from AWS KMS.
    ///
    /// Ensure either one of AWS_KMS_KEY_IDS (comma-separated) or AWS_KMS_KEY_ID environment
    /// variables are set.
    #[arg(long, hide = !cfg!(feature = "aws-kms"))]
    aws: bool,
    /// List accounts from Google Cloud KMS.
    ///
    /// Ensure the following environment variables are set: GCP_PROJECT_ID, GCP_LOCATION,
    /// GCP_KEY_RING, GCP_KEY_NAME, GCP_KEY_VERSION.
    ///
    /// See: <https://cloud.google.com/kms/docs>
    #[arg(long, hide = !cfg!(feature = "gcp-kms"))]
    gcp: bool,
    /// List accounts from Turnkey.
    #[arg(long, hide = !cfg!(feature = "turnkey"))]
    turnkey: bool,
    /// List all configured accounts.
    #[arg(long, group = "hw-wallets")]
    all: bool,
    // Always Some at runtime: clap fills in default_value = "3".
    /// Max number of addresses to display from hardware wallets.
    #[arg(long, short, default_value = "3", requires = "hw-wallets")]
    max_senders: Option<usize>,
}
impl ListArgs {
    /// Lists accounts from the local keystore and/or the configured hardware
    /// and remote-signer backends, printing one `sender (Label)` line each.
    pub async fn run(self) -> Result<()> {
        // list local accounts as files in keystore dir, no need to unlock / provide password
        // NOTE(review): `turnkey` is missing from this flag check, so passing
        // `--turnkey` alone also lists local accounts — and no `list_senders!`
        // invocation below prints Turnkey senders either. Looks like an
        // incomplete feature; confirm intent before changing.
        if self.dir.is_some()
            || self.all
            || (!self.ledger && !self.trezor && !self.aws && !self.gcp)
        {
            match self.list_local_senders() {
                Ok(()) => {}
                Err(e) => {
                    // Under --all a missing/unreadable keystore is non-fatal; stay quiet.
                    if !self.all {
                        sh_err!("{}", e)?;
                    }
                }
            }
        }
        // Create options for multi wallet - ledger, trezor and AWS
        let list_opts = MultiWalletOptsBuilder::default()
            .ledger(self.ledger || self.all)
            .mnemonic_indexes(Some(vec![0]))
            .trezor(self.trezor || self.all)
            .aws(self.aws || self.all)
            // Under --all, only probe GCP when its env vars are present, to
            // avoid spurious errors for users without GCP configured.
            .gcp(self.gcp || (self.all && gcp_env_vars_set()))
            .turnkey(self.turnkey || self.all)
            .interactives(0)
            .interactive(false)
            .build()
            .expect("build multi wallet");
        // macro to print senders for a list of signers
        macro_rules! list_senders {
            ($signers:expr, $label:literal) => {
                match $signers.await {
                    Ok(signers) => {
                        for signer in signers.unwrap_or_default().iter() {
                            signer
                                // `max_senders` is always Some: clap default_value = "3".
                                .available_senders(self.max_senders.unwrap())
                                .await?
                                .iter()
                                .for_each(|sender| {
                                    let _ = sh_println!("{} ({})", sender, $label);
                                })
                        }
                    }
                    Err(e) => {
                        // Under --all, a backend that fails to initialize is skipped silently.
                        if !self.all {
                            sh_err!("{}", e)?;
                        }
                    }
                }
            };
        }
        list_senders!(list_opts.ledgers(), "Ledger");
        list_senders!(list_opts.trezors(), "Trezor");
        list_senders!(list_opts.aws_signers(), "AWS");
        list_senders!(list_opts.gcp_signers(), "GCP");
        Ok(())
    }
    /// Prints every file name in the keystore directory (one account per file).
    fn list_local_senders(&self) -> Result<()> {
        let keystore_path = self.dir.as_deref().unwrap_or_default();
        let keystore_dir = if keystore_path.is_empty() {
            // Create the keystore default directory if it doesn't exist
            let default_dir = Config::foundry_keystores_dir().unwrap();
            fs::create_dir_all(&default_dir)?;
            default_dir
        } else {
            dunce::canonicalize(keystore_path)?
        };
        // List all files within the keystore directory.
        for entry in std::fs::read_dir(keystore_dir)? {
            let path = entry?.path();
            if path.is_file()
                && let Some(file_name) = path.file_name()
                && let Some(name) = file_name.to_str()
            {
                sh_println!("{name} (Local)")?;
            }
        }
        Ok(())
    }
}
/// Returns `true` only when every environment variable required for Google
/// Cloud KMS signing is present.
fn gcp_env_vars_set() -> bool {
    const REQUIRED_VARS: [&str; 5] =
        ["GCP_PROJECT_ID", "GCP_LOCATION", "GCP_KEY_RING", "GCP_KEY_NAME", "GCP_KEY_VERSION"];
    for var in REQUIRED_VARS {
        if env::var(var).is_err() {
            return false;
        }
    }
    true
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/src/cmd/wallet/mod.rs | crates/cast/src/cmd/wallet/mod.rs | use alloy_chains::Chain;
use alloy_dyn_abi::TypedData;
use alloy_primitives::{Address, B256, Signature, U256, hex};
use alloy_provider::Provider;
use alloy_rpc_types::Authorization;
use alloy_signer::Signer;
use alloy_signer_local::{
MnemonicBuilder, PrivateKeySigner,
coins_bip39::{English, Entropy, Mnemonic},
};
use clap::Parser;
use eyre::{Context, Result};
use foundry_cli::{opts::RpcOpts, utils, utils::LoadConfig};
use foundry_common::{fs, sh_println, shell};
use foundry_config::Config;
use foundry_wallets::{RawWalletOpts, WalletOpts, WalletSigner};
use rand_08::thread_rng;
use serde_json::json;
use std::path::Path;
use yansi::Paint;
pub mod vanity;
use vanity::VanityArgs;
pub mod list;
use list::ListArgs;
/// CLI arguments for `cast wallet`.
//
// (Review notes below use regular `//` comments on purpose: `///` doc
// comments on clap items become user-visible help text.)
#[derive(Debug, Parser)]
pub enum WalletSubcommands {
    /// Create a new random keypair.
    #[command(visible_alias = "n")]
    New {
        /// If provided, then keypair will be written to an encrypted JSON keystore.
        path: Option<String>,
        /// Account name for the keystore file. If provided, the keystore file
        /// will be named using this account name.
        #[arg(value_name = "ACCOUNT_NAME")]
        account_name: Option<String>,
        /// Triggers a hidden password prompt for the JSON keystore.
        ///
        /// Deprecated: prompting for a hidden password is now the default.
        #[arg(long, short, conflicts_with = "unsafe_password")]
        password: bool,
        /// Password for the JSON keystore in cleartext.
        ///
        /// This is UNSAFE to use and we recommend using the --password.
        // NOTE(review): env var here is CAST_PASSWORD, while Import/Remove/
        // DecryptKeystore/ChangePassword use CAST_UNSAFE_PASSWORD — possibly
        // intentional back-compat; confirm before unifying.
        #[arg(long, env = "CAST_PASSWORD", value_name = "PASSWORD")]
        unsafe_password: Option<String>,
        /// Number of wallets to generate.
        #[arg(long, short, default_value = "1")]
        number: u32,
    },
    /// Generates a random BIP39 mnemonic phrase
    #[command(visible_alias = "nm")]
    NewMnemonic {
        /// Number of words for the mnemonic
        #[arg(long, short, default_value = "12")]
        words: usize,
        /// Number of accounts to display
        #[arg(long, short, default_value = "1")]
        accounts: u8,
        /// Entropy to use for the mnemonic
        #[arg(long, short, conflicts_with = "words")]
        entropy: Option<String>,
    },
    /// Generate a vanity address.
    #[command(visible_alias = "va")]
    Vanity(VanityArgs),
    /// Convert a private key to an address.
    #[command(visible_aliases = &["a", "addr"])]
    Address {
        /// If provided, the address will be derived from the specified private key.
        #[arg(value_name = "PRIVATE_KEY")]
        private_key_override: Option<String>,
        #[command(flatten)]
        wallet: WalletOpts,
    },
    /// Derive accounts from a mnemonic
    #[command(visible_alias = "d")]
    Derive {
        /// The accounts will be derived from the specified mnemonic phrase.
        #[arg(value_name = "MNEMONIC")]
        mnemonic: String,
        /// Number of accounts to display.
        #[arg(long, short, default_value = "1")]
        accounts: Option<u8>,
        /// Insecure mode: display private keys in the terminal.
        #[arg(long, default_value = "false")]
        insecure: bool,
    },
    /// Sign a message or typed data.
    #[command(visible_alias = "s")]
    Sign {
        /// The message, typed data, or hash to sign.
        ///
        /// Messages starting with 0x are expected to be hex encoded, which get decoded before
        /// being signed.
        ///
        /// The message will be prefixed with the Ethereum Signed Message header and hashed before
        /// signing, unless `--no-hash` is provided.
        ///
        /// Typed data can be provided as a json string or a file name.
        /// Use --data flag to denote the message is a string of typed data.
        /// Use --data --from-file to denote the message is a file name containing typed data.
        /// The data will be combined and hashed using the EIP712 specification before signing.
        /// The data should be formatted as JSON.
        message: String,
        /// Treat the message as JSON typed data.
        #[arg(long)]
        data: bool,
        /// Treat the message as a file containing JSON typed data. Requires `--data`.
        #[arg(long, requires = "data")]
        from_file: bool,
        /// Treat the message as a raw 32-byte hash and sign it directly without hashing it again.
        #[arg(long, conflicts_with = "data")]
        no_hash: bool,
        #[command(flatten)]
        wallet: WalletOpts,
    },
    /// EIP-7702 sign authorization.
    #[command(visible_alias = "sa")]
    SignAuth {
        /// Address to sign authorization for.
        address: Address,
        #[command(flatten)]
        rpc: RpcOpts,
        #[arg(long)]
        nonce: Option<u64>,
        #[arg(long)]
        chain: Option<Chain>,
        /// If set, indicates the authorization will be broadcast by the signing account itself.
        /// This means the nonce used will be the current nonce + 1 (to account for the
        /// transaction that will include this authorization).
        #[arg(long, conflicts_with = "nonce")]
        self_broadcast: bool,
        #[command(flatten)]
        wallet: WalletOpts,
    },
    /// Verify the signature of a message.
    #[command(visible_alias = "v")]
    Verify {
        /// The original message.
        ///
        /// Treats 0x-prefixed strings as hex encoded bytes.
        /// Non 0x-prefixed strings are treated as raw input message.
        ///
        /// The message will be prefixed with the Ethereum Signed Message header and hashed before
        /// signing, unless `--no-hash` is provided.
        ///
        /// Typed data can be provided as a json string or a file name.
        /// Use --data flag to denote the message is a string of typed data.
        /// Use --data --from-file to denote the message is a file name containing typed data.
        /// The data will be combined and hashed using the EIP712 specification before signing.
        /// The data should be formatted as JSON.
        message: String,
        /// The signature to verify.
        signature: Signature,
        /// The address of the message signer.
        #[arg(long, short)]
        address: Address,
        /// Treat the message as JSON typed data.
        #[arg(long)]
        data: bool,
        /// Treat the message as a file containing JSON typed data. Requires `--data`.
        #[arg(long, requires = "data")]
        from_file: bool,
        /// Treat the message as a raw 32-byte hash and sign it directly without hashing it again.
        #[arg(long, conflicts_with = "data")]
        no_hash: bool,
    },
    /// Import a private key into an encrypted keystore.
    #[command(visible_alias = "i")]
    Import {
        /// The name for the account in the keystore.
        #[arg(value_name = "ACCOUNT_NAME")]
        account_name: String,
        /// If provided, keystore will be saved here instead of the default keystores directory
        /// (~/.foundry/keystores)
        #[arg(long, short)]
        keystore_dir: Option<String>,
        /// Password for the JSON keystore in cleartext
        /// This is unsafe, we recommend using the default hidden password prompt
        #[arg(long, env = "CAST_UNSAFE_PASSWORD", value_name = "PASSWORD")]
        unsafe_password: Option<String>,
        #[command(flatten)]
        raw_wallet_options: RawWalletOpts,
    },
    /// List all the accounts in the keystore default directory
    #[command(visible_alias = "ls")]
    List(ListArgs),
    /// Remove a wallet from the keystore.
    ///
    /// This command requires the wallet alias and will prompt for a password to ensure that only
    /// an authorized user can remove the wallet.
    #[command(visible_aliases = &["rm"], override_usage = "cast wallet remove --name <NAME>")]
    Remove {
        /// The alias (or name) of the wallet to remove.
        #[arg(long, required = true)]
        name: String,
        /// Optionally provide the keystore directory if not provided. default directory will be
        /// used (~/.foundry/keystores).
        #[arg(long)]
        dir: Option<String>,
        /// Password for the JSON keystore in cleartext
        /// This is unsafe, we recommend using the default hidden password prompt
        #[arg(long, env = "CAST_UNSAFE_PASSWORD", value_name = "PASSWORD")]
        unsafe_password: Option<String>,
    },
    /// Derives private key from mnemonic
    // NOTE(review): the "--derive-private-key" alias (leading dashes) looks
    // like a legacy spelling kept for compatibility — confirm before removing.
    #[command(name = "private-key", visible_alias = "pk", aliases = &["derive-private-key", "--derive-private-key"])]
    PrivateKey {
        /// If provided, the private key will be derived from the specified mnemonic phrase.
        #[arg(value_name = "MNEMONIC")]
        mnemonic_override: Option<String>,
        /// If provided, the private key will be derived using the
        /// specified mnemonic index (if integer) or derivation path.
        #[arg(value_name = "MNEMONIC_INDEX_OR_DERIVATION_PATH")]
        mnemonic_index_or_derivation_path_override: Option<String>,
        #[command(flatten)]
        wallet: WalletOpts,
    },
    /// Get the public key for the given private key.
    #[command(visible_aliases = &["pubkey"])]
    PublicKey {
        /// If provided, the public key will be derived from the specified private key.
        #[arg(long = "raw-private-key", value_name = "PRIVATE_KEY")]
        private_key_override: Option<String>,
        #[command(flatten)]
        wallet: WalletOpts,
    },
    /// Decrypt a keystore file to get the private key
    #[command(name = "decrypt-keystore", visible_alias = "dk")]
    DecryptKeystore {
        /// The name for the account in the keystore.
        #[arg(value_name = "ACCOUNT_NAME")]
        account_name: String,
        /// If not provided, keystore will try to be located at the default keystores directory
        /// (~/.foundry/keystores)
        #[arg(long, short)]
        keystore_dir: Option<String>,
        /// Password for the JSON keystore in cleartext
        /// This is unsafe, we recommend using the default hidden password prompt
        #[arg(long, env = "CAST_UNSAFE_PASSWORD", value_name = "PASSWORD")]
        unsafe_password: Option<String>,
    },
    /// Change the password of a keystore file
    #[command(name = "change-password", visible_alias = "cp")]
    ChangePassword {
        /// The name for the account in the keystore.
        #[arg(value_name = "ACCOUNT_NAME")]
        account_name: String,
        /// If not provided, keystore will try to be located at the default keystores directory
        /// (~/.foundry/keystores)
        #[arg(long, short)]
        keystore_dir: Option<String>,
        /// Current password for the JSON keystore in cleartext
        /// This is unsafe, we recommend using the default hidden password prompt
        #[arg(long, env = "CAST_UNSAFE_PASSWORD", value_name = "PASSWORD")]
        unsafe_password: Option<String>,
        /// New password for the JSON keystore in cleartext
        /// This is unsafe, we recommend using the default hidden password prompt
        #[arg(long, env = "CAST_UNSAFE_NEW_PASSWORD", value_name = "NEW_PASSWORD")]
        unsafe_new_password: Option<String>,
    },
}
impl WalletSubcommands {
pub async fn run(self) -> Result<()> {
match self {
Self::New { path, account_name, unsafe_password, number, password } => {
let mut rng = thread_rng();
let mut json_values = if shell::is_json() { Some(vec![]) } else { None };
let path = if let Some(path) = path {
match dunce::canonicalize(&path) {
Ok(path) => {
if !path.is_dir() {
// we require path to be an existing directory
eyre::bail!("`{}` is not a directory", path.display());
}
Some(path)
}
Err(e) => {
eyre::bail!(
"If you specified a directory, please make sure it exists, or create it before running `cast wallet new <DIR>`.\n{path} is not a directory.\nError: {}",
e
);
}
}
} else if unsafe_password.is_some() || password {
let path = Config::foundry_keystores_dir().ok_or_else(|| {
eyre::eyre!("Could not find the default keystore directory.")
})?;
fs::create_dir_all(&path)?;
Some(path)
} else {
None
};
match path {
Some(path) => {
let password = if let Some(password) = unsafe_password {
password
} else {
// if no --unsafe-password was provided read via stdin
rpassword::prompt_password("Enter secret: ")?
};
for i in 0..number {
let account_name_ref =
account_name.as_deref().map(|name| match number {
1 => name.to_string(),
_ => format!("{}_{}", name, i + 1),
});
let (wallet, uuid) = PrivateKeySigner::new_keystore(
&path,
&mut rng,
password.clone(),
account_name_ref.as_deref(),
)?;
let identifier = account_name_ref.as_deref().unwrap_or(&uuid);
if let Some(json) = json_values.as_mut() {
json.push(if shell::verbosity() > 0 {
json!({
"address": wallet.address().to_checksum(None),
"public_key": format!("0x{}", hex::encode(wallet.public_key())),
"path": format!("{}", path.join(identifier).display()),
})
} else {
json!({
"address": wallet.address().to_checksum(None),
"path": format!("{}", path.join(identifier).display()),
})
});
} else {
sh_println!(
"Created new encrypted keystore file: {}",
path.join(identifier).display()
)?;
sh_println!("Address: {}", wallet.address().to_checksum(None))?;
if shell::verbosity() > 0 {
sh_println!(
"Public key: 0x{}",
hex::encode(wallet.public_key())
)?;
}
}
}
}
None => {
for _ in 0..number {
let wallet = PrivateKeySigner::random_with(&mut rng);
if let Some(json) = json_values.as_mut() {
json.push(if shell::verbosity() > 0 {
json!({
"address": wallet.address().to_checksum(None),
"public_key": format!("0x{}", hex::encode(wallet.public_key())),
"private_key": format!("0x{}", hex::encode(wallet.credential().to_bytes())),
})
} else {
json!({
"address": wallet.address().to_checksum(None),
"private_key": format!("0x{}", hex::encode(wallet.credential().to_bytes())),
})
});
} else {
sh_println!("Successfully created new keypair.")?;
sh_println!("Address: {}", wallet.address().to_checksum(None))?;
if shell::verbosity() > 0 {
sh_println!(
"Public key: 0x{}",
hex::encode(wallet.public_key())
)?;
}
sh_println!(
"Private key: 0x{}",
hex::encode(wallet.credential().to_bytes())
)?;
}
}
}
}
if let Some(json) = json_values.as_ref() {
sh_println!("{}", serde_json::to_string_pretty(json)?)?;
}
}
Self::NewMnemonic { words, accounts, entropy } => {
let phrase = if let Some(entropy) = entropy {
let entropy = Entropy::from_slice(hex::decode(entropy)?)?;
Mnemonic::<English>::new_from_entropy(entropy).to_phrase()
} else {
let mut rng = thread_rng();
Mnemonic::<English>::new_with_count(&mut rng, words)?.to_phrase()
};
let format_json = shell::is_json();
if !format_json {
sh_println!("{}", "Generating mnemonic from provided entropy...".yellow())?;
}
let builder = MnemonicBuilder::<English>::default().phrase(phrase.as_str());
let derivation_path = "m/44'/60'/0'/0/";
let wallets = (0..accounts)
.map(|i| builder.clone().derivation_path(format!("{derivation_path}{i}")))
.collect::<Result<Vec<_>, _>>()?;
let wallets =
wallets.into_iter().map(|b| b.build()).collect::<Result<Vec<_>, _>>()?;
if !format_json {
sh_println!("{}", "Successfully generated a new mnemonic.".green())?;
sh_println!("Phrase:\n{phrase}")?;
sh_println!("\nAccounts:")?;
}
let mut accounts = json!([]);
for (i, wallet) in wallets.iter().enumerate() {
let public_key = hex::encode(wallet.public_key());
let private_key = hex::encode(wallet.credential().to_bytes());
if format_json {
accounts.as_array_mut().unwrap().push(if shell::verbosity() > 0 {
json!({
"address": format!("{}", wallet.address()),
"public_key": format!("0x{}", public_key),
"private_key": format!("0x{}", private_key),
})
} else {
json!({
"address": format!("{}", wallet.address()),
"private_key": format!("0x{}", private_key),
})
});
} else {
sh_println!("- Account {i}:")?;
sh_println!("Address: {}", wallet.address())?;
if shell::verbosity() > 0 {
sh_println!("Public key: 0x{}", public_key)?;
}
sh_println!("Private key: 0x{}\n", private_key)?;
}
}
if format_json {
let obj = json!({
"mnemonic": phrase,
"accounts": accounts,
});
sh_println!("{}", serde_json::to_string_pretty(&obj)?)?;
}
}
Self::Vanity(cmd) => {
cmd.run()?;
}
Self::Address { wallet, private_key_override } => {
let wallet = private_key_override
.map(|pk| WalletOpts {
raw: RawWalletOpts { private_key: Some(pk), ..Default::default() },
..Default::default()
})
.unwrap_or(wallet)
.signer()
.await?;
let addr = wallet.address();
sh_println!("{}", addr.to_checksum(None))?;
}
Self::Derive { mnemonic, accounts, insecure } => {
let format_json = shell::is_json();
let mut accounts_json = json!([]);
for i in 0..accounts.unwrap_or(1) {
let wallet = WalletOpts {
raw: RawWalletOpts {
mnemonic: Some(mnemonic.clone()),
mnemonic_index: i as u32,
..Default::default()
},
..Default::default()
}
.signer()
.await?;
match wallet {
WalletSigner::Local(local_wallet) => {
let address = local_wallet.address().to_checksum(None);
let private_key = hex::encode(local_wallet.credential().to_bytes());
if format_json {
if insecure {
accounts_json.as_array_mut().unwrap().push(json!({
"address": format!("{}", address),
"private_key": format!("0x{}", private_key),
}));
} else {
accounts_json.as_array_mut().unwrap().push(json!({
"address": format!("{}", address)
}));
}
} else {
sh_println!("- Account {i}:")?;
if insecure {
sh_println!("Address: {}", address)?;
sh_println!("Private key: 0x{}\n", private_key)?;
} else {
sh_println!("Address: {}\n", address)?;
}
}
}
_ => eyre::bail!("Only local wallets are supported by this command"),
}
}
if format_json {
sh_println!("{}", serde_json::to_string_pretty(&accounts_json)?)?;
}
}
Self::PublicKey { wallet, private_key_override } => {
let wallet = private_key_override
.map(|pk| WalletOpts {
raw: RawWalletOpts { private_key: Some(pk), ..Default::default() },
..Default::default()
})
.unwrap_or(wallet)
.signer()
.await?;
let public_key = match wallet {
WalletSigner::Local(wallet) => wallet.public_key(),
_ => eyre::bail!("Only local wallets are supported by this command"),
};
sh_println!("0x{}", hex::encode(public_key))?;
}
Self::Sign { message, data, from_file, no_hash, wallet } => {
let wallet = wallet.signer().await?;
let sig = if data {
let typed_data: TypedData = if from_file {
// data is a file name, read json from file
foundry_common::fs::read_json_file(message.as_ref())?
} else {
// data is a json string
serde_json::from_str(&message)?
};
wallet.sign_dynamic_typed_data(&typed_data).await?
} else if no_hash {
wallet.sign_hash(&hex::decode(&message)?[..].try_into()?).await?
} else {
wallet.sign_message(&Self::hex_str_to_bytes(&message)?).await?
};
if shell::verbosity() > 0 {
if shell::is_json() {
sh_println!(
"{}",
serde_json::to_string_pretty(&json!({
"message": message,
"address": wallet.address(),
"signature": hex::encode(sig.as_bytes()),
}))?
)?;
} else {
sh_println!(
"Successfully signed!\n Message: {}\n Address: {}\n Signature: 0x{}",
message,
wallet.address(),
hex::encode(sig.as_bytes()),
)?;
}
} else {
// Pipe friendly output
sh_println!("0x{}", hex::encode(sig.as_bytes()))?;
}
}
Self::SignAuth { rpc, nonce, chain, wallet, address, self_broadcast } => {
let wallet = wallet.signer().await?;
let provider = utils::get_provider(&rpc.load_config()?)?;
let nonce = if let Some(nonce) = nonce {
nonce
} else {
let current_nonce = provider.get_transaction_count(wallet.address()).await?;
if self_broadcast {
// When self-broadcasting, the authorization nonce needs to be +1
// because the transaction itself will consume the current nonce
current_nonce + 1
} else {
current_nonce
}
};
let chain_id = if let Some(chain) = chain {
chain.id()
} else {
provider.get_chain_id().await?
};
let auth = Authorization { chain_id: U256::from(chain_id), address, nonce };
let signature = wallet.sign_hash(&auth.signature_hash()).await?;
let auth = auth.into_signed(signature);
if shell::verbosity() > 0 {
if shell::is_json() {
sh_println!(
"{}",
serde_json::to_string_pretty(&json!({
"nonce": nonce,
"chain_id": chain_id,
"address": wallet.address(),
"signature": hex::encode_prefixed(alloy_rlp::encode(&auth)),
}))?
)?;
} else {
sh_println!(
"Successfully signed!\n Nonce: {}\n Chain ID: {}\n Address: {}\n Signature: 0x{}",
nonce,
chain_id,
wallet.address(),
hex::encode_prefixed(alloy_rlp::encode(&auth)),
)?;
}
} else {
// Pipe friendly output
sh_println!("{}", hex::encode_prefixed(alloy_rlp::encode(&auth)))?;
}
}
Self::Verify { message, signature, address, data, from_file, no_hash } => {
let recovered_address = if data {
let typed_data: TypedData = if from_file {
// data is a file name, read json from file
foundry_common::fs::read_json_file(message.as_ref())?
} else {
// data is a json string
serde_json::from_str(&message)?
};
Self::recover_address_from_typed_data(&typed_data, &signature)?
} else if no_hash {
Self::recover_address_from_message_no_hash(
&hex::decode(&message)?[..].try_into()?,
&signature,
)?
} else {
Self::recover_address_from_message(&message, &signature)?
};
if address == recovered_address {
sh_println!("Validation succeeded. Address {address} signed this message.")?;
} else {
eyre::bail!("Validation failed. Address {address} did not sign this message.");
}
}
Self::Import { account_name, keystore_dir, unsafe_password, raw_wallet_options } => {
// Set up keystore directory
let dir = if let Some(path) = keystore_dir {
Path::new(&path).to_path_buf()
} else {
Config::foundry_keystores_dir().ok_or_else(|| {
eyre::eyre!("Could not find the default keystore directory.")
})?
};
fs::create_dir_all(&dir)?;
// check if account exists already
let keystore_path = Path::new(&dir).join(&account_name);
if keystore_path.exists() {
eyre::bail!("Keystore file already exists at {}", keystore_path.display());
}
// get wallet
let wallet = raw_wallet_options
.signer()?
.and_then(|s| match s {
WalletSigner::Local(s) => Some(s),
_ => None,
})
.ok_or_else(|| {
eyre::eyre!(
"\
Did you set a private key or mnemonic?
Run `cast wallet import --help` and use the corresponding CLI
flag to set your key via:
--private-key, --mnemonic-path or --interactive."
)
})?;
let private_key = wallet.credential().to_bytes();
let password = if let Some(password) = unsafe_password {
password
} else {
// if no --unsafe-password was provided read via stdin
rpassword::prompt_password("Enter password: ")?
};
let mut rng = thread_rng();
let (wallet, _) = PrivateKeySigner::encrypt_keystore(
dir,
&mut rng,
private_key,
password,
Some(&account_name),
)?;
let address = wallet.address();
let success_message = format!(
"`{}` keystore was saved successfully. Address: {:?}",
&account_name, address,
);
sh_println!("{}", success_message.green())?;
}
Self::List(cmd) => {
cmd.run().await?;
}
Self::Remove { name, dir, unsafe_password } => {
let dir = if let Some(path) = dir {
Path::new(&path).to_path_buf()
} else {
Config::foundry_keystores_dir().ok_or_else(|| {
eyre::eyre!("Could not find the default keystore directory.")
})?
};
let keystore_path = Path::new(&dir).join(&name);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/tests/cli/selectors.rs | crates/cast/tests/cli/selectors.rs | use foundry_test_utils::util::OutputExt;
use std::path::Path;
casttest!(error_decode_with_openchain, |prj, cmd| {
prj.clear_cache();
cmd.args(["decode-error", "0x7a0e198500000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000000064"]).assert_success().stdout_eq(str![[r#"
ValueTooHigh(uint256,uint256)
101
100
"#]]);
});
casttest!(fourbyte, |_prj, cmd| {
cmd.args(["4byte", "0xa9059cbb"]).assert_success().stdout_eq(str![[r#"
transfer(address,uint256)
"#]]);
});
casttest!(fourbyte_invalid, |_prj, cmd| {
cmd.args(["4byte", "0xa9059c"]).assert_failure().stderr_eq(str![[r#"
error: invalid value '0xa9059c' for '[SELECTOR]': invalid string length
For more information, try '--help'.
"#]]);
});
casttest!(fourbyte_calldata, |_prj, cmd| {
cmd.args(["4byte-calldata", "0xa9059cbb0000000000000000000000000a2ac0c368dc8ec680a0c98c907656bd970675950000000000000000000000000000000000000000000000000000000767954a79"]).assert_success().stdout_eq(str![[r#"
1) "transfer(address,uint256)"
0x0A2AC0c368Dc8eC680a0c98C907656BD97067595
31802608249 [3.18e10]
"#]]);
});
casttest!(fourbyte_calldata_only_selector, |_prj, cmd| {
cmd.args(["4byte-calldata", "0xa9059cbb"]).assert_success().stdout_eq(str![[r#"
transfer(address,uint256)
"#]]);
});
casttest!(fourbyte_calldata_alias, |_prj, cmd| {
cmd.args(["4byte-decode", "0xa9059cbb0000000000000000000000000a2ac0c368dc8ec680a0c98c907656bd970675950000000000000000000000000000000000000000000000000000000767954a79"]).assert_success().stdout_eq(str![[r#"
1) "transfer(address,uint256)"
0x0A2AC0c368Dc8eC680a0c98C907656BD97067595
31802608249 [3.18e10]
"#]]);
});
casttest!(fourbyte_event, |_prj, cmd| {
cmd.args(["4byte-event", "0x7e1db2a1cd12f0506ecd806dba508035b290666b84b096a87af2fd2a1516ede6"])
.assert_success()
.stdout_eq(str![[r#"
updateAuthority(address,uint8)
"#]]);
});
casttest!(fourbyte_event_2, |_prj, cmd| {
cmd.args(["4byte-event", "0xb7009613e63fb13fd59a2fa4c206a992c1f090a44e5d530be255aa17fed0b3dd"])
.assert_success()
.stdout_eq(str![[r#"
canCall(address,address,bytes4)
"#]]);
});
casttest!(upload_signatures, |_prj, cmd| {
// test no prefix is accepted as function
let output = cmd
.args(["upload-signature", "transfer(address,uint256)"])
.assert_success()
.get_output()
.stdout_lossy();
assert!(output.contains("Function transfer(address,uint256): 0xa9059cbb"), "{}", output);
// test event prefix
cmd.args(["upload-signature", "event Transfer(address,uint256)"]);
let output = cmd.assert_success().get_output().stdout_lossy();
assert!(output.contains("Event Transfer(address,uint256): 0x69ca02dd4edd7bf0a4abb9ed3b7af3f14778db5d61921c7dc7cd545266326de2"), "{}", output);
// test error prefix
cmd.args(["upload-signature", "error ERC20InsufficientBalance(address,uint256,uint256)"]);
let output = cmd.assert_success().get_output().stdout_lossy();
assert!(
output.contains("Function ERC20InsufficientBalance(address,uint256,uint256): 0xe450d38c"),
"{}",
output
); // Custom error is interpreted as function
// test multiple sigs
cmd.args([
"upload-signature",
"event Transfer(address,uint256)",
"transfer(address,uint256)",
"approve(address,uint256)",
]);
let output = cmd.assert_success().get_output().stdout_lossy();
assert!(output.contains("Event Transfer(address,uint256): 0x69ca02dd4edd7bf0a4abb9ed3b7af3f14778db5d61921c7dc7cd545266326de2"), "{}", output);
assert!(output.contains("Function transfer(address,uint256): 0xa9059cbb"), "{}", output);
assert!(output.contains("Function approve(address,uint256): 0x095ea7b3"), "{}", output);
// test abi
cmd.args([
"upload-signature",
"event Transfer(address,uint256)",
"transfer(address,uint256)",
"error ERC20InsufficientBalance(address,uint256,uint256)",
Path::new(env!("CARGO_MANIFEST_DIR"))
.join("tests/fixtures/ERC20Artifact.json")
.as_os_str()
.to_str()
.unwrap(),
]);
let output = cmd.assert_success().get_output().stdout_lossy();
assert!(output.contains("Event Transfer(address,uint256): 0x69ca02dd4edd7bf0a4abb9ed3b7af3f14778db5d61921c7dc7cd545266326de2"), "{}", output);
assert!(output.contains("Function transfer(address,uint256): 0xa9059cbb"), "{}", output);
assert!(output.contains("Function approve(address,uint256): 0x095ea7b3"), "{}", output);
assert!(output.contains("Function decimals(): 0x313ce567"), "{}", output);
assert!(output.contains("Function allowance(address,address): 0xdd62ed3e"), "{}", output);
assert!(
output.contains("Function ERC20InsufficientBalance(address,uint256,uint256): 0xe450d38c"),
"{}",
output
);
});
// tests cast can decode event with provided signature
casttest!(event_decode_with_sig, |_prj, cmd| {
cmd.args(["decode-event", "--sig", "MyEvent(uint256,address)", "0x000000000000000000000000000000000000000000000000000000000000004e0000000000000000000000000000000000000000000000000000000000d0004f"]).assert_success().stdout_eq(str![[r#"
78
0x0000000000000000000000000000000000D0004F
"#]]);
cmd.args(["--json"]).assert_success().stdout_eq(str![[r#"
[
78,
"0x0000000000000000000000000000000000D0004F"
]
"#]]);
});
// tests cast can decode event with Openchain API
casttest!(event_decode_with_openchain, |prj, cmd| {
prj.clear_cache();
cmd.args(["decode-event", "0xe27c4c1372396a3d15a9922f74f9dfc7c72b1ad6d63868470787249c356454c1000000000000000000000000000000000000000000000000000000000000004e00000000000000000000000000000000000000000000000000000dd00000004e"]).assert_success().stdout_eq(str![[r#"
BaseCurrencySet(address,uint256)
0x000000000000000000000000000000000000004e
15187004358734 [1.518e13]
"#]]);
});
// tests cast can decode error with provided signature
casttest!(error_decode_with_sig, |_prj, cmd| {
cmd.args(["decode-error", "--sig", "AnotherValueTooHigh(uint256,address)", "0x7191bc6200000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000D0004F"]).assert_success().stdout_eq(str![[r#"
101
0x0000000000000000000000000000000000D0004F
"#]]);
cmd.args(["--json"]).assert_success().stdout_eq(str![[r#"
[
101,
"0x0000000000000000000000000000000000D0004F"
]
"#]]);
});
// tests cast can decode error and event when using local sig identifiers cache
forgetest_init!(error_event_decode_with_cache, |prj, cmd| {
prj.add_source(
"LocalProjectContract",
r#"
contract ContractWithCustomError {
error AnotherValueTooHigh(uint256, address);
event MyUniqueEventWithinLocalProject(uint256 a, address b);
}
"#,
);
// Build and cache project selectors.
cmd.forge_fuse().args(["build"]).assert_success();
cmd.forge_fuse().args(["selectors", "cache"]).assert_success();
// Assert cast can decode custom error with local cache.
cmd.cast_fuse()
.args(["decode-error", "0x7191bc6200000000000000000000000000000000000000000000000000000000000000650000000000000000000000000000000000000000000000000000000000D0004F"])
.assert_success()
.stdout_eq(str![[r#"
AnotherValueTooHigh(uint256,address)
101
0x0000000000000000000000000000000000D0004F
"#]]);
// Assert cast can decode event with local cache.
cmd.cast_fuse()
.args(["decode-event", "0xbd3699995dcc867b64dbb607be2c33be38df9134bef1178df13bfb9446e73104000000000000000000000000000000000000000000000000000000000000004e00000000000000000000000000000000000000000000000000000dd00000004e"])
.assert_success()
.stdout_eq(str![[r#"
MyUniqueEventWithinLocalProject(uint256,address)
78
0x00000000000000000000000000000DD00000004e
"#]]);
});
forgetest!(cache_selectors_from_extra_abis, |prj, cmd| {
// Create folder with ABI JSON files containing a unique error
let abis_dir = prj.root().join("external_abis");
std::fs::create_dir(&abis_dir).unwrap();
std::fs::write(
abis_dir.join("test.json"),
r#"[{
"type": "error",
"name": "MyUniqueExtraAbiError",
"inputs": [
{"name": "value", "type": "uint256"},
{"name": "flag", "type": "bool"}
]
}]"#,
)
.unwrap();
cmd.forge_fuse()
.args(["selectors", "cache", "--extra-abis-path", abis_dir.to_str().unwrap()])
.assert_success();
// Verify with cast decode-error (uses local cache via SignaturesIdentifier)
// Selector for MyUniqueExtraAbiError(uint256,bool) is 0x7819b107
// Encoded: selector + uint256(42) + bool(true)
cmd.cast_fuse()
.args(["decode-error", "0x7819b107000000000000000000000000000000000000000000000000000000000000002a0000000000000000000000000000000000000000000000000000000000000001"])
.assert_success()
.stdout_eq(str![[r#"
MyUniqueExtraAbiError(uint256,bool)
42
true
"#]]);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/tests/cli/erc20.rs | crates/cast/tests/cli/erc20.rs | //! Contains various tests for checking cast erc20 subcommands
use alloy_primitives::U256;
use anvil::NodeConfig;
use foundry_test_utils::util::OutputExt;
mod anvil_const {
/// First Anvil account
pub const PK1: &str = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
pub const ADDR1: &str = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266";
/// Second Anvil account
pub const _PK2: &str = "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d";
pub const ADDR2: &str = "0x70997970C51812dc3A010C7d01b50e0d17dc79C8";
/// Contract address deploying from ADDR1 with nonce 0
pub const TOKEN: &str = "0x5FbDB2315678afecb367f032d93F642f64180aa3";
}
fn get_u256_from_cmd(cmd: &mut foundry_test_utils::TestCommand, args: &[&str]) -> U256 {
let output = cmd.cast_fuse().args(args).assert_success().get_output().stdout_lossy();
// Parse balance from output (format: "100000000000000000000 [1e20]")
output.split_whitespace().next().unwrap().parse().unwrap()
}
fn get_balance(
cmd: &mut foundry_test_utils::TestCommand,
token: &str,
address: &str,
rpc: &str,
) -> U256 {
get_u256_from_cmd(cmd, &["erc20", "balance", token, address, "--rpc-url", rpc])
}
fn get_allowance(
cmd: &mut foundry_test_utils::TestCommand,
token: &str,
owner: &str,
spender: &str,
rpc: &str,
) -> U256 {
get_u256_from_cmd(cmd, &["erc20", "allowance", token, owner, spender, "--rpc-url", rpc])
}
/// Helper function to deploy TestToken contract
fn deploy_test_token(
cmd: &mut foundry_test_utils::TestCommand,
rpc: &str,
private_key: &str,
) -> String {
cmd.args([
"create",
"--private-key",
private_key,
"--rpc-url",
rpc,
"--broadcast",
"src/TestToken.sol:TestToken",
])
.assert_success();
// Return the standard deployment address (nonce 0 from first account)
anvil_const::TOKEN.to_string()
}
/// Helper to setup anvil node and deploy test token
async fn setup_token_test(
prj: &foundry_test_utils::TestProject,
cmd: &mut foundry_test_utils::TestCommand,
) -> (String, String) {
let (_, handle) = anvil::spawn(NodeConfig::test()).await;
let rpc = handle.http_endpoint();
// Deploy TestToken contract
foundry_test_utils::util::initialize(prj.root());
prj.add_source("TestToken.sol", include_str!("../fixtures/TestToken.sol"));
let token = deploy_test_token(cmd, &rpc, anvil_const::PK1);
(rpc, token)
}
// tests that `balance` and `transfer` commands works correctly
forgetest_async!(erc20_transfer_approve_success, |prj, cmd| {
let (rpc, token) = setup_token_test(&prj, &mut cmd).await;
// Test constants
let transfer_amount = U256::from(100_000_000_000_000_000_000u128); // 100 tokens (18 decimals)
let initial_supply = U256::from(1_000_000_000_000_000_000_000u128); // 1000 tokens total supply
// Verify initial balances
let addr1_balance_before = get_balance(&mut cmd, &token, anvil_const::ADDR1, &rpc);
let addr2_balance_before = get_balance(&mut cmd, &token, anvil_const::ADDR2, &rpc);
assert_eq!(addr1_balance_before, initial_supply);
assert_eq!(addr2_balance_before, U256::ZERO);
// Test ERC20 transfer from ADDR1 to ADDR2
cmd.cast_fuse()
.args([
"erc20",
"transfer",
&token,
anvil_const::ADDR2,
&transfer_amount.to_string(),
"--rpc-url",
&rpc,
"--private-key",
anvil_const::PK1,
])
.assert_success();
// Verify balance changes after transfer
let addr1_balance_after = get_balance(&mut cmd, &token, anvil_const::ADDR1, &rpc);
let addr2_balance_after = get_balance(&mut cmd, &token, anvil_const::ADDR2, &rpc);
assert_eq!(addr1_balance_after, addr1_balance_before - transfer_amount);
assert_eq!(addr2_balance_after, addr2_balance_before + transfer_amount);
});
// tests that `approve` and `allowance` commands works correctly
forgetest_async!(erc20_approval_allowance, |prj, cmd| {
let (rpc, token) = setup_token_test(&prj, &mut cmd).await;
// ADDR1 approves ADDR2 to spend their tokens
let approve_amount = U256::from(50_000_000_000_000_000_000u128); // 50 tokens
cmd.cast_fuse()
.args([
"erc20",
"approve",
&token,
anvil_const::ADDR2,
&approve_amount.to_string(),
"--rpc-url",
&rpc,
"--private-key",
anvil_const::PK1,
])
.assert_success();
// Verify allowance was set
let allowance = get_allowance(&mut cmd, &token, anvil_const::ADDR1, anvil_const::ADDR2, &rpc);
assert_eq!(allowance, approve_amount);
});
// tests that `name`, `symbol`, `decimals`, and `totalSupply` commands work correctly
forgetest_async!(erc20_metadata_success, |prj, cmd| {
let (rpc, token) = setup_token_test(&prj, &mut cmd).await;
// Test name
let output = cmd
.cast_fuse()
.args(["erc20", "name", &token, "--rpc-url", &rpc])
.assert_success()
.get_output()
.stdout_lossy();
assert_eq!(output.trim(), "Test Token");
// Test symbol
let output = cmd
.cast_fuse()
.args(["erc20", "symbol", &token, "--rpc-url", &rpc])
.assert_success()
.get_output()
.stdout_lossy();
assert_eq!(output.trim(), "TEST");
// Test decimals
let output = cmd
.cast_fuse()
.args(["erc20", "decimals", &token, "--rpc-url", &rpc])
.assert_success()
.get_output()
.stdout_lossy();
assert_eq!(output.trim(), "18");
// Test totalSupply
let output = cmd
.cast_fuse()
.args(["erc20", "total-supply", &token, "--rpc-url", &rpc])
.assert_success()
.get_output()
.stdout_lossy();
let total_supply: U256 = output.split_whitespace().next().unwrap().parse().unwrap();
assert_eq!(total_supply, U256::from(1_000_000_000_000_000_000_000u128));
});
// tests that `mint` command works correctly
forgetest_async!(erc20_mint_success, |prj, cmd| {
let (rpc, token) = setup_token_test(&prj, &mut cmd).await;
let mint_amount = U256::from(500_000_000_000_000_000_000u128); // 500 tokens
let initial_supply = U256::from(1_000_000_000_000_000_000_000u128); // 1000 tokens
// Get initial balance and supply
let addr2_balance_before = get_balance(&mut cmd, &token, anvil_const::ADDR2, &rpc);
assert_eq!(addr2_balance_before, U256::ZERO);
// Mint tokens to ADDR2 (only owner can mint)
cmd.cast_fuse()
.args([
"erc20",
"mint",
&token,
anvil_const::ADDR2,
&mint_amount.to_string(),
"--rpc-url",
&rpc,
"--private-key",
anvil_const::PK1, // PK1 is the owner/deployer
])
.assert_success();
// Verify balance increased
let addr2_balance_after = get_balance(&mut cmd, &token, anvil_const::ADDR2, &rpc);
assert_eq!(addr2_balance_after, mint_amount);
// Verify totalSupply increased
let output = cmd
.cast_fuse()
.args(["erc20", "total-supply", &token, "--rpc-url", &rpc])
.assert_success()
.get_output()
.stdout_lossy();
let total_supply: U256 = output.split_whitespace().next().unwrap().parse().unwrap();
assert_eq!(total_supply, initial_supply + mint_amount);
});
// tests that `burn` command works correctly
forgetest_async!(erc20_burn_success, |prj, cmd| {
let (rpc, token) = setup_token_test(&prj, &mut cmd).await;
let burn_amount = U256::from(200_000_000_000_000_000_000u128); // 200 tokens
let initial_supply = U256::from(1_000_000_000_000_000_000_000u128); // 1000 tokens
// Get initial balance
let addr1_balance_before = get_balance(&mut cmd, &token, anvil_const::ADDR1, &rpc);
assert_eq!(addr1_balance_before, initial_supply);
// Burn tokens from ADDR1
cmd.cast_fuse()
.args([
"erc20",
"burn",
&token,
&burn_amount.to_string(),
"--rpc-url",
&rpc,
"--private-key",
anvil_const::PK1,
])
.assert_success();
// Verify balance decreased
let addr1_balance_after = get_balance(&mut cmd, &token, anvil_const::ADDR1, &rpc);
assert_eq!(addr1_balance_after, addr1_balance_before - burn_amount);
// Verify totalSupply decreased
let output = cmd
.cast_fuse()
.args(["erc20", "total-supply", &token, "--rpc-url", &rpc])
.assert_success()
.get_output()
.stdout_lossy();
let total_supply: U256 = output.split_whitespace().next().unwrap().parse().unwrap();
assert_eq!(total_supply, initial_supply - burn_amount);
});
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/tests/cli/main.rs | crates/cast/tests/cli/main.rs | //! Contains various tests for checking cast commands
use alloy_chains::NamedChain;
use alloy_hardforks::EthereumHardfork;
use alloy_network::{TransactionBuilder, TransactionResponse};
use alloy_primitives::{B256, Bytes, U256, address, b256, hex};
use alloy_provider::{Provider, ProviderBuilder};
use alloy_rpc_types::{Authorization, BlockNumberOrTag, Index, TransactionRequest};
use alloy_signer::Signer;
use alloy_signer_local::PrivateKeySigner;
use anvil::NodeConfig;
use foundry_test_utils::{
rpc::{
next_etherscan_api_key, next_http_archive_rpc_url, next_http_rpc_endpoint,
next_rpc_endpoint, next_ws_rpc_endpoint,
},
snapbox::IntoData as _,
str,
util::OutputExt,
};
use serde_json::json;
use std::{fs, path::Path, str::FromStr};
#[macro_use]
extern crate foundry_test_utils;
mod erc20;
mod selectors;
casttest!(print_short_version, |_prj, cmd| {
cmd.arg("-V").assert_success().stdout_eq(str![[r#"
cast [..]-[..] ([..] [..])
"#]]);
});
casttest!(print_long_version, |_prj, cmd| {
cmd.arg("--version").assert_success().stdout_eq(str![[r#"
cast Version: [..]
Commit SHA: [..]
Build Timestamp: [..]
Build Profile: [..]
"#]]);
});
// tests `--help` is printed to std out
casttest!(print_help, |_prj, cmd| {
cmd.arg("--help").assert_success().stdout_eq(str![[r#"
A Swiss Army knife for interacting with Ethereum applications from the command line
Usage: cast[..] <COMMAND>
Commands:
...
Options:
-h, --help
Print help (see a summary with '-h')
-j, --threads <THREADS>
Number of threads to use. Specifying 0 defaults to the number of logical cores
...
[aliases: --jobs]
-V, --version
Print version
Display options:
--color <COLOR>
The color of the log messages
Possible values:
- auto: Intelligently guess whether to use color output (default)
- always: Force color output
- never: Force disable color output
--json
Format log messages as JSON
--md
Format log messages as Markdown
-q, --quiet
Do not print log messages
-v, --verbosity...
Verbosity level of the log messages.
...
Pass multiple times to increase the verbosity (e.g. -v, -vv, -vvv).
...
Depending on the context the verbosity levels have different meanings.
...
For example, the verbosity levels of the EVM are:
- 2 (-vv): Print logs for all tests.
- 3 (-vvv): Print execution traces for failing tests.
- 4 (-vvvv): Print execution traces for all tests, and setup traces for failing tests.
- 5 (-vvvvv): Print execution and setup traces for all tests, including storage changes
and
backtraces with line numbers.
Find more information in the book: https://getfoundry.sh/cast/overview
"#]]);
});
// tests that the `cast block` command works correctly
casttest!(latest_block, |_prj, cmd| {
let eth_rpc_url = next_http_rpc_endpoint();
// Call `cast find-block`
cmd.args(["block", "latest", "--rpc-url", eth_rpc_url.as_str()]);
cmd.assert_success().stdout_eq(str![[r#"
baseFeePerGas [..]
difficulty [..]
extraData [..]
gasLimit [..]
gasUsed [..]
hash [..]
logsBloom [..]
miner [..]
mixHash [..]
nonce [..]
number [..]
parentHash [..]
parentBeaconRoot [..]
transactionsRoot [..]
receiptsRoot [..]
sha3Uncles [..]
size [..]
stateRoot [..]
timestamp [..]
withdrawalsRoot [..]
totalDifficulty [..]
blobGasUsed [..]
excessBlobGas [..]
requestsHash [..]
transactions: [
...
]
"#]]);
// <https://etherscan.io/block/15007840>
cmd.cast_fuse().args([
"block",
"15007840",
"-f",
"hash,timestamp",
"--rpc-url",
eth_rpc_url.as_str(),
]);
cmd.assert_success().stdout_eq(str![[r#"
0x950091817a57e22b6c1f3b951a15f52d41ac89b299cc8f9c89bb6d185f80c415
1655904485
"#]]);
});
casttest!(block_raw, |_prj, cmd| {
let eth_rpc_url = next_http_rpc_endpoint();
let output = cmd
.args(["block", "22934900", "--rpc-url", eth_rpc_url.as_str(), "--raw"])
.assert_success()
.get_output()
.stdout_lossy()
.trim()
.to_string();
// Hash the output with keccak256
let hash = alloy_primitives::keccak256(hex::decode(output).unwrap());
// Verify the Mainnet's block #22934900 header hash equals the expected value
// obtained with go-ethereum's `block.Header().Hash()` method
assert_eq!(
hash.to_string(),
"0x49fd7f3b9ba5d67fa60197027f09454d4cac945e8f271edcc84c3fd5872446d3"
);
});
// tests that the `cast find-block` command works correctly
casttest!(finds_block, |_prj, cmd| {
// Construct args
let timestamp = "1647843609".to_string();
let eth_rpc_url = next_http_rpc_endpoint();
// Call `cast find-block`
// <https://etherscan.io/block/14428082>
cmd.args(["find-block", "--rpc-url", eth_rpc_url.as_str(), ×tamp])
.assert_success()
.stdout_eq(str![[r#"
14428082
"#]]);
});
// tests that we can create a new wallet
casttest!(new_wallet, |_prj, cmd| {
cmd.args(["wallet", "new"]).assert_success().stdout_eq(str![[r#"
Successfully created new keypair.
[ADDRESS]
[PRIVATE_KEY]
"#]]);
});
// tests that we can create a new wallet (verbose variant)
casttest!(new_wallet_verbose, |_prj, cmd| {
cmd.args(["wallet", "new", "-v"]).assert_success().stdout_eq(str![[r#"
Successfully created new keypair.
[ADDRESS]
[PUBLIC_KEY]
[PRIVATE_KEY]
"#]]);
});
// tests that we can create a new wallet with json output
casttest!(new_wallet_json, |_prj, cmd| {
cmd.args(["wallet", "new", "--json"]).assert_success().stdout_eq(
str![[r#"
[
{
"address": "{...}",
"private_key": "{...}"
}
]
"#]]
.is_json(),
);
});
// tests that we can create a new wallet with json output (verbose variant)
casttest!(new_wallet_json_verbose, |_prj, cmd| {
cmd.args(["wallet", "new", "--json", "-v"]).assert_success().stdout_eq(
str![[r#"
[
{
"address": "{...}",
"public_key": "{...}",
"private_key": "{...}"
}
]
"#]]
.is_json(),
);
});
// tests that we can create a new wallet with keystore
casttest!(new_wallet_keystore_with_password, |_prj, cmd| {
cmd.args(["wallet", "new", ".", "test-account", "--unsafe-password", "test"])
.assert_success()
.stdout_eq(str![[r#"
Created new encrypted keystore file: [..]
[ADDRESS]
"#]]);
});
// tests that we can create a new wallet with keystore (verbose variant)
casttest!(new_wallet_keystore_with_password_verbose, |_prj, cmd| {
cmd.args(["wallet", "new", ".", "test-account", "--unsafe-password", "test", "-v"])
.assert_success()
.stdout_eq(str![[r#"
Created new encrypted keystore file: [..]
[ADDRESS]
[PUBLIC_KEY]
"#]]);
});
// tests that we can create a new wallet with default keystore location
casttest!(new_wallet_default_keystore, |_prj, cmd| {
cmd.args(["wallet", "new", "--unsafe-password", "test"]).assert_success().stdout_eq(str![[
r#"
Created new encrypted keystore file: [..]
[ADDRESS]
"#
]]);
// Verify the default keystore directory was created
let keystore_path = dirs::home_dir().unwrap().join(".foundry").join("keystores");
assert!(keystore_path.exists());
assert!(keystore_path.is_dir());
});
// tests that we can outputting multiple keys without a keystore path
casttest!(new_wallet_multiple_keys, |_prj, cmd| {
cmd.args(["wallet", "new", "-n", "2"]).assert_success().stdout_eq(str![[r#"
Successfully created new keypair.
[ADDRESS]
[PRIVATE_KEY]
Successfully created new keypair.
[ADDRESS]
[PRIVATE_KEY]
"#]]);
});
// tests that we can get the address of a keystore file
casttest!(wallet_address_keystore_with_password_file, |_prj, cmd| {
let keystore_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("tests/fixtures/keystore");
cmd.args([
"wallet",
"address",
"--keystore",
keystore_dir
.join("UTC--2022-12-20T10-30-43.591916000Z--ec554aeafe75601aaab43bd4621a22284db566c2")
.to_str()
.unwrap(),
"--password-file",
keystore_dir.join("password-ec554").to_str().unwrap(),
])
.assert_success()
.stdout_eq(str![[r#"
0xeC554aeAFE75601AaAb43Bd4621A22284dB566C2
"#]]);
});
// tests that `cast wallet remove` can successfully remove a keystore file and validates password
casttest!(wallet_remove_keystore_with_unsafe_password, |prj, cmd| {
let keystore_path = prj.root().join("keystore");
cmd.set_current_dir(prj.root());
let account_name = "testAccount";
// Default Anvil private key
let test_private_key =
b256!("0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80");
// import private key
cmd.cast_fuse()
.args([
"wallet",
"import",
account_name,
"--private-key",
&test_private_key.to_string(),
"-k",
"keystore",
"--unsafe-password",
"test",
])
.assert_success()
.stdout_eq(str![[r#"
`testAccount` keystore was saved successfully. [ADDRESS]
"#]]);
// check that the keystore file was created
let keystore_file = keystore_path.join(account_name);
assert!(keystore_file.exists());
// Remove the wallet
cmd.cast_fuse()
.args([
"wallet",
"remove",
"--name",
account_name,
"--dir",
keystore_path.to_str().unwrap(),
"--unsafe-password",
"test",
])
.assert_success()
.stdout_eq(str![[r#"
`testAccount` keystore was removed successfully.
"#]]);
assert!(!keystore_file.exists());
});
// tests that `cast wallet sign message` outputs the expected signature
casttest!(wallet_sign_message_utf8_data, |_prj, cmd| {
let pk = "0x0000000000000000000000000000000000000000000000000000000000000001";
let address = "0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf";
let msg = "test";
let expected = "0xfe28833983d6faa0715c7e8c3873c725ddab6fa5bf84d40e780676e463e6bea20fc6aea97dc273a98eb26b0914e224c8dd5c615ceaab69ddddcf9b0ae3de0e371c";
cmd.args(["wallet", "sign", "--private-key", pk, msg]).assert_success().stdout_eq(str![[r#"
0xfe28833983d6faa0715c7e8c3873c725ddab6fa5bf84d40e780676e463e6bea20fc6aea97dc273a98eb26b0914e224c8dd5c615ceaab69ddddcf9b0ae3de0e371c
"#]]);
// Success.
cmd.cast_fuse()
.args(["wallet", "verify", "-a", address, msg, expected])
.assert_success()
.stdout_eq(str![[r#"
Validation succeeded. Address 0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf signed this message.
"#]]);
// Fail.
cmd.cast_fuse()
.args(["wallet", "verify", "-a", address, "other msg", expected])
.assert_failure()
.stderr_eq(str![[r#"
Error: Validation failed. Address 0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf did not sign this message.
"#]]);
});
// tests that `cast wallet sign message` outputs the expected signature, given a 0x-prefixed data
casttest!(wallet_sign_message_hex_data, |_prj, cmd| {
cmd.args([
"wallet",
"sign",
"--private-key",
"0x0000000000000000000000000000000000000000000000000000000000000001",
"0x0000000000000000000000000000000000000000000000000000000000000000",
]).assert_success().stdout_eq(str![[r#"
0x23a42ca5616ee730ff3735890c32fc7b9491a9f633faca9434797f2c845f5abf4d9ba23bd7edb8577acebaa3644dc5a4995296db420522bb40060f1693c33c9b1c
"#]]);
});
// <https://github.com/foundry-rs/foundry/issues/10613>
// tests that `cast wallet sign` and `cast wallet verify` work with the same message as input
casttest!(wallet_sign_and_verify_message_hex_data, |_prj, cmd| {
// message="$1"
// mnemonic="test test test test test test test test test test test junk"
// key=$(cast wallet private-key --mnemonic "$mnemonic")
// address=$(cast wallet address --mnemonic "$mnemonic")
// signature=$(cast wallet sign --private-key "$key" "$message")
// cast wallet verify --address "$address" "$message" "$signature"
let mnemonic = "test test test test test test test test test test test junk";
let key = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
let address = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266";
cmd.args(["wallet", "private-key", "--mnemonic", mnemonic]).assert_success().stdout_eq(str![[
r#"
0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
"#
]]);
cmd.cast_fuse().args(["wallet", "address", "--mnemonic", mnemonic]).assert_success().stdout_eq(
str![[r#"
0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
"#]],
);
let msg_hex = "0x0000000000000000000000000000000000000000000000000000000000000001";
let signature_hex = "0xed769da87f78d0166b30aebf2767ceed5a3867da21b2fba8c6527af256bbcebe24a1e758ec8ad1ffc29cfefa540ea7ba7966c0edf6907af82348f894ba4f40fa1b";
cmd.cast_fuse().args([
"wallet", "sign", "--private-key",key, msg_hex
]).assert_success().stdout_eq(str![[r#"
0xed769da87f78d0166b30aebf2767ceed5a3867da21b2fba8c6527af256bbcebe24a1e758ec8ad1ffc29cfefa540ea7ba7966c0edf6907af82348f894ba4f40fa1b
"#]]);
cmd.cast_fuse()
.args(["wallet", "verify", "--address", address, msg_hex, signature_hex])
.assert_success()
.stdout_eq(str![[r#"
Validation succeeded. Address 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 signed this message.
"#]]);
let msg_raw = "0000000000000000000000000000000000000000000000000000000000000001";
let signature_raw = "0x27a97b378477d9d004bd19cbd838d59bbb9847074ae4cc5b5975cc5566065eea76ee5b752fcdd483073e1baba548d82d9accc8603b3781bcc9abf195614cd3411c";
cmd.cast_fuse().args([
"wallet", "sign", "--private-key",key, msg_raw
]).assert_success().stdout_eq(str![[r#"
0x27a97b378477d9d004bd19cbd838d59bbb9847074ae4cc5b5975cc5566065eea76ee5b752fcdd483073e1baba548d82d9accc8603b3781bcc9abf195614cd3411c
"#]]);
cmd.cast_fuse()
.args(["wallet", "verify", "--address", address, msg_raw, signature_raw])
.assert_success()
.stdout_eq(str![[r#"
Validation succeeded. Address 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266 signed this message.
"#]]);
});
// tests that `cast wallet sign typed-data` outputs the expected signature, given a JSON string
casttest!(wallet_sign_typed_data_string, |_prj, cmd| {
cmd.args([
"wallet",
"sign",
"--private-key",
"0x0000000000000000000000000000000000000000000000000000000000000001",
"--data",
"{\"types\": {\"EIP712Domain\": [{\"name\": \"name\",\"type\": \"string\"},{\"name\": \"version\",\"type\": \"string\"},{\"name\": \"chainId\",\"type\": \"uint256\"},{\"name\": \"verifyingContract\",\"type\": \"address\"}],\"Message\": [{\"name\": \"data\",\"type\": \"string\"}]},\"primaryType\": \"Message\",\"domain\": {\"name\": \"example.metamask.io\",\"version\": \"1\",\"chainId\": \"1\",\"verifyingContract\": \"0x0000000000000000000000000000000000000000\"},\"message\": {\"data\": \"Hello!\"}}",
]).assert_success().stdout_eq(str![[r#"
0x06c18bdc8163219fddc9afaf5a0550e381326474bb757c86dc32317040cf384e07a2c72ce66c1a0626b6750ca9b6c035bf6f03e7ed67ae2d1134171e9085c0b51b
"#]]);
});
// tests that `cast wallet sign typed-data` outputs the expected signature, given a JSON file
casttest!(wallet_sign_typed_data_file, |_prj, cmd| {
cmd.args([
"wallet",
"sign",
"--private-key",
"0x0000000000000000000000000000000000000000000000000000000000000001",
"--data",
"--from-file",
Path::new(env!("CARGO_MANIFEST_DIR"))
.join("tests/fixtures/sign_typed_data.json")
.into_os_string()
.into_string()
.unwrap()
.as_str(),
]).assert_success().stdout_eq(str![[r#"
0x06c18bdc8163219fddc9afaf5a0550e381326474bb757c86dc32317040cf384e07a2c72ce66c1a0626b6750ca9b6c035bf6f03e7ed67ae2d1134171e9085c0b51b
"#]]);
});
// tests that `cast wallet sign typed-data` passes with type names containing colons
// <https://github.com/foundry-rs/foundry/issues/10765>
casttest!(wallet_sign_typed_data_with_colon_succeeds, |_prj, cmd| {
    // "Test:Message" is not a valid Solidity identifier; the typed-data parser
    // must still accept it rather than erroring out.
    let typed_data_with_colon = r#"{
        "types": {
            "EIP712Domain": [
                {"name": "name", "type": "string"},
                {"name": "version", "type": "string"},
                {"name": "chainId", "type": "uint256"},
                {"name": "verifyingContract", "type": "address"}
            ],
            "Test:Message": [
                {"name": "content", "type": "string"}
            ]
        },
        "primaryType": "Test:Message",
        "domain": {
            "name": "TestDomain",
            "version": "1",
            "chainId": 1,
            "verifyingContract": "0x0000000000000000000000000000000000000000"
        },
        "message": {
            "content": "Hello"
        }
    }"#;
    cmd.args([
        "wallet",
        "sign",
        "--private-key",
        "0x0000000000000000000000000000000000000000000000000000000000000001",
        "--data",
        typed_data_with_colon,
    ]).assert_success().stdout_eq(str![[r#"
0xf91c67e845a4d468d1f876f457ffa01e65468641fc121453705242d21de39b266c278592b085814ab1e9adc938cc26b1d64bb61f80b437df077777c4283612291b
"#]]);
});
// tests that the same data without colon works correctly
// <https://github.com/foundry-rs/foundry/issues/10765>
// Control case for the test above: only exit status is asserted here.
casttest!(wallet_sign_typed_data_without_colon_works, |_prj, cmd| {
    let typed_data_without_colon = r#"{
        "types": {
            "EIP712Domain": [
                {"name": "name", "type": "string"},
                {"name": "version", "type": "string"},
                {"name": "chainId", "type": "uint256"},
                {"name": "verifyingContract", "type": "address"}
            ],
            "TestMessage": [
                {"name": "content", "type": "string"}
            ]
        },
        "primaryType": "TestMessage",
        "domain": {
            "name": "TestDomain",
            "version": "1",
            "chainId": 1,
            "verifyingContract": "0x0000000000000000000000000000000000000000"
        },
        "message": {
            "content": "Hello"
        }
    }"#;
    cmd.args([
        "wallet",
        "sign",
        "--private-key",
        "0x0000000000000000000000000000000000000000000000000000000000000001",
        "--data",
        typed_data_without_colon,
    ])
    .assert_success();
});
// tests that `cast wallet sign-auth message` outputs the expected signature
casttest!(wallet_sign_auth, |_prj, cmd| {
    cmd.args([
        "wallet",
        "sign-auth",
        "--private-key",
        "0x0000000000000000000000000000000000000000000000000000000000000001",
        "--nonce",
        "100",
        "--chain",
        "1",
        "0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf"]).assert_success().stdout_eq(str![[r#"
0xf85a01947e5f4552091a69125d5dfcb7b8c2659029395bdf6401a0ad489ee0314497c3f06567f3080a46a63908edc1c7cdf2ac2d609ca911212086a065a6ba951c8748dd8634740fe498efb61770097d99ff5fdcb9a863b62ea899f6
"#]]);
});
// tests that `cast wallet sign-auth --self-broadcast` uses nonce + 1
// With --self-broadcast the signer intends to submit the delegating tx itself,
// so the EIP-7702 authorization must be signed for the *next* account nonce.
casttest!(wallet_sign_auth_self_broadcast, async |_prj, cmd| {
    use alloy_rlp::Decodable;
    use alloy_signer_local::PrivateKeySigner;
    // Prague hardfork is required for EIP-7702 authorizations.
    let (_, handle) =
        anvil::spawn(NodeConfig::test().with_hardfork(Some(EthereumHardfork::Prague.into()))).await;
    let endpoint = handle.http_endpoint();
    let private_key = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
    let signer: PrivateKeySigner = private_key.parse().unwrap();
    let signer_address = signer.address();
    let delegate_address = address!("0x70997970C51812dc3A010C7d01b50e0d17dc79C8");
    // Get the current nonce from the RPC
    let provider = ProviderBuilder::new().connect_http(endpoint.parse().unwrap());
    let current_nonce = provider.get_transaction_count(signer_address).await.unwrap();
    // First, get the auth without --self-broadcast (should use current nonce)
    let output_normal = cmd
        .args([
            "wallet",
            "sign-auth",
            "--private-key",
            private_key,
            "--rpc-url",
            &endpoint,
            &delegate_address.to_string(),
        ])
        .assert_success()
        .get_output()
        .stdout_lossy()
        .trim()
        .to_string();
    // Then, get the auth with --self-broadcast (should use current nonce + 1)
    let output_self_broadcast = cmd
        .cast_fuse()
        .args([
            "wallet",
            "sign-auth",
            "--private-key",
            private_key,
            "--rpc-url",
            &endpoint,
            "--self-broadcast",
            &delegate_address.to_string(),
        ])
        .assert_success()
        .get_output()
        .stdout_lossy()
        .trim()
        .to_string();
    // The outputs should be different due to different nonces
    assert_ne!(
        output_normal, output_self_broadcast,
        "self-broadcast should produce different signature due to nonce + 1"
    );
    // Decode the RLP to verify the nonces
    let normal_bytes = hex::decode(output_normal.strip_prefix("0x").unwrap()).unwrap();
    let self_broadcast_bytes =
        hex::decode(output_self_broadcast.strip_prefix("0x").unwrap()).unwrap();
    let normal_auth =
        alloy_eips::eip7702::SignedAuthorization::decode(&mut normal_bytes.as_slice()).unwrap();
    let self_broadcast_auth =
        alloy_eips::eip7702::SignedAuthorization::decode(&mut self_broadcast_bytes.as_slice())
            .unwrap();
    assert_eq!(normal_auth.nonce(), current_nonce, "normal auth should have current nonce");
    assert_eq!(
        self_broadcast_auth.nonce(),
        current_nonce + 1,
        "self-broadcast auth should have current nonce + 1"
    );
});
// tests that `cast wallet list` outputs the local accounts
casttest!(wallet_list_local_accounts, |prj, cmd| {
    let keystore_path = prj.root().join("keystore");
    fs::create_dir_all(keystore_path).unwrap();
    cmd.set_current_dir(prj.root());
    // empty results
    cmd.cast_fuse()
        .args(["wallet", "list", "--dir", "keystore"])
        .assert_success()
        .stdout_eq(str![""]);
    // create 10 wallets
    cmd.cast_fuse()
        .args(["wallet", "new", "keystore", "-n", "10", "--unsafe-password", "test"])
        .assert_success()
        .stdout_eq(str![[r#"
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
Created new encrypted keystore file: [..]
[ADDRESS]
"#]]);
    // test list new wallet
    // One "(Local)" entry per keystore file created above.
    cmd.cast_fuse().args(["wallet", "list", "--dir", "keystore"]).assert_success().stdout_eq(str![
        [r#"
[..] (Local)
[..] (Local)
[..] (Local)
[..] (Local)
[..] (Local)
[..] (Local)
[..] (Local)
[..] (Local)
[..] (Local)
[..] (Local)
"#]
    ]);
});
// tests that `cast wallet new-mnemonic --entropy` outputs the expected mnemonic
// The fixed entropy deterministically yields the well-known "test test ... junk"
// phrase, whose first accounts are the standard Anvil dev accounts.
casttest!(wallet_mnemonic_from_entropy, |_prj, cmd| {
    cmd.args([
        "wallet",
        "new-mnemonic",
        "--accounts",
        "3",
        "--entropy",
        "0xdf9bf37e6fcdf9bf37e6fcdf9bf37e3c",
    ])
    .assert_success()
    .stdout_eq(
        str![[r#"
Generating mnemonic from provided entropy...
Successfully generated a new mnemonic.
Phrase:
test test test test test test test test test test test junk
Accounts:
- Account 0:
Address:     0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
Private key: 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
- Account 1:
Address:     0x70997970C51812dc3A010C7d01b50e0d17dc79C8
Private key: 0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d
- Account 2:
Address:     0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC
Private key: 0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a
"#]]
        .raw(),
    );
});
// tests that `cast wallet new-mnemonic --entropy` outputs the expected mnemonic (verbose variant)
// `-v` additionally prints each account's uncompressed public key.
casttest!(wallet_mnemonic_from_entropy_verbose, |_prj, cmd| {
    cmd.args([
        "wallet",
        "new-mnemonic",
        "--accounts",
        "3",
        "--entropy",
        "0xdf9bf37e6fcdf9bf37e6fcdf9bf37e3c",
        "-v",
    ])
    .assert_success()
    .stdout_eq(
        str![[r#"
Generating mnemonic from provided entropy...
Successfully generated a new mnemonic.
Phrase:
test test test test test test test test test test test junk
Accounts:
- Account 0:
Address:     0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266
Public key:  0x8318535b54105d4a7aae60c08fc45f9687181b4fdfc625bd1a753fa7397fed753547f11ca8696646f2f3acb08e31016afac23e630c5d11f59f61fef57b0d2aa5
Private key: 0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80
- Account 1:
Address:     0x70997970C51812dc3A010C7d01b50e0d17dc79C8
Public key:  0xba5734d8f7091719471e7f7ed6b9df170dc70cc661ca05e688601ad984f068b0d67351e5f06073092499336ab0839ef8a521afd334e53807205fa2f08eec74f4
Private key: 0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d
- Account 2:
Address:     0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC
Public key:  0x9d9031e97dd78ff8c15aa86939de9b1e791066a0224e331bc962a2099a7b1f0464b8bbafe1535f2301c72c2cb3535b172da30b02686ab0393d348614f157fbdb
Private key: 0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a
"#]]
        .raw(),
    );
});
// tests that `cast wallet new-mnemonic --json` outputs the expected mnemonic
casttest!(wallet_mnemonic_from_entropy_json, |_prj, cmd| {
    cmd.args([
        "wallet",
        "new-mnemonic",
        "--accounts",
        "3",
        "--entropy",
        "0xdf9bf37e6fcdf9bf37e6fcdf9bf37e3c",
        "--json",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
{
  "mnemonic": "test test test test test test test test test test test junk",
  "accounts": [
    {
      "address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266",
      "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
    },
    {
      "address": "0x70997970C51812dc3A010C7d01b50e0d17dc79C8",
      "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
    },
    {
      "address": "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC",
      "private_key": "0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a"
    }
  ]
}
"#]]);
});
// tests that `cast wallet new-mnemonic --json` outputs the expected mnemonic (verbose variant)
// `--json -v` adds a "public_key" field to each account object.
casttest!(wallet_mnemonic_from_entropy_json_verbose, |_prj, cmd| {
    cmd.args([
        "wallet",
        "new-mnemonic",
        "--accounts",
        "3",
        "--entropy",
        "0xdf9bf37e6fcdf9bf37e6fcdf9bf37e3c",
        "--json",
        "-v",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
{
  "mnemonic": "test test test test test test test test test test test junk",
  "accounts": [
    {
      "address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266",
      "public_key": "0x8318535b54105d4a7aae60c08fc45f9687181b4fdfc625bd1a753fa7397fed753547f11ca8696646f2f3acb08e31016afac23e630c5d11f59f61fef57b0d2aa5",
      "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
    },
    {
      "address": "0x70997970C51812dc3A010C7d01b50e0d17dc79C8",
      "public_key": "0xba5734d8f7091719471e7f7ed6b9df170dc70cc661ca05e688601ad984f068b0d67351e5f06073092499336ab0839ef8a521afd334e53807205fa2f08eec74f4",
      "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
    },
    {
      "address": "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC",
      "public_key": "0x9d9031e97dd78ff8c15aa86939de9b1e791066a0224e331bc962a2099a7b1f0464b8bbafe1535f2301c72c2cb3535b172da30b02686ab0393d348614f157fbdb",
      "private_key": "0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a"
    }
  ]
}
"#]]);
});
// tests that `cast wallet derive` outputs the addresses of the accounts derived from the mnemonic
casttest!(wallet_derive_mnemonic, |_prj, cmd| {
    cmd.args([
        "wallet",
        "derive",
        "--accounts",
        "3",
        "test test test test test test test test test test test junk",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
- Account 0:
[ADDRESS]
- Account 1:
[ADDRESS]
- Account 2:
[ADDRESS]
"#]]);
});
// tests that `cast wallet derive` with insecure flag outputs the addresses and private keys of the
// accounts derived from the mnemonic
casttest!(wallet_derive_mnemonic_insecure, |_prj, cmd| {
    cmd.args([
        "wallet",
        "derive",
        "--accounts",
        "3",
        "--insecure",
        "test test test test test test test test test test test junk",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
- Account 0:
[ADDRESS]
[PRIVATE_KEY]
- Account 1:
[ADDRESS]
[PRIVATE_KEY]
- Account 2:
[ADDRESS]
[PRIVATE_KEY]
"#]]);
});
// tests that `cast wallet derive` with json flag outputs the addresses of the accounts derived from
// the mnemonic in JSON format
// Addresses are pinned exactly: these are the standard Anvil dev accounts for
// the "test ... junk" mnemonic.
casttest!(wallet_derive_mnemonic_json, |_prj, cmd| {
    cmd.args([
        "wallet",
        "derive",
        "--accounts",
        "3",
        "--json",
        "test test test test test test test test test test test junk",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
[
  {
    "address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"
  },
  {
    "address": "0x70997970C51812dc3A010C7d01b50e0d17dc79C8"
  },
  {
    "address": "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC"
  }
]
"#]]);
});
// tests that `cast wallet derive` with insecure and json flag outputs the addresses and private
// keys of the accounts derived from the mnemonic in JSON format
casttest!(wallet_derive_mnemonic_insecure_json, |_prj, cmd| {
    cmd.args([
        "wallet",
        "derive",
        "--accounts",
        "3",
        "--insecure",
        "--json",
        "test test test test test test test test test test test junk",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
[
  {
    "address": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266",
    "private_key": "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
  },
  {
    "address": "0x70997970C51812dc3A010C7d01b50e0d17dc79C8",
    "private_key": "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
  },
  {
    "address": "0x3C44CdDdB6a900fa2b585dd299e03d12FA4293BC",
    "private_key": "0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a"
  }
]
"#]]);
});
// tests that `cast wallet private-key` with arguments outputs the private key
// Positional form: `<mnemonic> <index>`; index 1 is the second Anvil dev account.
casttest!(wallet_private_key_from_mnemonic_arg, |_prj, cmd| {
    cmd.args([
        "wallet",
        "private-key",
        "test test test test test test test test test test test junk",
        "1",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d
"#]]);
});
// tests that `cast wallet private-key` with options outputs the private key
// Flag form must behave identically to the positional form above.
casttest!(wallet_private_key_from_mnemonic_option, |_prj, cmd| {
    cmd.args([
        "wallet",
        "private-key",
        "--mnemonic",
        "test test test test test test test test test test test junk",
        "--mnemonic-index",
        "1",
    ])
    .assert_success()
    .stdout_eq(str![[r#"
0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d
"#]]);
});
// tests that `cast wallet public-key` correctly derives and outputs the public key
casttest!(wallet_public_key_with_private_key, |_prj, cmd| {
    cmd.args([
        "wallet",
        "public-key",
        "--raw-private-key",
        "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d"
    ])
    .assert_success()
    .stdout_eq(str![[r#"
0xba5734d8f7091719471e7f7ed6b9df170dc70cc661ca05e688601ad984f068b0d67351e5f06073092499336ab0839ef8a521afd334e53807205fa2f08eec74f4
"#]]);
});
// tests that `cast wallet private-key` with derivation path outputs the private key
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/cast/bin/main.rs | crates/cast/bin/main.rs | //! The `cast` CLI: a Swiss Army knife for interacting with EVM smart contracts, sending
//! transactions and getting chain data.
use cast::args::run;
// Install Foundry's shared global allocator. NOTE(review): the concrete
// allocator is chosen inside `foundry_cli::utils` (build-config dependent);
// confirm there which one is used.
#[global_allocator]
static ALLOC: foundry_cli::utils::Allocator = foundry_cli::utils::new_allocator();
/// Binary entry point: delegates to the `cast` CLI runner and turns any error
/// into a printed report plus a non-zero exit code.
fn main() {
    match run() {
        Ok(_) => {}
        Err(err) => {
            // Best-effort error output; a failure to write to stderr is ignored.
            let _ = foundry_common::sh_err!("{err:?}");
            std::process::exit(1);
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/signer.rs | crates/wallets/src/signer.rs | use crate::{error::WalletSignerError, wallet_browser::signer::BrowserSigner};
use alloy_consensus::{Sealed, SignableTransaction};
use alloy_dyn_abi::TypedData;
use alloy_network::{NetworkWallet, TransactionBuilder, TxSigner};
use alloy_primitives::{Address, B256, ChainId, Signature, hex};
use alloy_signer::Signer;
use alloy_signer_ledger::{HDPath as LedgerHDPath, LedgerSigner};
use alloy_signer_local::{MnemonicBuilder, PrivateKeySigner, coins_bip39::English};
use alloy_signer_trezor::{HDPath as TrezorHDPath, TrezorSigner};
use alloy_sol_types::{Eip712Domain, SolStruct};
use async_trait::async_trait;
use foundry_primitives::{
FoundryNetwork, FoundryTransactionRequest, FoundryTxEnvelope, FoundryTypedTx,
};
use std::{collections::HashSet, path::PathBuf, time::Duration};
use tempo_primitives::TempoSignature;
use tracing::warn;
#[cfg(feature = "aws-kms")]
use alloy_signer_aws::{AwsSigner, aws_config::BehaviorVersion, aws_sdk_kms::Client as AwsClient};
#[cfg(feature = "gcp-kms")]
use alloy_signer_gcp::{
GcpKeyRingRef, GcpSigner, GcpSignerError, KeySpecifier,
gcloud_sdk::{
GoogleApi,
google::cloud::kms::v1::key_management_service_client::KeyManagementServiceClient,
},
};
#[cfg(feature = "turnkey")]
use alloy_signer_turnkey::TurnkeySigner;
pub type Result<T> = std::result::Result<T, WalletSignerError>;
/// Wrapper enum around different signers.
///
/// Each variant wraps exactly one concrete signer; hardware and cloud backends
/// are feature-gated so unused integrations are compiled out.
#[derive(Debug)]
pub enum WalletSigner {
    /// Wrapper around local wallet. e.g. private key, mnemonic
    Local(PrivateKeySigner),
    /// Wrapper around Ledger signer.
    Ledger(LedgerSigner),
    /// Wrapper around Trezor signer.
    Trezor(TrezorSigner),
    /// Wrapper around browser wallet.
    Browser(BrowserSigner),
    /// Wrapper around AWS KMS signer.
    #[cfg(feature = "aws-kms")]
    Aws(AwsSigner),
    /// Wrapper around Google Cloud KMS signer.
    #[cfg(feature = "gcp-kms")]
    Gcp(GcpSigner),
    /// Wrapper around Turnkey signer.
    #[cfg(feature = "turnkey")]
    Turnkey(TurnkeySigner),
}
impl WalletSigner {
    /// Connects to a Ledger device and creates a signer for the given HD path.
    pub async fn from_ledger_path(path: LedgerHDPath) -> Result<Self> {
        let ledger = LedgerSigner::new(path, None).await?;
        Ok(Self::Ledger(ledger))
    }

    /// Connects to a Trezor device and creates a signer for the given HD path.
    pub async fn from_trezor_path(path: TrezorHDPath) -> Result<Self> {
        let trezor = TrezorSigner::new(path, None).await?;
        Ok(Self::Trezor(trezor))
    }

    /// Starts the browser-wallet bridge on `port`, waiting up to 5 minutes for
    /// a wallet in the browser to connect.
    pub async fn from_browser(
        port: u16,
        open_browser: bool,
        browser_development: bool,
    ) -> Result<Self> {
        let browser_signer =
            BrowserSigner::new(port, open_browser, Duration::from_secs(300), browser_development)
                .await
                .map_err(|e| WalletSignerError::Browser(e.into()))?;
        Ok(Self::Browser(browser_signer))
    }

    /// Creates a signer backed by an AWS KMS key.
    ///
    /// Returns an "unsupported signer" error if foundry was built without the
    /// `aws-kms` feature.
    pub async fn from_aws(key_id: String) -> Result<Self> {
        #[cfg(feature = "aws-kms")]
        {
            let config =
                alloy_signer_aws::aws_config::load_defaults(BehaviorVersion::latest()).await;
            let client = AwsClient::new(&config);
            Ok(Self::Aws(
                AwsSigner::new(client, key_id, None)
                    .await
                    .map_err(|e| WalletSignerError::Aws(Box::new(e)))?,
            ))
        }
        #[cfg(not(feature = "aws-kms"))]
        {
            let _ = key_id;
            Err(WalletSignerError::aws_unsupported())
        }
    }

    /// Creates a signer backed by a Google Cloud KMS key version.
    ///
    /// Returns an "unsupported signer" error if foundry was built without the
    /// `gcp-kms` feature.
    pub async fn from_gcp(
        project_id: String,
        location: String,
        keyring: String,
        key_name: String,
        key_version: u64,
    ) -> Result<Self> {
        #[cfg(feature = "gcp-kms")]
        {
            let keyring = GcpKeyRingRef::new(&project_id, &location, &keyring);
            let client = match GoogleApi::from_function(
                KeyManagementServiceClient::new,
                "https://cloudkms.googleapis.com",
                None,
            )
            .await
            {
                Ok(c) => c,
                Err(e) => {
                    return Err(WalletSignerError::Gcp(Box::new(GcpSignerError::GoogleKmsError(
                        e,
                    ))));
                }
            };
            let specifier = KeySpecifier::new(keyring, &key_name, key_version);
            Ok(Self::Gcp(
                GcpSigner::new(client, specifier, None)
                    .await
                    .map_err(|e| WalletSignerError::Gcp(Box::new(e)))?,
            ))
        }
        #[cfg(not(feature = "gcp-kms"))]
        {
            let _ = project_id;
            let _ = location;
            let _ = keyring;
            let _ = key_name;
            let _ = key_version;
            Err(WalletSignerError::gcp_unsupported())
        }
    }

    /// Creates a signer backed by a Turnkey-managed key.
    ///
    /// Returns an "unsupported signer" error if foundry was built without the
    /// `turnkey` feature.
    pub fn from_turnkey(
        api_private_key: String,
        organization_id: String,
        address: Address,
    ) -> Result<Self> {
        #[cfg(feature = "turnkey")]
        {
            Ok(Self::Turnkey(TurnkeySigner::from_api_key(
                &api_private_key,
                organization_id,
                address,
                None,
            )?))
        }
        #[cfg(not(feature = "turnkey"))]
        {
            let _ = api_private_key;
            let _ = organization_id;
            let _ = address;
            // Use the named helper, consistent with `from_aws`/`from_gcp`.
            Err(WalletSignerError::turnkey_unsupported())
        }
    }

    /// Creates a local signer from a raw 32-byte private key.
    pub fn from_private_key(private_key: &B256) -> Result<Self> {
        Ok(Self::Local(PrivateKeySigner::from_bytes(private_key)?))
    }

    /// Returns a list of addresses available to use with current signer
    ///
    /// - for Ledger and Trezor signers the number of addresses to retrieve is specified as argument
    /// - the result for Ledger signers includes addresses available for both LedgerLive and Legacy
    ///   derivation paths
    /// - for Local and AWS signers the result contains a single address
    /// - errors when retrieving addresses are logged but do not prevent returning available
    ///   addresses
    pub async fn available_senders(&self, max: usize) -> Result<Vec<Address>> {
        // A set deduplicates addresses that appear under multiple derivation paths.
        let mut senders = HashSet::new();
        match self {
            Self::Local(local) => {
                senders.insert(local.address());
            }
            Self::Ledger(ledger) => {
                // Try LedgerLive derivation path
                for i in 0..max {
                    match ledger.get_address_with_path(&LedgerHDPath::LedgerLive(i)).await {
                        Ok(address) => {
                            senders.insert(address);
                        }
                        Err(e) => {
                            warn!("Failed to get Ledger address at index {i} (LedgerLive): {e}");
                        }
                    }
                }
                // Try Legacy derivation path
                for i in 0..max {
                    match ledger.get_address_with_path(&LedgerHDPath::Legacy(i)).await {
                        Ok(address) => {
                            senders.insert(address);
                        }
                        Err(e) => {
                            warn!("Failed to get Ledger address at index {i} (Legacy): {e}");
                        }
                    }
                }
            }
            Self::Trezor(trezor) => {
                for i in 0..max {
                    match trezor.get_address_with_path(&TrezorHDPath::TrezorLive(i)).await {
                        Ok(address) => {
                            senders.insert(address);
                        }
                        Err(e) => {
                            warn!("Failed to get Trezor address at index {i} (TrezorLive): {e}",);
                        }
                    }
                }
            }
            Self::Browser(browser) => {
                senders.insert(alloy_signer::Signer::address(browser));
            }
            #[cfg(feature = "aws-kms")]
            Self::Aws(aws) => {
                senders.insert(alloy_signer::Signer::address(aws));
            }
            #[cfg(feature = "gcp-kms")]
            Self::Gcp(gcp) => {
                senders.insert(alloy_signer::Signer::address(gcp));
            }
            #[cfg(feature = "turnkey")]
            Self::Turnkey(turnkey) => {
                senders.insert(alloy_signer::Signer::address(turnkey));
            }
        }
        Ok(senders.into_iter().collect())
    }

    /// Builds a local signer from a BIP-39 mnemonic phrase.
    ///
    /// If `derivation_path` is set it takes precedence; otherwise the account
    /// at `index` of the default derivation path is used.
    pub fn from_mnemonic(
        mnemonic: &str,
        passphrase: Option<&str>,
        derivation_path: Option<&str>,
        index: u32,
    ) -> Result<Self> {
        let mut builder = MnemonicBuilder::<English>::default().phrase(mnemonic);
        if let Some(passphrase) = passphrase {
            builder = builder.password(passphrase)
        }
        builder = if let Some(hd_path) = derivation_path {
            builder.derivation_path(hd_path)?
        } else {
            builder.index(index)?
        };
        Ok(Self::Local(builder.build()?))
    }
}
/// Dispatches an expression to the inner signer of whichever variant `$s` is,
/// binding it as `$inner`. Feature-gated variants are only matched when their
/// feature is enabled, mirroring the `WalletSigner` definition.
macro_rules! delegate {
    ($s:ident, $inner:ident => $e:expr) => {
        match $s {
            Self::Local($inner) => $e,
            Self::Ledger($inner) => $e,
            Self::Trezor($inner) => $e,
            Self::Browser($inner) => $e,
            #[cfg(feature = "aws-kms")]
            Self::Aws($inner) => $e,
            #[cfg(feature = "gcp-kms")]
            Self::Gcp($inner) => $e,
            #[cfg(feature = "turnkey")]
            Self::Turnkey($inner) => $e,
        }
    };
}
// Every method forwards to the wrapped signer via `delegate!`; no additional
// logic lives at this level.
#[async_trait]
impl Signer for WalletSigner {
    /// Signs the given hash.
    async fn sign_hash(&self, hash: &B256) -> alloy_signer::Result<Signature> {
        delegate!(self, inner => inner.sign_hash(hash)).await
    }
    /// Signs the given message (EIP-191 personal-sign semantics are up to the
    /// inner signer).
    async fn sign_message(&self, message: &[u8]) -> alloy_signer::Result<Signature> {
        delegate!(self, inner => inner.sign_message(message)).await
    }
    fn address(&self) -> Address {
        delegate!(self, inner => alloy_signer::Signer::address(inner))
    }
    fn chain_id(&self) -> Option<ChainId> {
        delegate!(self, inner => inner.chain_id())
    }
    fn set_chain_id(&mut self, chain_id: Option<ChainId>) {
        delegate!(self, inner => inner.set_chain_id(chain_id))
    }
    /// Signs a statically-typed EIP-712 payload.
    async fn sign_typed_data<T: SolStruct + Send + Sync>(
        &self,
        payload: &T,
        domain: &Eip712Domain,
    ) -> alloy_signer::Result<Signature>
    where
        Self: Sized,
    {
        delegate!(self, inner => inner.sign_typed_data(payload, domain)).await
    }
    /// Signs a runtime-constructed EIP-712 payload.
    async fn sign_dynamic_typed_data(
        &self,
        payload: &TypedData,
    ) -> alloy_signer::Result<Signature> {
        delegate!(self, inner => inner.sign_dynamic_typed_data(payload)).await
    }
}
// Transaction-signing entry point; also pure delegation to the inner signer.
#[async_trait]
impl TxSigner<Signature> for WalletSigner {
    fn address(&self) -> Address {
        delegate!(self, inner => alloy_signer::Signer::address(inner))
    }
    async fn sign_transaction(
        &self,
        tx: &mut dyn SignableTransaction<Signature>,
    ) -> alloy_signer::Result<Signature> {
        delegate!(self, inner => inner.sign_transaction(tx)).await
    }
}
// A `WalletSigner` is a single-key wallet: it can only sign for its own address.
impl NetworkWallet<FoundryNetwork> for WalletSigner {
    fn default_signer_address(&self) -> Address {
        alloy_signer::Signer::address(self)
    }
    fn has_signer_for(&self, address: &Address) -> bool {
        self.default_signer_address() == *address
    }
    fn signer_addresses(&self) -> impl Iterator<Item = Address> {
        std::iter::once(self.default_signer_address())
    }
    /// Signs `tx` on behalf of `sender`, which must equal this signer's address.
    ///
    /// Each typed-transaction variant is signed and re-wrapped into the
    /// corresponding envelope variant.
    async fn sign_transaction_from(
        &self,
        sender: Address,
        tx: FoundryTypedTx,
    ) -> alloy_signer::Result<FoundryTxEnvelope> {
        if sender != self.default_signer_address() {
            return Err(alloy_signer::Error::other("Signer address mismatch"));
        }
        match tx {
            FoundryTypedTx::Legacy(mut inner) => {
                let sig = TxSigner::sign_transaction(self, &mut inner).await?;
                Ok(FoundryTxEnvelope::Legacy(inner.into_signed(sig)))
            }
            FoundryTypedTx::Eip2930(mut inner) => {
                let sig = TxSigner::sign_transaction(self, &mut inner).await?;
                Ok(FoundryTxEnvelope::Eip2930(inner.into_signed(sig)))
            }
            FoundryTypedTx::Eip1559(mut inner) => {
                let sig = TxSigner::sign_transaction(self, &mut inner).await?;
                Ok(FoundryTxEnvelope::Eip1559(inner.into_signed(sig)))
            }
            FoundryTypedTx::Eip4844(mut inner) => {
                let sig = TxSigner::sign_transaction(self, &mut inner).await?;
                Ok(FoundryTxEnvelope::Eip4844(inner.into_signed(sig)))
            }
            FoundryTypedTx::Eip7702(mut inner) => {
                let sig = TxSigner::sign_transaction(self, &mut inner).await?;
                Ok(FoundryTxEnvelope::Eip7702(inner.into_signed(sig)))
            }
            FoundryTypedTx::Deposit(inner) => {
                // Deposit transactions don't require signing
                Ok(FoundryTxEnvelope::Deposit(Sealed::new(inner)))
            }
            FoundryTypedTx::Tempo(mut inner) => {
                // Tempo envelopes carry their own signature type; convert the
                // plain secp256k1 signature before wrapping.
                let sig = TxSigner::sign_transaction(self, &mut inner).await?;
                let tempo_sig: TempoSignature = sig.into();
                Ok(FoundryTxEnvelope::Tempo(inner.into_signed(tempo_sig)))
            }
        }
    }
    /// Builds the typed transaction from `request` and signs it, defaulting the
    /// sender to this signer's address when `from` is unset.
    #[doc(hidden)]
    async fn sign_request(
        &self,
        request: FoundryTransactionRequest,
    ) -> alloy_signer::Result<FoundryTxEnvelope> {
        let sender = request.from().unwrap_or_else(|| self.default_signer_address());
        let tx = request.build_typed_tx().map_err(|_| {
            alloy_signer::Error::other("Failed to build typed transaction from request")
        })?;
        self.sign_transaction_from(sender, tx).await
    }
}
/// Signers that require user action to be obtained.
#[derive(Debug, Clone)]
pub enum PendingSigner {
    /// A keystore file that still needs its password entered interactively.
    Keystore(PathBuf),
    /// A raw private key that will be prompted for interactively.
    Interactive,
}
impl PendingSigner {
pub fn unlock(self) -> Result<WalletSigner> {
match self {
Self::Keystore(path) => {
let password = rpassword::prompt_password("Enter keystore password:")?;
match PrivateKeySigner::decrypt_keystore(path, password) {
Ok(signer) => Ok(WalletSigner::Local(signer)),
Err(e) => match e {
// Catch the `MacMismatch` error, which indicates an incorrect password and
// return a more user-friendly `IncorrectKeystorePassword`.
alloy_signer_local::LocalSignerError::EthKeystoreError(
eth_keystore::KeystoreError::MacMismatch,
) => Err(WalletSignerError::IncorrectKeystorePassword),
_ => Err(WalletSignerError::Local(e)),
},
}
}
Self::Interactive => {
let private_key = rpassword::prompt_password("Enter private key:")?;
Ok(WalletSigner::from_private_key(&hex::FromHex::from_hex(private_key)?)?)
}
}
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/lib.rs | crates/wallets/src/lib.rs | //! # foundry-wallets
//!
//! Utilities for working with multiple signers.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[macro_use]
extern crate foundry_common;
#[macro_use]
extern crate tracing;
pub mod error;
pub mod opts;
pub mod signer;
pub mod utils;
pub mod wallet_browser;
pub mod wallet_multi;
pub mod wallet_raw;
pub use opts::WalletOpts;
pub use signer::{PendingSigner, WalletSigner};
pub use wallet_multi::MultiWalletOpts;
pub use wallet_raw::RawWalletOpts;
#[cfg(feature = "aws-kms")]
// Anchor the `aws_config` dependency so the crate-level
// `unused_crate_dependencies` lint does not fire when it is only used
// transitively by the AWS signer.
use aws_config as _;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/error.rs | crates/wallets/src/error.rs | use alloy_primitives::hex::FromHexError;
use alloy_signer::k256::ecdsa;
use alloy_signer_ledger::LedgerError;
use alloy_signer_local::LocalSignerError;
use alloy_signer_trezor::TrezorError;
#[cfg(feature = "aws-kms")]
use alloy_signer_aws::AwsSignerError;
#[cfg(feature = "gcp-kms")]
use alloy_signer_gcp::GcpSignerError;
#[cfg(feature = "turnkey")]
use alloy_signer_turnkey::TurnkeySignerError;
use crate::wallet_browser::error::BrowserWalletError;
/// Errors produced while interpreting a user-supplied private-key string.
#[derive(Debug, thiserror::Error)]
pub enum PrivateKeyError {
    /// The string was not valid hex for a private key.
    #[error("Failed to create wallet from private key. Private key is invalid hex: {0}")]
    InvalidHex(#[from] FromHexError),
    /// The string is not a valid key but matches the name of an existing
    /// environment variable — the user likely meant `$VAR` rather than `VAR`.
    #[error(
        "Failed to create wallet from private key. Invalid private key. But env var {0} exists. Is the `$` anchor missing?"
    )]
    ExistsAsEnvVar(String),
}
/// Errors that can occur while constructing or using a wallet signer.
///
/// Feature-gated variants mirror the optional signer backends; errors from the
/// underlying crates are forwarded transparently.
#[derive(Debug, thiserror::Error)]
pub enum WalletSignerError {
    #[error(transparent)]
    Local(#[from] LocalSignerError),
    /// Raised instead of the raw keystore MAC-mismatch error to give the user
    /// an actionable message.
    #[error("Failed to decrypt keystore: incorrect password")]
    IncorrectKeystorePassword,
    #[error(transparent)]
    Ledger(#[from] LedgerError),
    #[error(transparent)]
    Trezor(#[from] TrezorError),
    #[error(transparent)]
    #[cfg(feature = "aws-kms")]
    Aws(#[from] Box<AwsSignerError>),
    #[error(transparent)]
    #[cfg(feature = "gcp-kms")]
    Gcp(#[from] Box<GcpSignerError>),
    #[error(transparent)]
    #[cfg(feature = "turnkey")]
    Turnkey(#[from] TurnkeySignerError),
    #[error(transparent)]
    Browser(#[from] BrowserWalletError),
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error(transparent)]
    InvalidHex(#[from] FromHexError),
    #[error(transparent)]
    Ecdsa(#[from] ecdsa::Error),
    /// The requested backend was not compiled into this build.
    #[error("foundry was not built with support for {0} signer")]
    UnsupportedSigner(&'static str),
}
impl WalletSignerError {
    /// Error returned when foundry was built without the `aws-kms` feature.
    pub fn aws_unsupported() -> Self {
        Self::UnsupportedSigner("AWS KMS")
    }
    /// Error returned when foundry was built without the `gcp-kms` feature.
    pub fn gcp_unsupported() -> Self {
        Self::UnsupportedSigner("Google Cloud KMS")
    }
    /// Error returned when foundry was built without the `turnkey` feature.
    pub fn turnkey_unsupported() -> Self {
        Self::UnsupportedSigner("Turnkey")
    }
    /// Error returned when foundry was built without browser-wallet support.
    pub fn browser_unsupported() -> Self {
        Self::UnsupportedSigner("Browser Wallet")
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/utils.rs | crates/wallets/src/utils.rs | use crate::{PendingSigner, WalletSigner, error::PrivateKeyError};
use alloy_primitives::{B256, hex::FromHex};
use alloy_signer_ledger::HDPath as LedgerHDPath;
use alloy_signer_local::PrivateKeySigner;
use alloy_signer_trezor::HDPath as TrezorHDPath;
use eyre::{Context, Result};
use foundry_config::Config;
use std::{
fs,
path::{Path, PathBuf},
};
/// Errors if `pk` looks like the *name* of an existing environment variable
/// rather than a hex private key — the classic missing-`$` mistake.
fn ensure_pk_not_env(pk: &str) -> Result<()> {
    let looks_like_env_var = !pk.starts_with("0x") && std::env::var(pk).is_ok();
    if looks_like_env_var {
        return Err(PrivateKeyError::ExistsAsEnvVar(pk.to_string()).into());
    }
    Ok(())
}
/// Validates and sanitizes user inputs, returning configured [WalletSigner].
pub fn create_private_key_signer(private_key_str: &str) -> Result<WalletSigner> {
    // Parse the hex first; on any failure check whether the user passed an
    // env-var name by mistake before reporting a generic error.
    let private_key = match B256::from_hex(private_key_str) {
        Ok(bytes) => bytes,
        Err(_) => {
            ensure_pk_not_env(private_key_str)?;
            eyre::bail!("Failed to decode private key")
        }
    };
    match PrivateKeySigner::from_bytes(&private_key) {
        Ok(signer) => Ok(WalletSigner::Local(signer)),
        Err(err) => {
            ensure_pk_not_env(private_key_str)?;
            eyre::bail!("Failed to create wallet from private key: {err}")
        }
    }
}
/// Creates [WalletSigner] instance from given mnemonic parameters.
///
/// Mnemonic can be either a file path or a mnemonic phrase.
pub fn create_mnemonic_signer(
    mnemonic: &str,
    passphrase: Option<&str>,
    hd_path: Option<&str>,
    index: u32,
) -> Result<WalletSigner> {
    // Accept the phrase inline or as a path to a file containing it.
    let raw_phrase = if Path::new(mnemonic).is_file() {
        fs::read_to_string(mnemonic)?
    } else {
        mnemonic.to_owned()
    };
    // Collapse newlines and repeated spaces into single spaces.
    let normalized = raw_phrase.split_whitespace().collect::<Vec<_>>().join(" ");
    Ok(WalletSigner::from_mnemonic(&normalized, passphrase, hd_path, index)?)
}
/// Creates [WalletSigner] instance from given Ledger parameters.
pub async fn create_ledger_signer(
    hd_path: Option<&str>,
    mnemonic_index: u32,
) -> Result<WalletSigner> {
    // An explicit HD path wins; otherwise use the LedgerLive path at the index.
    let derivation = match hd_path {
        Some(path) => LedgerHDPath::Other(path.to_owned()),
        None => LedgerHDPath::LedgerLive(mnemonic_index as usize),
    };
    WalletSigner::from_ledger_path(derivation).await.wrap_err_with(|| {
        "\
Could not connect to Ledger device.
Make sure it's connected and unlocked, with no other desktop wallet apps open."
    })
}
/// Creates [WalletSigner] instance from given Trezor parameters.
pub async fn create_trezor_signer(
    hd_path: Option<&str>,
    mnemonic_index: u32,
) -> Result<WalletSigner> {
    // An explicit HD path wins; otherwise use the TrezorLive path at the index.
    let derivation = match hd_path {
        Some(path) => TrezorHDPath::Other(path.to_owned()),
        None => TrezorHDPath::TrezorLive(mnemonic_index as usize),
    };
    WalletSigner::from_trezor_path(derivation).await.wrap_err_with(|| {
        "\
Could not connect to Trezor device.
Make sure it's connected and unlocked, with no other conflicting desktop wallet apps open."
    })
}
/// Resolves a keystore location from an explicit path or an account name
/// (looked up under the default foundry keystore directory). An explicit path
/// takes precedence over the name.
pub fn maybe_get_keystore_path(
    maybe_path: Option<&str>,
    maybe_name: Option<&str>,
) -> Result<Option<PathBuf>> {
    let default_keystore_dir = Config::foundry_keystores_dir()
        .ok_or_else(|| eyre::eyre!("Could not find the default keystore directory."))?;
    let resolved = match (maybe_path, maybe_name) {
        (Some(path), _) => Some(PathBuf::from(path)),
        (None, Some(name)) => Some(default_keystore_dir.join(name)),
        (None, None) => None,
    };
    Ok(resolved)
}
/// Creates a keystore signer from the given parameters.
///
/// If a password or password file is provided, the keystore is decrypted eagerly and a
/// [WalletSigner] is returned. Otherwise a [PendingSigner] is returned, which can be used to
/// unlock the keystore later, prompting the user for the password.
///
/// Exactly one element of the returned tuple is `Some`.
///
/// # Errors
///
/// Fails if the keystore file does not exist or is a directory, if the password file is missing
/// or unreadable, or if decryption with the supplied password fails.
pub fn create_keystore_signer(
    path: &Path,
    maybe_password: Option<&str>,
    maybe_password_file: Option<&str>,
) -> Result<(Option<WalletSigner>, Option<PendingSigner>)> {
    if !path.exists() {
        eyre::bail!("Keystore file `{path:?}` does not exist")
    }
    if path.is_dir() {
        eyre::bail!(
            "Keystore path `{path:?}` is a directory. Please specify the keystore file directly."
        )
    }
    // An explicit password takes precedence over a password file.
    let password = if let Some(password) = maybe_password {
        Some(password.to_string())
    } else if let Some(password_file) = maybe_password_file {
        let password_file = Path::new(password_file);
        if !password_file.is_file() {
            eyre::bail!("Keystore password file `{password_file:?}` does not exist")
        }
        Some(
            fs::read_to_string(password_file)
                .wrap_err_with(|| {
                    format!("Failed to read keystore password file at {password_file:?}")
                })?
                // Strip the trailing newline that editors commonly append.
                .trim_end()
                .to_string(),
        )
    } else {
        None
    };
    if let Some(password) = password {
        let wallet = PrivateKeySigner::decrypt_keystore(path, password)
            .wrap_err_with(|| format!("Failed to decrypt keystore {path:?}"))?;
        Ok((Some(WalletSigner::Local(wallet)), None))
    } else {
        // Defer decryption: the caller unlocks interactively later.
        Ok((None, Some(PendingSigner::Keystore(path.to_path_buf()))))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Sanity check: a random 32-byte key renders as 0x-prefixed hex via
    // `to_string`, and parsing must accept it both with and without `0x`.
    #[test]
    fn parse_private_key_signer() {
        let pk = B256::random();
        let pk_str = pk.to_string();
        assert!(create_private_key_signer(&pk_str).is_ok());
        // skip 0x
        assert!(create_private_key_signer(&pk_str[2..]).is_ok());
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/opts.rs | crates/wallets/src/opts.rs | use crate::{signer::WalletSigner, utils, wallet_raw::RawWalletOpts};
use alloy_primitives::Address;
use clap::Parser;
use eyre::Result;
use serde::Serialize;
/// The wallet options can either be:
/// 1. Raw (via private key / mnemonic file, see `RawWallet`)
/// 2. Keystore (via file path)
/// 3. Ledger
/// 4. Trezor
/// 5. AWS KMS
/// 6. Google Cloud KMS
/// 7. Turnkey
/// 8. Browser wallet
// NOTE: clap's derive surfaces the `///` comments on the fields below as
// `--help` text; keep them phrased for end users. `//` comments are ignored.
#[derive(Clone, Debug, Default, Serialize, Parser)]
#[command(next_help_heading = "Wallet options", about = None, long_about = None)]
pub struct WalletOpts {
    /// The sender account.
    #[arg(
        long,
        short,
        value_name = "ADDRESS",
        help_heading = "Wallet options - raw",
        env = "ETH_FROM"
    )]
    pub from: Option<Address>,
    // Private key / mnemonic / interactive options, flattened into this struct.
    #[command(flatten)]
    pub raw: RawWalletOpts,
    /// Use the keystore in the given folder or file.
    #[arg(
        long = "keystore",
        help_heading = "Wallet options - keystore",
        value_name = "PATH",
        env = "ETH_KEYSTORE"
    )]
    pub keystore_path: Option<String>,
    /// Use a keystore from the default keystores folder (~/.foundry/keystores) by its filename
    #[arg(
        long = "account",
        help_heading = "Wallet options - keystore",
        value_name = "ACCOUNT_NAME",
        env = "ETH_KEYSTORE_ACCOUNT",
        conflicts_with = "keystore_path"
    )]
    pub keystore_account_name: Option<String>,
    /// The keystore password.
    ///
    /// Used with --keystore.
    // NOTE(review): `requires = "keystore_path"` means `--password` cannot be
    // combined with `--account` (which conflicts with `keystore_path`);
    // confirm that restriction is intended.
    #[arg(
        long = "password",
        help_heading = "Wallet options - keystore",
        requires = "keystore_path",
        value_name = "PASSWORD"
    )]
    pub keystore_password: Option<String>,
    /// The keystore password file path.
    ///
    /// Used with --keystore.
    #[arg(
        long = "password-file",
        help_heading = "Wallet options - keystore",
        requires = "keystore_path",
        value_name = "PASSWORD_FILE",
        env = "ETH_PASSWORD"
    )]
    pub keystore_password_file: Option<String>,
    /// Use a Ledger hardware wallet.
    #[arg(long, short, help_heading = "Wallet options - hardware wallet")]
    pub ledger: bool,
    /// Use a Trezor hardware wallet.
    #[arg(long, short, help_heading = "Wallet options - hardware wallet")]
    pub trezor: bool,
    // The remote-signer flags below are hidden from `--help` unless the
    // corresponding cargo feature is enabled at build time.
    /// Use AWS Key Management Service.
    ///
    /// Ensure the AWS_KMS_KEY_ID environment variable is set.
    #[arg(long, help_heading = "Wallet options - remote", hide = !cfg!(feature = "aws-kms"))]
    pub aws: bool,
    /// Use Google Cloud Key Management Service.
    ///
    /// Ensure the following environment variables are set: GCP_PROJECT_ID, GCP_LOCATION,
    /// GCP_KEY_RING, GCP_KEY_NAME, GCP_KEY_VERSION.
    ///
    /// See: <https://cloud.google.com/kms/docs>
    #[arg(long, help_heading = "Wallet options - remote", hide = !cfg!(feature = "gcp-kms"))]
    pub gcp: bool,
    /// Use Turnkey.
    ///
    /// Ensure the following environment variables are set: TURNKEY_API_PRIVATE_KEY,
    /// TURNKEY_ORGANIZATION_ID, TURNKEY_ADDRESS.
    ///
    /// See: <https://docs.turnkey.com/getting-started/quickstart>
    #[arg(long, help_heading = "Wallet options - remote", hide = !cfg!(feature = "turnkey"))]
    pub turnkey: bool,
    /// Use a browser wallet.
    #[arg(long, help_heading = "Wallet options - browser")]
    pub browser: bool,
    /// Port for the browser wallet server.
    #[arg(
        long,
        help_heading = "Wallet options - browser",
        value_name = "PORT",
        default_value = "9545",
        requires = "browser"
    )]
    pub browser_port: u16,
    /// Whether to open the browser for wallet connection.
    // NOTE(review): the help text reads "Whether to open" but this flag
    // *disables* opening (used as `!browser_disable_open` in `signer()`);
    // consider rewording the help string.
    #[arg(
        long,
        help_heading = "Wallet options - browser",
        default_value_t = false,
        requires = "browser"
    )]
    pub browser_disable_open: bool,
    /// Enable development mode for the browser wallet.
    /// This relaxes certain security features for local development.
    ///
    /// **WARNING**: This should only be used in a development environment.
    #[arg(long, help_heading = "Wallet options - browser", hide = true)]
    pub browser_development: bool,
}
impl WalletOpts {
    /// Resolves these options into a concrete [`WalletSigner`].
    ///
    /// Sources are tried in a fixed priority order: Ledger, Trezor, AWS KMS,
    /// GCP KMS, Turnkey, browser, raw (private key / mnemonic / interactive),
    /// and finally keystore. Remote signers read their configuration from
    /// environment variables and error out when one is missing.
    pub async fn signer(&self) -> Result<WalletSigner> {
        trace!("start finding signer");
        // Turns a missing env var into a user-facing error naming the variable.
        let get_env = |key: &str| {
            std::env::var(key)
                .map_err(|_| eyre::eyre!("{key} environment variable is required for signer"))
        };
        let signer = if self.ledger {
            utils::create_ledger_signer(self.raw.hd_path.as_deref(), self.raw.mnemonic_index)
                .await?
        } else if self.trezor {
            utils::create_trezor_signer(self.raw.hd_path.as_deref(), self.raw.mnemonic_index)
                .await?
        } else if self.aws {
            let key_id = get_env("AWS_KMS_KEY_ID")?;
            WalletSigner::from_aws(key_id).await?
        } else if self.gcp {
            let project_id = get_env("GCP_PROJECT_ID")?;
            let location = get_env("GCP_LOCATION")?;
            let keyring = get_env("GCP_KEY_RING")?;
            let key_name = get_env("GCP_KEY_NAME")?;
            let key_version = get_env("GCP_KEY_VERSION")?
                .parse()
                .map_err(|_| eyre::eyre!("GCP_KEY_VERSION could not be parsed into u64"))?;
            WalletSigner::from_gcp(project_id, location, keyring, key_name, key_version).await?
        } else if self.turnkey {
            let api_private_key = get_env("TURNKEY_API_PRIVATE_KEY")?;
            let organization_id = get_env("TURNKEY_ORGANIZATION_ID")?;
            let address_str = get_env("TURNKEY_ADDRESS")?;
            let address = address_str.parse().map_err(|_| {
                eyre::eyre!("TURNKEY_ADDRESS could not be parsed as an Ethereum address")
            })?;
            WalletSigner::from_turnkey(api_private_key, organization_id, address)?
        } else if self.browser {
            WalletSigner::from_browser(
                self.browser_port,
                // `browser_disable_open` is inverted: true means "do not open".
                !self.browser_disable_open,
                self.browser_development,
            )
            .await?
        } else if let Some(raw_wallet) = self.raw.signer()? {
            raw_wallet
        } else if let Some(path) = utils::maybe_get_keystore_path(
            self.keystore_path.as_deref(),
            self.keystore_account_name.as_deref(),
        )? {
            let (maybe_signer, maybe_pending) = utils::create_keystore_signer(
                &path,
                self.keystore_password.as_deref(),
                self.keystore_password_file.as_deref(),
            )?;
            if let Some(pending) = maybe_pending {
                // No password was supplied up front: prompt the user now.
                pending.unlock()?
            } else if let Some(signer) = maybe_signer {
                signer
            } else {
                // `create_keystore_signer` always returns exactly one of
                // (signer, pending), so this branch cannot be reached.
                unreachable!()
            }
        } else {
            eyre::bail!(
                "\
Error accessing local wallet. Did you pass a keystore, hardware wallet, private key or mnemonic?
Run the command with --help flag for more information or use the corresponding CLI
flag to set your key via:
--keystore
--interactive
--private-key
--mnemonic-path
--aws
--gcp
--turnkey
--trezor
--ledger
--browser
Alternatively, when using the `cast send` or `cast mktx` commands with a local node
or RPC that has unlocked accounts, the --unlocked or --ethsign flags can be used,
respectively. The sender address can be specified by setting the `ETH_FROM` environment
variable to the desired unlocked account address, or by providing the address directly
using the --from flag."
            )
        };
        Ok(signer)
    }
}
impl From<RawWalletOpts> for WalletOpts {
    /// Wraps raw wallet options in an otherwise-default `WalletOpts`, leaving
    /// every other source (keystore, hardware, remote, browser) disabled.
    fn from(raw: RawWalletOpts) -> Self {
        let mut opts = Self::default();
        opts.raw = raw;
        opts
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_signer::Signer;
    use std::{path::Path, str::FromStr};
    // End-to-end: parse CLI args pointing at a fixture keystore + password
    // file, then check the decrypted signer recovers the expected address.
    #[tokio::test]
    async fn find_keystore() {
        let keystore =
            Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/../cast/tests/fixtures/keystore"));
        let keystore_file = keystore
            .join("UTC--2022-12-20T10-30-43.591916000Z--ec554aeafe75601aaab43bd4621a22284db566c2");
        let password_file = keystore.join("password-ec554");
        let wallet: WalletOpts = WalletOpts::parse_from([
            "foundry-cli",
            "--from",
            "560d246fcddc9ea98a8b032c9a2f474efb493c28",
            "--keystore",
            keystore_file.to_str().unwrap(),
            "--password-file",
            password_file.to_str().unwrap(),
        ]);
        let signer = wallet.signer().await.unwrap();
        assert_eq!(
            signer.address(),
            Address::from_str("ec554aeafe75601aaab43bd4621a22284db566c2").unwrap()
        );
    }
    // A short, non-hex key must surface the friendly decode error rather than
    // a panic or a raw parser error.
    #[tokio::test]
    async fn illformed_private_key_generates_user_friendly_error() {
        let wallet = WalletOpts {
            raw: RawWalletOpts {
                interactive: false,
                private_key: Some("123".to_string()),
                mnemonic: None,
                mnemonic_passphrase: None,
                hd_path: None,
                mnemonic_index: 0,
            },
            from: None,
            keystore_path: None,
            keystore_account_name: None,
            keystore_password: None,
            keystore_password_file: None,
            ledger: false,
            trezor: false,
            aws: false,
            gcp: false,
            turnkey: false,
            browser: false,
            browser_port: 9545,
            browser_development: false,
            browser_disable_open: false,
        };
        match wallet.signer().await {
            Ok(_) => {
                panic!("illformed private key shouldn't decode")
            }
            Err(x) => {
                assert!(
                    x.to_string().contains("Failed to decode private key"),
                    "Error message is not user-friendly"
                );
            }
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/signer.rs | crates/wallets/src/wallet_browser/signer.rs | use std::{
sync::Arc,
time::{Duration, Instant},
};
use alloy_consensus::SignableTransaction;
use alloy_dyn_abi::TypedData;
use alloy_network::TxSigner;
use alloy_primitives::{Address, B256, ChainId, hex};
use alloy_rpc_types::TransactionRequest;
use alloy_signer::{Result, Signature, Signer, SignerSync};
use alloy_sol_types::{Eip712Domain, SolStruct};
use async_trait::async_trait;
use tokio::sync::Mutex;
use uuid::Uuid;
use crate::wallet_browser::{
server::BrowserWalletServer,
types::{BrowserSignRequest, BrowserTransactionRequest, Connection, SignRequest, SignType},
};
/// A signer backed by a wallet running in the user's browser.
///
/// A local HTTP server brokers requests between this process and the browser
/// page; see [`BrowserWalletServer`].
#[derive(Clone, Debug)]
pub struct BrowserSigner {
    // Local server the browser page polls for pending requests.
    server: Arc<Mutex<BrowserWalletServer>>,
    // Account address reported by the wallet when it connected.
    address: Address,
    // Chain id reported by the wallet when it connected.
    chain_id: ChainId,
}
impl BrowserSigner {
    /// Starts the local wallet server and blocks until a browser wallet
    /// connects or `timeout` elapses.
    pub async fn new(
        port: u16,
        open_browser: bool,
        timeout: Duration,
        development: bool,
    ) -> Result<Self> {
        let mut server = BrowserWalletServer::new(port, open_browser, timeout, development);
        server.start().await.map_err(alloy_signer::Error::other)?;
        let _ = sh_warn!("Browser wallet is still in early development. Use with caution!");
        let _ = sh_println!("Opening browser for wallet connection...");
        let _ = sh_println!("Waiting for wallet connection...");
        let start = Instant::now();
        // Poll for a connection once per second until it appears or we time out.
        loop {
            if let Some(Connection { address, chain_id }) = server.get_connection().await {
                let _ = sh_println!("Wallet connected: {}", address);
                let _ = sh_println!("Chain ID: {}", chain_id);
                return Ok(Self { server: Arc::new(Mutex::new(server)), address, chain_id });
            }
            if start.elapsed() > timeout {
                return Err(alloy_signer::Error::other("Wallet connection timeout"));
            }
            tokio::time::sleep(Duration::from_secs(1)).await;
        }
    }
    /// Send a transaction through the browser wallet.
    ///
    /// Rejects requests whose `from` / `chainId` disagree with the connected
    /// wallet before forwarding them to the browser.
    pub async fn send_transaction_via_browser(
        &self,
        tx_request: TransactionRequest,
    ) -> Result<B256> {
        // The browser can only act for the account/chain it connected with.
        if let Some(from) = tx_request.from
            && from != self.address
        {
            return Err(alloy_signer::Error::other(
                "Transaction `from` address does not match connected wallet address",
            ));
        }
        if let Some(chain_id) = tx_request.chain_id
            && chain_id != self.chain_id
        {
            return Err(alloy_signer::Error::other(
                "Transaction `chainId` does not match connected wallet chain ID",
            ));
        }
        let request = BrowserTransactionRequest { id: Uuid::new_v4(), request: tx_request };
        let server = self.server.lock().await;
        let tx_hash =
            server.request_transaction(request).await.map_err(alloy_signer::Error::other)?;
        // NOTE(review): purpose of this delay is not evident from this file —
        // presumably it lets the browser UI settle before the next request;
        // confirm and document.
        tokio::time::sleep(Duration::from_millis(500)).await;
        Ok(tx_hash)
    }
}
impl SignerSync for BrowserSigner {
    /// Browser wallets cannot sign synchronously; always errors.
    fn sign_hash_sync(&self, _hash: &B256) -> Result<Signature> {
        Err(alloy_signer::Error::other(
            "Browser wallets cannot sign raw hashes. Use sign_message or send_transaction instead.",
        ))
    }
    /// Browser wallets cannot sign synchronously; always errors.
    fn sign_message_sync(&self, _message: &[u8]) -> Result<Signature> {
        Err(alloy_signer::Error::other(
            "Browser signer requires async operations. Use sign_message instead.",
        ))
    }
    /// Returns the chain id reported by the wallet when it connected.
    fn chain_id_sync(&self) -> Option<ChainId> {
        Some(self.chain_id)
    }
}
#[async_trait]
impl Signer for BrowserSigner {
    /// Raw hash signing is unsupported; always errors.
    async fn sign_hash(&self, _hash: &B256) -> Result<Signature> {
        Err(alloy_signer::Error::other(
            "Browser wallets sign and send transactions in one step. Use eth_sendTransaction instead.",
        ))
    }
    /// Static typed-data signing is unsupported (the generic payload cannot be
    /// forwarded to the browser); always errors.
    async fn sign_typed_data<T: SolStruct + Send + Sync>(
        &self,
        _payload: &T,
        _domain: &Eip712Domain,
    ) -> Result<Signature>
    where
        Self: Sized,
    {
        // Not directly supported - use sign_dynamic_typed_data instead
        Err(alloy_signer::Error::other(
            "Browser wallets cannot sign typed data directly. Use sign_dynamic_typed_data instead.",
        ))
    }
    /// Routes a `personal_sign` request through the local server queue and
    /// waits for the browser's signature.
    async fn sign_message(&self, message: &[u8]) -> Result<Signature> {
        let request = BrowserSignRequest {
            id: Uuid::new_v4(),
            sign_type: SignType::PersonalSign,
            // Message is forwarded as 0x-prefixed hex.
            request: SignRequest { message: hex::encode_prefixed(message), address: self.address },
        };
        let server = self.server.lock().await;
        let signature =
            server.request_signing(request).await.map_err(alloy_signer::Error::other)?;
        Signature::try_from(signature.as_ref())
            .map_err(|e| alloy_signer::Error::other(format!("Invalid signature: {e}")))
    }
    /// Routes an `eth_signTypedData_v4` request through the local server.
    async fn sign_dynamic_typed_data(&self, payload: &TypedData) -> Result<Signature> {
        let server = self.server.lock().await;
        let signature = server
            .request_typed_data_signing(self.address, payload.clone())
            .await
            .map_err(alloy_signer::Error::other)?;
        // Parse the signature
        Signature::try_from(signature.as_ref())
            .map_err(|e| alloy_signer::Error::other(format!("Invalid signature: {e}")))
    }
    fn address(&self) -> Address {
        self.address
    }
    fn chain_id(&self) -> Option<ChainId> {
        Some(self.chain_id)
    }
    /// `None` is ignored: the chain id is only ever replaced, never cleared.
    fn set_chain_id(&mut self, chain_id: Option<ChainId>) {
        if let Some(id) = chain_id {
            self.chain_id = id;
        }
    }
}
#[async_trait]
impl TxSigner<Signature> for BrowserSigner {
    fn address(&self) -> Address {
        self.address
    }
    /// Always errors: browser wallets sign and broadcast in a single
    /// `eth_sendTransaction` step, so a detached signature is not available.
    async fn sign_transaction(
        &self,
        _tx: &mut dyn SignableTransaction<Signature>,
    ) -> Result<Signature> {
        Err(alloy_signer::Error::other("Use send_transaction_via_browser for browser wallets"))
    }
}
impl Drop for BrowserSigner {
    /// Best-effort shutdown of the local wallet server.
    ///
    /// `Drop` cannot be async, so shutdown is spawned as a background task.
    /// `tokio::spawn` panics when called outside a runtime context (e.g. when
    /// the signer is dropped after the runtime has shut down), so we only
    /// spawn when a runtime is actually available; otherwise the OS reclaims
    /// the listening socket on process exit.
    fn drop(&mut self) {
        if let Ok(handle) = tokio::runtime::Handle::try_current() {
            let server = self.server.clone();
            handle.spawn(async move {
                let mut server = server.lock().await;
                let _ = server.stop().await;
            });
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/router.rs | crates/wallets/src/wallet_browser/router.rs | use std::sync::Arc;
use axum::{
Router,
extract::{Request, State},
http::{HeaderValue, Method, StatusCode, header},
middleware::{self, Next},
response::Response,
routing::{get, post},
};
use tower::ServiceBuilder;
use tower_http::{cors::CorsLayer, set_header::SetResponseHeaderLayer};
use crate::wallet_browser::{handlers, state::BrowserWalletState};
/// Builds the axum router for the browser wallet server: static assets at the
/// root, the JSON API under `/api` guarded by the session token, and security
/// headers (CSP, CORS, no-sniff) applied to everything.
pub async fn build_router(state: Arc<BrowserWalletState>, port: u16) -> Router {
    // JSON API used by the served page; every route requires the session token.
    let api = Router::new()
        .route("/transaction/request", get(handlers::get_next_transaction_request))
        .route("/transaction/response", post(handlers::post_transaction_response))
        .route("/signing/request", get(handlers::get_next_signing_request))
        .route("/signing/response", post(handlers::post_signing_response))
        .route("/connection", get(handlers::get_connection_info))
        .route("/connection", post(handlers::post_connection_update))
        .route_layer(middleware::from_fn_with_state(state.clone(), require_session_token))
        .with_state(state.clone());
    // Only the page we serve ourselves may call the API cross-origin.
    let mut origins = vec![format!("http://127.0.0.1:{port}").parse().unwrap()];
    // Allow default port of 5173 in development mode.
    if state.is_development() {
        // NOTE(review): this origin is https while the loopback origin above
        // is http — confirm the local dev server really serves TLS on 5173.
        origins.push("https://localhost:5173".to_string().parse().unwrap());
    }
    // The strict CSP below also protects the session token injected into
    // main.js from being loaded by other origins (see BrowserWalletState).
    // NOTE(review): "connect-src … ws:;" lacks the trailing space the other
    // directives have — appears harmless under CSP tokenization, but worth
    // normalizing for consistency.
    let security_headers = ServiceBuilder::new()
        .layer(SetResponseHeaderLayer::if_not_present(
            header::CONTENT_SECURITY_POLICY,
            HeaderValue::from_static(concat!(
                "default-src 'none'; ",
                "object-src 'none'; ",
                "base-uri 'none'; ",
                "frame-ancestors 'none'; ",
                "img-src 'self'; ",
                "font-src 'none'; ",
                "connect-src 'self' https: http: wss: ws:;",
                "style-src 'self'; ",
                "script-src 'self'; ",
                "form-action 'none'; ",
                "worker-src 'none'; ",
                "frame-src https://id.porto.sh;"
            )),
        ))
        .layer(SetResponseHeaderLayer::if_not_present(
            header::REFERRER_POLICY,
            HeaderValue::from_static("no-referrer"),
        ))
        .layer(SetResponseHeaderLayer::if_not_present(
            header::X_CONTENT_TYPE_OPTIONS,
            HeaderValue::from_static("nosniff"),
        ))
        .layer(
            CorsLayer::new()
                .allow_origin(origins)
                .allow_methods([Method::GET, Method::POST, Method::OPTIONS])
                .allow_headers([header::CONTENT_TYPE])
                .allow_credentials(false),
        );
    Router::new()
        .route("/", get(handlers::serve_index))
        .route("/styles.css", get(handlers::serve_css))
        .route("/main.js", get(handlers::serve_js))
        .route("/banner.png", get(handlers::serve_banner_png))
        .route("/logo.png", get(handlers::serve_logo_png))
        .nest("/api", api)
        .layer(security_headers)
        .with_state(state)
}
/// Middleware guarding the JSON API: requests must present the per-session
/// token in the `X-Session-Token` header or they are rejected with 403.
///
/// CORS preflight (`OPTIONS`) requests are always allowed, and the check is
/// skipped entirely in development mode.
async fn require_session_token(
    State(state): State<Arc<BrowserWalletState>>,
    req: Request,
    next: Next,
) -> Result<Response, StatusCode> {
    // Preflight requests never carry custom headers; development mode
    // disables the token check for local iteration.
    if req.method() == Method::OPTIONS || state.is_development() {
        return Ok(next.run(req).await);
    }
    let authorized = req
        .headers()
        .get("X-Session-Token")
        .and_then(|value| value.to_str().ok())
        .is_some_and(|token| token == state.session_token());
    if authorized { Ok(next.run(req).await) } else { Err(StatusCode::FORBIDDEN) }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/handlers.rs | crates/wallets/src/wallet_browser/handlers.rs | use std::sync::Arc;
use axum::{
Json,
extract::State,
http::{
HeaderMap, HeaderValue,
header::{CACHE_CONTROL, CONTENT_TYPE, EXPIRES, PRAGMA},
},
response::Html,
};
use crate::wallet_browser::{
app::contents,
state::BrowserWalletState,
types::{
BrowserApiResponse, BrowserSignRequest, BrowserSignResponse, BrowserTransactionRequest,
BrowserTransactionResponse, Connection,
},
};
/// Serve index.html with caching fully disabled so the page always reflects
/// the current session.
pub(crate) async fn serve_index() -> impl axum::response::IntoResponse {
    let mut headers = HeaderMap::new();
    for (name, value) in [
        (CONTENT_TYPE, "text/html; charset=utf-8"),
        (CACHE_CONTROL, "no-store, no-cache, must-revalidate, max-age=0"),
        (PRAGMA, "no-cache"),
        (EXPIRES, "0"),
    ] {
        headers.insert(name, HeaderValue::from_static(value));
    }
    (headers, Html(contents::INDEX_HTML))
}
/// Serve styles.css with caching fully disabled.
pub(crate) async fn serve_css() -> impl axum::response::IntoResponse {
    let mut headers = HeaderMap::new();
    for (name, value) in [
        (CONTENT_TYPE, "text/css; charset=utf-8"),
        (CACHE_CONTROL, "no-store, no-cache, must-revalidate, max-age=0"),
        (PRAGMA, "no-cache"),
        (EXPIRES, "0"),
    ] {
        headers.insert(name, HeaderValue::from_static(value));
    }
    (headers, contents::STYLES_CSS)
}
/// Serve main.js with injected session token.
///
/// The token authenticates the page's API calls; the CSP on the served page
/// prevents this script (and thus the token) from being loaded by other
/// origins.
pub(crate) async fn serve_js(
    State(state): State<Arc<BrowserWalletState>>,
) -> impl axum::response::IntoResponse {
    let token = state.session_token();
    // assumes the token contains no `"` or `\` characters (e.g. hex/uuid),
    // otherwise this would break the generated JS — TODO confirm at the
    // token's generation site.
    let js = format!("window.__SESSION_TOKEN__ = \"{}\";\n{}", token, contents::MAIN_JS);
    let mut headers = HeaderMap::new();
    headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/javascript; charset=utf-8"));
    headers.insert(
        CACHE_CONTROL,
        HeaderValue::from_static("no-store, no-cache, must-revalidate, max-age=0"),
    );
    headers.insert(PRAGMA, HeaderValue::from_static("no-cache"));
    headers.insert(EXPIRES, HeaderValue::from_static("0"));
    (headers, js)
}
/// Serve banner.png as an immutable, long-cached static asset.
pub(crate) async fn serve_banner_png() -> impl axum::response::IntoResponse {
    let mut headers = HeaderMap::new();
    for (name, value) in [
        (CONTENT_TYPE, "image/png"),
        (CACHE_CONTROL, "public, max-age=31536000, immutable"),
    ] {
        headers.insert(name, HeaderValue::from_static(value));
    }
    (headers, contents::BANNER_PNG)
}
/// Serve logo.png as an immutable, long-cached static asset.
pub(crate) async fn serve_logo_png() -> impl axum::response::IntoResponse {
    let mut headers = HeaderMap::new();
    for (name, value) in [
        (CONTENT_TYPE, "image/png"),
        (CACHE_CONTROL, "public, max-age=31536000, immutable"),
    ] {
        headers.insert(name, HeaderValue::from_static(value));
    }
    (headers, contents::LOGO_PNG)
}
/// Get the next pending transaction request.
/// Route: GET /api/transaction/request
pub(crate) async fn get_next_transaction_request(
State(state): State<Arc<BrowserWalletState>>,
) -> Json<BrowserApiResponse<BrowserTransactionRequest>> {
match state.read_next_transaction_request().await {
Some(tx) => Json(BrowserApiResponse::with_data(tx)),
None => Json(BrowserApiResponse::error("No pending transaction request")),
}
}
/// Post a transaction response (signed or error).
/// Route: POST /api/transaction/response
pub(crate) async fn post_transaction_response(
State(state): State<Arc<BrowserWalletState>>,
Json(body): Json<BrowserTransactionResponse>,
) -> Json<BrowserApiResponse> {
// Ensure that the transaction request exists.
if !state.has_transaction_request(&body.id).await {
return Json(BrowserApiResponse::error("Unknown transaction id"));
}
// Ensure that exactly one of hash or error is provided.
match (&body.hash, &body.error) {
(None, None) => {
return Json(BrowserApiResponse::error("Either hash or error must be provided"));
}
(Some(_), Some(_)) => {
return Json(BrowserApiResponse::error("Only one of hash or error can be provided"));
}
_ => {}
}
// Validate transaction hash if provided.
if let Some(hash) = &body.hash {
// Check for all-zero hash
if hash.is_zero() {
return Json(BrowserApiResponse::error("Invalid (zero) transaction hash"));
}
// Sanity check: ensure the hash is exactly 32 bytes
if hash.as_slice().len() != 32 {
return Json(BrowserApiResponse::error(
"Malformed transaction hash (expected 32 bytes)",
));
}
}
state.add_transaction_response(body).await;
Json(BrowserApiResponse::ok())
}
/// Get the next pending signing request.
/// Route: GET /api/signing/request
pub(crate) async fn get_next_signing_request(
State(state): State<Arc<BrowserWalletState>>,
) -> Json<BrowserApiResponse<BrowserSignRequest>> {
match state.read_next_signing_request().await {
Some(req) => Json(BrowserApiResponse::with_data(req)),
None => Json(BrowserApiResponse::error("No pending signing request")),
}
}
/// Post a signing response (signature or error).
/// Route: POST /api/signing/response
pub(crate) async fn post_signing_response(
State(state): State<Arc<BrowserWalletState>>,
Json(body): Json<BrowserSignResponse>,
) -> Json<BrowserApiResponse> {
// Ensure that the signing request exists.
if !state.has_signing_request(&body.id).await {
return Json(BrowserApiResponse::error("Unknown signing request id"));
}
// Ensure that exactly one of signature or error is provided.
match (&body.signature, &body.error) {
(None, None) => {
return Json(BrowserApiResponse::error("Either signature or error must be provided"));
}
(Some(_), Some(_)) => {
return Json(BrowserApiResponse::error(
"Only one of signature or error can be provided",
));
}
_ => {}
}
state.add_signing_response(body).await;
Json(BrowserApiResponse::ok())
}
/// Get current connection information.
/// Route: GET /api/connection
pub(crate) async fn get_connection_info(
    State(state): State<Arc<BrowserWalletState>>,
) -> Json<BrowserApiResponse<Option<Connection>>> {
    // `None` data simply means no wallet is connected yet.
    Json(BrowserApiResponse::with_data(state.get_connection().await))
}
/// Post connection update (connect or disconnect).
/// Route: POST /api/connection
pub(crate) async fn post_connection_update(
    State(state): State<Arc<BrowserWalletState>>,
    Json(body): Json<Option<Connection>>,
) -> Json<BrowserApiResponse> {
    // `Some(conn)` records a (re)connection; `None` clears it (disconnect).
    state.set_connection(body).await;
    Json(BrowserApiResponse::ok())
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/state.rs | crates/wallets/src/wallet_browser/state.rs | use std::sync::Arc;
use tokio::sync::{Mutex, RwLock};
use uuid::Uuid;
use crate::wallet_browser::{
queue::RequestQueue,
types::{
BrowserSignRequest, BrowserSignResponse, BrowserTransactionRequest,
BrowserTransactionResponse, Connection,
},
};
/// Shared state behind the browser wallet HTTP server.
///
/// `Clone` is shallow for the mutable parts: the connection and both queues
/// live behind `Arc`s, so every clone observes and mutates the same state;
/// `session_token` and `development` are immutable copies.
#[derive(Debug, Clone)]
pub(crate) struct BrowserWalletState {
    /// Current information about the wallet connection.
    connection: Arc<RwLock<Option<Connection>>>,
    /// Request/response queue for transactions.
    transactions: Arc<Mutex<RequestQueue<BrowserTransactionRequest, BrowserTransactionResponse>>>,
    /// Request/response queue for signings.
    signings: Arc<Mutex<RequestQueue<BrowserSignRequest, BrowserSignResponse>>>,
    /// Unique session token for the wallet browser instance.
    /// The CSP on the served page prevents this token from being loaded by other origins.
    session_token: String,
    /// If true, the server is running in development mode.
    /// This relaxes certain security restrictions for local development.
    ///
    /// **WARNING**: This should only be used in a development environment.
    development: bool,
}
impl BrowserWalletState {
    /// Create a new browser wallet state.
    pub fn new(session_token: String, development: bool) -> Self {
        Self {
            connection: Arc::new(RwLock::new(None)),
            transactions: Arc::new(Mutex::new(RequestQueue::new())),
            signings: Arc::new(Mutex::new(RequestQueue::new())),
            session_token,
            development,
        }
    }
    /// Get the session token.
    pub fn session_token(&self) -> &str {
        &self.session_token
    }
    /// Check if in development mode.
    /// This relaxes certain security restrictions for local development.
    ///
    /// **WARNING**: This should only be used in a development environment.
    pub fn is_development(&self) -> bool {
        self.development
    }
    /// Check if wallet is connected.
    pub async fn is_connected(&self) -> bool {
        self.connection.read().await.is_some()
    }
    /// Get current connection information.
    pub async fn get_connection(&self) -> Option<Connection> {
        *self.connection.read().await
    }
    /// Set connection information.
    pub async fn set_connection(&self, connection: Option<Connection>) {
        *self.connection.write().await = connection;
    }
    /// Add a transaction request.
    pub async fn add_transaction_request(&self, request: BrowserTransactionRequest) {
        self.transactions.lock().await.add_request(request);
    }
    /// Check if a transaction request exists.
    pub async fn has_transaction_request(&self, id: &Uuid) -> bool {
        self.transactions.lock().await.has_request(id)
    }
    /// Read the next transaction request (without consuming it).
    pub async fn read_next_transaction_request(&self) -> Option<BrowserTransactionRequest> {
        self.transactions.lock().await.read_request().cloned()
    }
    /// Remove a transaction request.
    pub async fn remove_transaction_request(&self, id: &Uuid) {
        self.transactions.lock().await.remove_request(id);
    }
    /// Add transaction response.
    pub async fn add_transaction_response(&self, response: BrowserTransactionResponse) {
        let id = response.id;
        // Single lock acquisition: the response appears and the matching
        // request disappears atomically from the queue's point of view.
        let mut transactions = self.transactions.lock().await;
        transactions.add_response(id, response);
        transactions.remove_request(&id);
    }
    /// Get transaction response, removing it from the queue.
    pub async fn get_transaction_response(&self, id: &Uuid) -> Option<BrowserTransactionResponse> {
        self.transactions.lock().await.get_response(id)
    }
    /// Add a signing request.
    pub async fn add_signing_request(&self, request: BrowserSignRequest) {
        self.signings.lock().await.add_request(request);
    }
    /// Check if a signing request exists.
    pub async fn has_signing_request(&self, id: &Uuid) -> bool {
        self.signings.lock().await.has_request(id)
    }
    /// Read the next signing request (without consuming it).
    pub async fn read_next_signing_request(&self) -> Option<BrowserSignRequest> {
        self.signings.lock().await.read_request().cloned()
    }
    /// Remove a signing request.
    pub async fn remove_signing_request(&self, id: &Uuid) {
        self.signings.lock().await.remove_request(id);
    }
    /// Add signing response.
    pub async fn add_signing_response(&self, response: BrowserSignResponse) {
        let id = response.id;
        // Single lock acquisition; see `add_transaction_response`.
        let mut signings = self.signings.lock().await;
        signings.add_response(id, response);
        signings.remove_request(&id);
    }
    /// Get signing response, removing it from the queue.
    pub async fn get_signing_response(&self, id: &Uuid) -> Option<BrowserSignResponse> {
        self.signings.lock().await.get_response(id)
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/error.rs | crates/wallets/src/wallet_browser/error.rs | use alloy_signer::Error as SignerError;
/// Errors produced by the browser wallet server and signer.
#[derive(Debug, thiserror::Error)]
pub enum BrowserWalletError {
    /// The browser did not answer the request before the configured timeout.
    #[error("{operation} request timed out")]
    Timeout { operation: &'static str },
    /// The wallet reported the request as rejected (e.g. the user declined).
    #[error("{operation} rejected: {reason}")]
    Rejected { operation: &'static str, reason: String },
    /// An operation required a connected wallet, but none is connected.
    #[error("Wallet not connected")]
    NotConnected,
    /// Any other failure originating from the local HTTP server.
    #[error("Server error: {0}")]
    ServerError(String),
}
/// Lets `?` convert browser wallet errors into the generic signer error.
impl From<BrowserWalletError> for SignerError {
    fn from(err: BrowserWalletError) -> Self {
        Self::other(err)
    }
}
/// Folds signer errors into `ServerError`, keeping only the message text.
impl From<SignerError> for BrowserWalletError {
    fn from(err: SignerError) -> Self {
        Self::ServerError(err.to_string())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/types.rs | crates/wallets/src/wallet_browser/types.rs | use alloy_dyn_abi::TypedData;
use alloy_primitives::{Address, Bytes, ChainId, TxHash};
use alloy_rpc_types::TransactionRequest;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// Response format for API endpoints.
/// - `Ok(T)` serializes as: {"status":"ok","data": ...}
/// - `Ok(())` serializes as: {"status":"ok"} (no data key)
/// - `Error { message }` as: {"status":"error","message":"..."}
// NOTE(review): with adjacent tagging (`tag` + `content`), serde nests a
// variant's fields under the content key, so `Error { message }` likely
// serializes as {"status":"error","data":{"message":"..."}} — verify the
// examples above against actual output and the frontend's expectations.
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "status", content = "data", rename_all = "lowercase")]
pub(crate) enum BrowserApiResponse<T = ()> {
    Ok(T),
    Error { message: String },
}
impl BrowserApiResponse<()> {
    /// Create a successful response with no data.
    pub fn ok() -> Self {
        Self::Ok(())
    }
}
impl<T> BrowserApiResponse<T> {
    /// Create a successful response with the given data.
    pub fn with_data(data: T) -> Self {
        Self::Ok(data)
    }
    /// Create an error response with the given message.
    pub fn error(msg: impl Into<String>) -> Self {
        Self::Error { message: msg.into() }
    }
}
/// Represents a transaction request sent to the browser wallet.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct BrowserTransactionRequest {
/// The unique identifier for the transaction.
pub id: Uuid,
/// The transaction request details.
pub request: TransactionRequest,
}
/// Represents a transaction response sent from the browser wallet.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub(crate) struct BrowserTransactionResponse {
/// The unique identifier for the transaction, must match the request ID sent earlier.
pub id: Uuid,
/// The transaction hash if the transaction was successful.
pub hash: Option<TxHash>,
/// The error message if the transaction failed.
pub error: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub enum SignType {
/// Standard personal sign: `eth_sign` / `personal_sign`
PersonalSign,
/// EIP-712 typed data sign: `eth_signTypedData_v4`
SignTypedDataV4,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct SignRequest {
/// The message to be signed.
pub message: String,
/// The address that should sign the message.
pub address: Address,
}
/// Represents a signing request sent to the browser wallet.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct BrowserSignRequest {
/// The unique identifier for the signing request.
pub id: Uuid,
/// The type of signing operation.
pub sign_type: SignType,
/// The sign request details.
pub request: SignRequest,
}
/// Represents a typed data signing request sent to the browser wallet.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct BrowserSignTypedDataRequest {
/// The unique identifier for the signing request.
pub id: Uuid,
/// The address that should sign the typed data.
pub address: Address,
/// The typed data to be signed.
pub typed_data: TypedData,
}
/// Represents a signing response sent from the browser wallet.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub(crate) struct BrowserSignResponse {
/// The unique identifier for the signing request, must match the request ID sent earlier.
pub id: Uuid,
/// The signature if the signing was successful.
pub signature: Option<Bytes>,
/// The error message if the signing failed.
pub error: Option<String>,
}
/// Represents an active connection to a browser wallet.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct Connection {
/// The address of the connected wallet.
pub address: Address,
/// The chain ID of the connected wallet.
pub chain_id: ChainId,
}
impl Connection {
/// Create a new connection instance.
pub fn new(address: Address, chain_id: ChainId) -> Self {
Self { address, chain_id }
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/mod.rs | crates/wallets/src/wallet_browser/mod.rs | pub mod error;
pub mod server;
pub mod signer;
pub mod state;
mod app;
mod handlers;
mod queue;
mod router;
mod types;
#[cfg(test)]
mod tests {
use std::time::Duration;
use alloy_primitives::{Address, Bytes, TxHash, TxKind, U256, address};
use alloy_rpc_types::TransactionRequest;
use axum::http::{HeaderMap, HeaderValue};
use tokio::task::JoinHandle;
use uuid::Uuid;
use crate::wallet_browser::{
error::BrowserWalletError,
server::BrowserWalletServer,
types::{
BrowserApiResponse, BrowserSignRequest, BrowserSignResponse, BrowserTransactionRequest,
BrowserTransactionResponse, Connection, SignRequest, SignType,
},
};
const ALICE: Address = address!("0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266");
const BOB: Address = address!("0x70997970C51812dc3A010C7d01b50e0d17dc79C8");
const DEFAULT_TIMEOUT: Duration = Duration::from_secs(1);
const DEFAULT_DEVELOPMENT: bool = false;
#[tokio::test]
async fn test_setup_server() {
let mut server = create_server();
let client = client_with_token(&server);
// Check initial state
assert!(!server.is_connected().await);
assert!(!server.open_browser());
assert!(server.timeout() == DEFAULT_TIMEOUT);
// Start server
server.start().await.unwrap();
// Check that the transaction request queue is empty
check_transaction_request_queue_empty(&client, &server).await;
// Stop server
server.stop().await.unwrap();
}
#[tokio::test]
async fn test_connect_disconnect_wallet() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
// Check that the transaction request queue is empty
check_transaction_request_queue_empty(&client, &server).await;
// Connect Alice's wallet
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Check connection state
let Connection { address, chain_id } =
server.get_connection().await.expect("expected an active wallet connection");
assert_eq!(address, ALICE);
assert_eq!(chain_id, 1);
// Disconnect wallet
disconnect_wallet(&client, &server).await;
// Check disconnected state
assert!(!server.is_connected().await);
// Connect Bob's wallet
connect_wallet(&client, &server, Connection::new(BOB, 42)).await;
// Check connection state
let Connection { address, chain_id } =
server.get_connection().await.expect("expected an active wallet connection");
assert_eq!(address, BOB);
assert_eq!(chain_id, 42);
// Stop server
server.stop().await.unwrap();
}
#[tokio::test]
async fn test_switch_wallet() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
// Connect Alice, assert connected
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
let Connection { address, chain_id } =
server.get_connection().await.expect("expected an active wallet connection");
assert_eq!(address, ALICE);
assert_eq!(chain_id, 1);
// Connect Bob, assert switched
connect_wallet(&client, &server, Connection::new(BOB, 42)).await;
let Connection { address, chain_id } =
server.get_connection().await.expect("expected an active wallet connection");
assert_eq!(address, BOB);
assert_eq!(chain_id, 42);
server.stop().await.unwrap();
}
#[tokio::test]
async fn test_transaction_response_both_hash_and_error_rejected() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Enqueue a tx
let (tx_request_id, tx_request) = create_browser_transaction_request();
let _handle = wait_for_transaction_signing(&server, tx_request).await;
check_transaction_request_content(&client, &server, tx_request_id).await;
// Wallet posts both hash and error -> should be rejected
let resp = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.json(&BrowserTransactionResponse {
id: tx_request_id,
hash: Some(TxHash::random()),
error: Some("should not have both".into()),
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let api: BrowserApiResponse<()> = resp.json().await.unwrap();
match api {
BrowserApiResponse::Error { message } => {
assert_eq!(message, "Only one of hash or error can be provided");
}
_ => panic!("expected error response"),
}
}
#[tokio::test]
async fn test_transaction_response_neither_hash_nor_error_rejected() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
let (tx_request_id, tx_request) = create_browser_transaction_request();
let _handle = wait_for_transaction_signing(&server, tx_request).await;
check_transaction_request_content(&client, &server, tx_request_id).await;
// Neither hash nor error -> rejected
let resp = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.json(&BrowserTransactionResponse { id: tx_request_id, hash: None, error: None })
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let api: BrowserApiResponse<()> = resp.json().await.unwrap();
match api {
BrowserApiResponse::Error { message } => {
assert_eq!(message, "Either hash or error must be provided");
}
_ => panic!("expected error response"),
}
}
#[tokio::test]
async fn test_transaction_response_zero_hash_rejected() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
let (tx_request_id, tx_request) = create_browser_transaction_request();
let _handle = wait_for_transaction_signing(&server, tx_request).await;
check_transaction_request_content(&client, &server, tx_request_id).await;
// Zero hash -> rejected
let zero = TxHash::new([0u8; 32]);
let resp = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.json(&BrowserTransactionResponse { id: tx_request_id, hash: Some(zero), error: None })
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let api: BrowserApiResponse<()> = resp.json().await.unwrap();
match api {
BrowserApiResponse::Error { message } => {
// Message text per your handler; adjust if you use a different string.
assert!(
message.contains("Invalid") || message.contains("Malformed"),
"unexpected message: {message}"
);
}
_ => panic!("expected error response"),
}
}
#[tokio::test]
async fn test_send_transaction_client_accept() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
let (tx_request_id, tx_request) = create_browser_transaction_request();
let handle = wait_for_transaction_signing(&server, tx_request).await;
check_transaction_request_content(&client, &server, tx_request_id).await;
// Simulate the wallet accepting and signing the tx
let resp = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.json(&BrowserTransactionResponse {
id: tx_request_id,
hash: Some(TxHash::random()),
error: None,
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp.status(), reqwest::StatusCode::OK);
// The join handle should now return the tx hash
let res = handle.await.expect("task panicked");
match res {
Ok(hash) => {
assert!(hash != TxHash::new([0; 32]));
}
other => panic!("expected success, got {other:?}"),
}
}
#[tokio::test]
async fn test_send_transaction_client_not_requested() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Create a random transaction response without a matching request
let tx_request_id = Uuid::new_v4();
// Simulate the wallet sending a response for an unknown request
let resp = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.json(&BrowserTransactionResponse {
id: tx_request_id,
hash: Some(TxHash::random()),
error: None,
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp.status(), reqwest::StatusCode::OK);
// Assert that no transaction without a matching request is accepted
let api: BrowserApiResponse<()> = resp.json().await.unwrap();
match api {
BrowserApiResponse::Error { message } => {
assert_eq!(message, "Unknown transaction id");
}
_ => panic!("expected error response"),
}
}
#[tokio::test]
async fn test_send_transaction_invalid_response_format() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Simulate the wallet sending a response with an invalid UUID
let resp = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.body(
r#"{
"id": "invalid-uuid",
"hash": "invalid-hash",
"error": null
}"#,
)
.header("Content-Type", "application/json")
.send()
.await
.unwrap();
// The server should respond with a 422 Unprocessable Entity status
assert_eq!(resp.status(), reqwest::StatusCode::UNPROCESSABLE_ENTITY);
}
#[tokio::test]
async fn test_send_transaction_client_reject() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Create a browser transaction request
let (tx_request_id, tx_request) = create_browser_transaction_request();
// Spawn the transaction signing flow in the background
let handle = wait_for_transaction_signing(&server, tx_request).await;
// Check transaction request
check_transaction_request_content(&client, &server, tx_request_id).await;
// Simulate the wallet rejecting the tx
let resp = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.json(&BrowserTransactionResponse {
id: tx_request_id,
hash: None,
error: Some("User rejected the transaction".into()),
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp.status(), reqwest::StatusCode::OK);
// The join handle should now return a rejection error
let res = handle.await.expect("task panicked");
match res {
Err(BrowserWalletError::Rejected { operation, reason }) => {
assert_eq!(operation, "Transaction");
assert_eq!(reason, "User rejected the transaction");
}
other => panic!("expected rejection, got {other:?}"),
}
}
#[tokio::test]
async fn test_send_multiple_transaction_requests() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Create multiple browser transaction requests
let (tx_request_id1, tx_request1) = create_browser_transaction_request();
let (tx_request_id2, tx_request2) = create_different_browser_transaction_request();
// Spawn signing flows for both transactions concurrently
let handle1 = wait_for_transaction_signing(&server, tx_request1.clone()).await;
let handle2 = wait_for_transaction_signing(&server, tx_request2.clone()).await;
// Check first transaction request
{
let resp = client
.get(format!("http://localhost:{}/api/transaction/request", server.port()))
.send()
.await
.unwrap();
let BrowserApiResponse::Ok(pending_tx) =
resp.json::<BrowserApiResponse<BrowserTransactionRequest>>().await.unwrap()
else {
panic!("expected BrowserApiResponse::Ok with a pending transaction");
};
assert_eq!(
pending_tx.id, tx_request_id1,
"expected the first transaction to be at the front of the queue"
);
assert_eq!(pending_tx.request.from, tx_request1.request.from);
assert_eq!(pending_tx.request.to, tx_request1.request.to);
assert_eq!(pending_tx.request.value, tx_request1.request.value);
}
// Simulate the wallet accepting and signing the first transaction
let resp1 = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.json(&BrowserTransactionResponse {
id: tx_request_id1,
hash: Some(TxHash::random()),
error: None,
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp1.status(), reqwest::StatusCode::OK);
let res1 = handle1.await.expect("first signing flow panicked");
match res1 {
Ok(hash) => assert!(!hash.is_zero(), "first tx hash should not be zero"),
other => panic!("expected success, got {other:?}"),
}
// Check second transaction request
{
let resp = client
.get(format!("http://localhost:{}/api/transaction/request", server.port()))
.send()
.await
.unwrap();
let BrowserApiResponse::Ok(pending_tx) =
resp.json::<BrowserApiResponse<BrowserTransactionRequest>>().await.unwrap()
else {
panic!("expected BrowserApiResponse::Ok with a pending transaction");
};
assert_eq!(
pending_tx.id, tx_request_id2,
"expected the second transaction to be pending after the first one completed"
);
assert_eq!(pending_tx.request.from, tx_request2.request.from);
assert_eq!(pending_tx.request.to, tx_request2.request.to);
assert_eq!(pending_tx.request.value, tx_request2.request.value);
}
// Simulate the wallet rejecting the second transaction
let resp2 = client
.post(format!("http://localhost:{}/api/transaction/response", server.port()))
.json(&BrowserTransactionResponse {
id: tx_request_id2,
hash: None,
error: Some("User rejected the transaction".into()),
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp2.status(), reqwest::StatusCode::OK);
let res2 = handle2.await.expect("second signing flow panicked");
match res2 {
Err(BrowserWalletError::Rejected { operation, reason }) => {
assert_eq!(operation, "Transaction");
assert_eq!(reason, "User rejected the transaction");
}
other => panic!("expected BrowserWalletError::Rejected, got {other:?}"),
}
check_transaction_request_queue_empty(&client, &server).await;
server.stop().await.unwrap();
}
#[tokio::test]
async fn test_send_sign_response_both_signature_and_error_rejected() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
let (sign_request_id, sign_request) = create_browser_sign_request();
let _handle = wait_for_message_signing(&server, sign_request).await;
check_sign_request_content(&client, &server, sign_request_id).await;
// Both signature and error -> should be rejected
let resp = client
.post(format!("http://localhost:{}/api/signing/response", server.port()))
.json(&BrowserSignResponse {
id: sign_request_id,
signature: Some(Bytes::from("Hello World")),
error: Some("Should not have both".into()),
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let api: BrowserApiResponse<()> = resp.json().await.unwrap();
match api {
BrowserApiResponse::Error { message } => {
assert_eq!(message, "Only one of signature or error can be provided");
}
_ => panic!("expected error response"),
}
}
#[tokio::test]
async fn test_send_sign_response_neither_hash_nor_error_rejected() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
let (sign_request_id, sign_request) = create_browser_sign_request();
let _handle = wait_for_message_signing(&server, sign_request).await;
check_sign_request_content(&client, &server, sign_request_id).await;
// Neither signature nor error -> rejected
let resp = client
.post(format!("http://localhost:{}/api/signing/response", server.port()))
.json(&BrowserSignResponse { id: sign_request_id, signature: None, error: None })
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
let api: BrowserApiResponse<()> = resp.json().await.unwrap();
match api {
BrowserApiResponse::Error { message } => {
assert_eq!(message, "Either signature or error must be provided");
}
_ => panic!("expected error response"),
}
}
#[tokio::test]
async fn test_send_sign_client_accept() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
let (sign_request_id, sign_request) = create_browser_sign_request();
let handle = wait_for_message_signing(&server, sign_request).await;
check_sign_request_content(&client, &server, sign_request_id).await;
// Simulate the wallet accepting and signing the message
let resp = client
.post(format!("http://localhost:{}/api/signing/response", server.port()))
.json(&BrowserSignResponse {
id: sign_request_id,
signature: Some(Bytes::from("FakeSignature")),
error: None,
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp.status(), reqwest::StatusCode::OK);
// The join handle should now return the signature
let res = handle.await.expect("task panicked");
match res {
Ok(signature) => {
assert_eq!(signature, Bytes::from("FakeSignature"));
}
other => panic!("expected success, got {other:?}"),
}
}
#[tokio::test]
async fn test_send_sign_client_not_requested() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Create a random signing response without a matching request
let sign_request_id = Uuid::new_v4();
// Simulate the wallet sending a response for an unknown request
let resp = client
.post(format!("http://localhost:{}/api/signing/response", server.port()))
.json(&BrowserSignResponse {
id: sign_request_id,
signature: Some(Bytes::from("FakeSignature")),
error: None,
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp.status(), reqwest::StatusCode::OK);
// Assert that no signing response without a matching request is accepted
let api: BrowserApiResponse<()> = resp.json().await.unwrap();
match api {
BrowserApiResponse::Error { message } => {
assert_eq!(message, "Unknown signing request id");
}
_ => panic!("expected error response"),
}
}
#[tokio::test]
async fn test_send_sign_invalid_response_format() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Simulate the wallet sending a response with an invalid UUID
let resp = client
.post(format!("http://localhost:{}/api/signing/response", server.port()))
.body(
r#"{
"id": "invalid-uuid",
"signature": "invalid-signature",
"error": null
}"#,
)
.header("Content-Type", "application/json")
.send()
.await
.unwrap();
// The server should respond with a 422 Unprocessable Entity status
assert_eq!(resp.status(), reqwest::StatusCode::UNPROCESSABLE_ENTITY);
}
#[tokio::test]
async fn test_send_sign_client_reject() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
let (sign_request_id, sign_request) = create_browser_sign_request();
let handle = wait_for_message_signing(&server, sign_request).await;
check_sign_request_content(&client, &server, sign_request_id).await;
// Simulate the wallet rejecting the signing request
let resp = client
.post(format!("http://localhost:{}/api/signing/response", server.port()))
.json(&BrowserSignResponse {
id: sign_request_id,
signature: None,
error: Some("User rejected the signing request".into()),
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp.status(), reqwest::StatusCode::OK);
// The join handle should now return a rejection error
let res = handle.await.expect("task panicked");
match res {
Err(BrowserWalletError::Rejected { operation, reason }) => {
assert_eq!(operation, "Signing");
assert_eq!(reason, "User rejected the signing request");
}
other => panic!("expected rejection, got {other:?}"),
}
}
#[tokio::test]
async fn test_send_multiple_sign_requests() {
let mut server = create_server();
let client = client_with_token(&server);
server.start().await.unwrap();
connect_wallet(&client, &server, Connection::new(ALICE, 1)).await;
// Create multiple browser sign requests
let (sign_request_id1, sign_request1) = create_browser_sign_request();
let (sign_request_id2, sign_request2) = create_different_browser_sign_request();
// Spawn signing flows for both sign requests concurrently
let handle1 = wait_for_message_signing(&server, sign_request1.clone()).await;
let handle2 = wait_for_message_signing(&server, sign_request2.clone()).await;
// Check first sign request
{
let resp = client
.get(format!("http://localhost:{}/api/signing/request", server.port()))
.send()
.await
.unwrap();
let BrowserApiResponse::Ok(pending_sign) =
resp.json::<BrowserApiResponse<BrowserSignRequest>>().await.unwrap()
else {
panic!("expected BrowserApiResponse::Ok with a pending sign request");
};
assert_eq!(pending_sign.id, sign_request_id1);
assert_eq!(pending_sign.sign_type, sign_request1.sign_type);
assert_eq!(pending_sign.request.address, sign_request1.request.address);
assert_eq!(pending_sign.request.message, sign_request1.request.message);
}
// Simulate the wallet accepting and signing the first sign request
let resp1 = client
.post(format!("http://localhost:{}/api/signing/response", server.port()))
.json(&BrowserSignResponse {
id: sign_request_id1,
signature: Some(Bytes::from("Signature1")),
error: None,
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp1.status(), reqwest::StatusCode::OK);
let res1 = handle1.await.expect("first signing flow panicked");
match res1 {
Ok(signature) => assert_eq!(signature, Bytes::from("Signature1")),
other => panic!("expected success, got {other:?}"),
}
// Check second sign request
{
let resp = client
.get(format!("http://localhost:{}/api/signing/request", server.port()))
.send()
.await
.unwrap();
let BrowserApiResponse::Ok(pending_sign) =
resp.json::<BrowserApiResponse<BrowserSignRequest>>().await.unwrap()
else {
panic!("expected BrowserApiResponse::Ok with a pending sign request");
};
assert_eq!(pending_sign.id, sign_request_id2,);
assert_eq!(pending_sign.sign_type, sign_request2.sign_type);
assert_eq!(pending_sign.request.address, sign_request2.request.address);
assert_eq!(pending_sign.request.message, sign_request2.request.message);
}
// Simulate the wallet rejecting the second sign request
let resp2 = client
.post(format!("http://localhost:{}/api/signing/response", server.port()))
.json(&BrowserSignResponse {
id: sign_request_id2,
signature: None,
error: Some("User rejected the signing request".into()),
})
.send()
.await
.unwrap()
.error_for_status()
.unwrap();
assert_eq!(resp2.status(), reqwest::StatusCode::OK);
let res2 = handle2.await.expect("second signing flow panicked");
match res2 {
Err(BrowserWalletError::Rejected { operation, reason }) => {
assert_eq!(operation, "Signing");
assert_eq!(reason, "User rejected the signing request");
}
other => panic!("expected BrowserWalletError::Rejected, got {other:?}"),
}
check_sign_request_queue_empty(&client, &server).await;
server.stop().await.unwrap();
}
/// Helper to create a default browser wallet server.
fn create_server() -> BrowserWalletServer {
BrowserWalletServer::new(0, false, DEFAULT_TIMEOUT, DEFAULT_DEVELOPMENT)
}
/// Helper to create a reqwest client with the session token header.
fn client_with_token(server: &BrowserWalletServer) -> reqwest::Client {
let mut headers = HeaderMap::new();
headers.insert("X-Session-Token", HeaderValue::from_str(server.session_token()).unwrap());
reqwest::Client::builder().default_headers(headers).build().unwrap()
}
/// Helper to connect a wallet to the server.
async fn connect_wallet(
client: &reqwest::Client,
server: &BrowserWalletServer,
connection: Connection,
) {
let resp = client
.post(format!("http://localhost:{}/api/connection", server.port()))
.json(&connection)
.send();
assert!(resp.await.is_ok());
}
/// Helper to disconnect a wallet from the server.
async fn disconnect_wallet(client: &reqwest::Client, server: &BrowserWalletServer) {
let resp = client
.post(format!("http://localhost:{}/api/connection", server.port()))
.json(&Option::<Connection>::None)
.send();
assert!(resp.await.is_ok());
}
/// Spawn the transaction signing flow in the background and return the join handle.
async fn wait_for_transaction_signing(
server: &BrowserWalletServer,
tx_request: BrowserTransactionRequest,
) -> JoinHandle<Result<TxHash, BrowserWalletError>> {
// Spawn the signing flow in the background
let browser_server = server.clone();
let join_handle =
tokio::spawn(async move { browser_server.request_transaction(tx_request).await });
tokio::task::yield_now().await;
tokio::time::sleep(Duration::from_millis(100)).await;
join_handle
}
/// Spawn the message signing flow in the background and return the join handle.
async fn wait_for_message_signing(
server: &BrowserWalletServer,
sign_request: BrowserSignRequest,
) -> JoinHandle<Result<Bytes, BrowserWalletError>> {
// Spawn the signing flow in the background
let browser_server = server.clone();
let join_handle =
tokio::spawn(async move { browser_server.request_signing(sign_request).await });
tokio::task::yield_now().await;
tokio::time::sleep(Duration::from_millis(100)).await;
join_handle
}
/// Create a simple browser transaction request.
fn create_browser_transaction_request() -> (Uuid, BrowserTransactionRequest) {
let id = Uuid::new_v4();
let tx = BrowserTransactionRequest {
id,
request: TransactionRequest {
from: Some(ALICE),
to: Some(TxKind::Call(BOB)),
value: Some(U256::from(1000)),
..Default::default()
},
};
(id, tx)
}
/// Create a different browser transaction request (from the first one).
fn create_different_browser_transaction_request() -> (Uuid, BrowserTransactionRequest) {
let id = Uuid::new_v4();
let tx = BrowserTransactionRequest {
id,
request: TransactionRequest {
from: Some(BOB),
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/server.rs | crates/wallets/src/wallet_browser/server.rs | use std::{
net::SocketAddr,
sync::Arc,
time::{Duration, Instant},
};
use alloy_dyn_abi::TypedData;
use alloy_primitives::{Address, Bytes, TxHash};
use tokio::{
net::TcpListener,
sync::{Mutex, oneshot},
};
use uuid::Uuid;
use crate::wallet_browser::{
error::BrowserWalletError,
router::build_router,
state::BrowserWalletState,
types::{
BrowserSignRequest, BrowserSignTypedDataRequest, BrowserTransactionRequest, Connection,
SignRequest, SignType,
},
};
/// Browser wallet server.
#[derive(Debug, Clone)]
pub struct BrowserWalletServer {
port: u16,
state: Arc<BrowserWalletState>,
shutdown_tx: Option<Arc<Mutex<Option<oneshot::Sender<()>>>>>,
open_browser: bool,
timeout: Duration,
}
impl BrowserWalletServer {
/// Create a new browser wallet server.
pub fn new(port: u16, open_browser: bool, timeout: Duration, development: bool) -> Self {
Self {
port,
state: Arc::new(BrowserWalletState::new(Uuid::new_v4().to_string(), development)),
shutdown_tx: None,
open_browser,
timeout,
}
}
/// Start the server and open browser.
pub async fn start(&mut self) -> Result<(), BrowserWalletError> {
let router = build_router(self.state.clone(), self.port).await;
let addr = SocketAddr::from(([127, 0, 0, 1], self.port));
let listener = TcpListener::bind(addr)
.await
.map_err(|e| BrowserWalletError::ServerError(e.to_string()))?;
self.port = listener.local_addr().unwrap().port();
let (shutdown_tx, shutdown_rx) = oneshot::channel();
self.shutdown_tx = Some(Arc::new(Mutex::new(Some(shutdown_tx))));
tokio::spawn(async move {
let server = axum::serve(listener, router);
let _ = server
.with_graceful_shutdown(async {
let _ = shutdown_rx.await;
})
.await;
});
if self.open_browser {
webbrowser::open(&format!("http://127.0.0.1:{}", self.port)).map_err(|e| {
BrowserWalletError::ServerError(format!("Failed to open browser: {e}"))
})?;
}
Ok(())
}
/// Stop the server.
pub async fn stop(&mut self) -> Result<(), BrowserWalletError> {
if let Some(shutdown_arc) = self.shutdown_tx.take()
&& let Some(tx) = shutdown_arc.lock().await.take()
{
let _ = tx.send(());
}
Ok(())
}
/// Get the server port.
pub fn port(&self) -> u16 {
self.port
}
/// Check if the browser should be opened.
pub fn open_browser(&self) -> bool {
self.open_browser
}
/// Get the timeout duration.
pub fn timeout(&self) -> Duration {
self.timeout
}
/// Get the session token.
pub fn session_token(&self) -> &str {
self.state.session_token()
}
/// Check if a wallet is connected.
pub async fn is_connected(&self) -> bool {
self.state.is_connected().await
}
/// Get current wallet connection.
pub async fn get_connection(&self) -> Option<Connection> {
self.state.get_connection().await
}
/// Request a transaction to be signed and sent via the browser wallet.
///
/// Queues the request, then polls the shared state every 100ms until the browser
/// posts a response or `self.timeout` elapses. On timeout the request is removed
/// from the queue before erroring.
pub async fn request_transaction(
    &self,
    request: BrowserTransactionRequest,
) -> Result<TxHash, BrowserWalletError> {
    if !self.is_connected().await {
        return Err(BrowserWalletError::NotConnected);
    }
    let request_id = request.id;
    self.state.add_transaction_request(request).await;
    let started = Instant::now();
    loop {
        if let Some(response) = self.state.get_transaction_response(&request_id).await {
            // A hash wins over an error if the browser ever sends both.
            return match (response.hash, response.error) {
                (Some(hash), _) => Ok(hash),
                (None, Some(reason)) => {
                    Err(BrowserWalletError::Rejected { operation: "Transaction", reason })
                }
                (None, None) => Err(BrowserWalletError::ServerError(
                    "Transaction response missing both hash and error".to_string(),
                )),
            };
        }
        if started.elapsed() > self.timeout {
            self.state.remove_transaction_request(&request_id).await;
            return Err(BrowserWalletError::Timeout { operation: "Transaction" });
        }
        tokio::time::sleep(Duration::from_millis(100)).await;
    }
}
/// Request a message to be signed via the browser wallet.
///
/// Queues the request, then polls the shared state every 100ms until the browser
/// posts a response or `self.timeout` elapses. On timeout the request is removed
/// from the queue before erroring.
pub async fn request_signing(
    &self,
    request: BrowserSignRequest,
) -> Result<Bytes, BrowserWalletError> {
    if !self.is_connected().await {
        return Err(BrowserWalletError::NotConnected);
    }
    let request_id = request.id;
    self.state.add_signing_request(request).await;
    let started = Instant::now();
    loop {
        if let Some(response) = self.state.get_signing_response(&request_id).await {
            // A signature wins over an error if the browser ever sends both.
            return match (response.signature, response.error) {
                (Some(signature), _) => Ok(signature),
                (None, Some(reason)) => {
                    Err(BrowserWalletError::Rejected { operation: "Signing", reason })
                }
                (None, None) => Err(BrowserWalletError::ServerError(
                    "Signing response missing both signature and error".to_string(),
                )),
            };
        }
        if started.elapsed() > self.timeout {
            self.state.remove_signing_request(&request_id).await;
            return Err(BrowserWalletError::Timeout { operation: "Signing" });
        }
        tokio::time::sleep(Duration::from_millis(100)).await;
    }
}
/// Request EIP-712 typed data signing via the browser wallet.
///
/// Serializes the typed data to JSON and delegates to [`Self::request_signing`]
/// using the `eth_signTypedData_v4` flavor.
pub async fn request_typed_data_signing(
    &self,
    address: Address,
    typed_data: TypedData,
) -> Result<Bytes, BrowserWalletError> {
    let payload = serde_json::to_string(&typed_data).map_err(|e| {
        BrowserWalletError::ServerError(format!("Failed to serialize typed data: {e}"))
    })?;
    let sign_request = BrowserSignRequest {
        id: Uuid::new_v4(),
        sign_type: SignType::SignTypedDataV4,
        request: SignRequest { message: payload, address },
    };
    self.request_signing(sign_request).await
}
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/queue.rs | crates/wallets/src/wallet_browser/queue.rs | use std::collections::{HashMap, VecDeque};
use uuid::Uuid;
use crate::wallet_browser::types::{BrowserSignRequest, BrowserTransactionRequest};
/// FIFO queue pairing outbound requests with their eventual responses.
///
/// `Req` is the request type sent from the CLI to the browser; `Res` is the
/// response type posted back by the browser, keyed by the request's [`Uuid`].
#[derive(Debug)]
pub(crate) struct RequestQueue<Req, Res> {
    /// Pending requests from CLI to browser
    requests: VecDeque<Req>,
    /// Responses from browser indexed by request ID
    responses: HashMap<Uuid, Res>,
}
impl<Req, Res> Default for RequestQueue<Req, Res> {
    /// Equivalent to [`RequestQueue::new`]: an empty queue.
    fn default() -> Self {
        Self::new()
    }
}
impl<Req, Res> RequestQueue<Req, Res> {
    /// Create a new, empty request queue.
    pub fn new() -> Self {
        Self { requests: VecDeque::new(), responses: HashMap::new() }
    }

    /// Enqueue a request at the back of the pending queue.
    pub fn add_request(&mut self, request: Req) {
        self.requests.push_back(request);
    }

    /// Returns `true` if a pending request with the given ID is queued.
    pub fn has_request(&self, id: &Uuid) -> bool
    where
        Req: HasId,
    {
        self.index_of(id).is_some()
    }

    /// Read the next request from the queue without removing it.
    pub fn read_request(&self) -> Option<&Req> {
        self.requests.front()
    }

    /// Remove and return the pending request with the given ID, if present.
    pub fn remove_request(&mut self, id: &Uuid) -> Option<Req>
    where
        Req: HasId,
    {
        let idx = self.index_of(id)?;
        self.requests.remove(idx)
    }

    /// Store a response under the given request ID.
    pub fn add_response(&mut self, id: Uuid, response: Res) {
        self.responses.insert(id, response);
    }

    /// Take the response for the given request ID out of the queue, if present.
    pub fn get_response(&mut self, id: &Uuid) -> Option<Res> {
        self.responses.remove(id)
    }

    /// Position of the pending request with the given ID, if any.
    fn index_of(&self, id: &Uuid) -> Option<usize>
    where
        Req: HasId,
    {
        self.requests.iter().position(|req| req.id() == id)
    }
}
/// Types that carry a stable request identifier.
///
/// Lets [`RequestQueue`] locate queued requests generically by [`Uuid`].
pub(crate) trait HasId {
    /// Returns the unique identifier of this request.
    fn id(&self) -> &Uuid;
}

impl HasId for BrowserTransactionRequest {
    fn id(&self) -> &Uuid {
        &self.id
    }
}

impl HasId for BrowserSignRequest {
    fn id(&self) -> &Uuid {
        &self.id
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_browser/app/mod.rs | crates/wallets/src/wallet_browser/app/mod.rs | pub(crate) mod contents {
// Static assets for the browser wallet UI, embedded into the binary at compile time.
pub const INDEX_HTML: &str = include_str!("assets/index.html");
pub const STYLES_CSS: &str = include_str!("assets/styles.css");
pub const MAIN_JS: &str = include_str!("assets/main.js");
pub const BANNER_PNG: &[u8] = include_bytes!("assets/banner.png");
pub const LOGO_PNG: &[u8] = include_bytes!("assets/logo.png");
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_multi/mod.rs | crates/wallets/src/wallet_multi/mod.rs | use crate::{
signer::{PendingSigner, WalletSigner},
utils,
};
use alloy_primitives::map::AddressHashMap;
use alloy_signer::Signer;
use clap::Parser;
use derive_builder::Builder;
use eyre::Result;
use foundry_config::Config;
use serde::Serialize;
use std::path::PathBuf;
/// Container for multiple wallets.
///
/// Unlocked signers are stored keyed by address; signers that still require an
/// action (e.g. an interactive prompt) are kept pending and unlocked on demand.
#[derive(Debug, Default)]
pub struct MultiWallet {
    /// Vector of wallets that require an action to be unlocked.
    /// Those are lazily unlocked on the first access of the signers.
    pending_signers: Vec<PendingSigner>,
    /// Contains unlocked signers.
    signers: AddressHashMap<WalletSigner>,
}
impl MultiWallet {
    /// Builds a container from signers pending unlock and already-unlocked signers.
    pub fn new(pending_signers: Vec<PendingSigner>, signers: Vec<WalletSigner>) -> Self {
        let signers =
            signers.into_iter().map(|signer| (signer.address(), signer)).collect();
        Self { pending_signers, signers }
    }

    /// Unlocks all pending signers (in order) and moves them into the unlocked map.
    fn maybe_unlock_pending(&mut self) -> Result<()> {
        for pending in self.pending_signers.drain(..) {
            let unlocked = pending.unlock()?;
            self.signers.insert(unlocked.address(), unlocked);
        }
        Ok(())
    }

    /// Returns all signers, unlocking any that are still pending first.
    pub fn signers(&mut self) -> Result<&AddressHashMap<WalletSigner>> {
        self.maybe_unlock_pending()?;
        Ok(&self.signers)
    }

    /// Consumes the container and returns all signers, unlocking pending ones first.
    pub fn into_signers(mut self) -> Result<AddressHashMap<WalletSigner>> {
        self.maybe_unlock_pending()?;
        Ok(self.signers)
    }

    /// Registers an already-unlocked signer, keyed by its address.
    pub fn add_signer(&mut self, signer: WalletSigner) {
        self.signers.insert(signer.address(), signer);
    }
}
/// A macro that initializes multiple wallets
///
/// Should be used with a [`MultiWallet`] instance
///
/// Expands to a `let`-binding named `$signers` holding one hardware signer per
/// requested derivation path and/or mnemonic index; when neither is given, a
/// single signer at index 0 is created.
macro_rules! create_hw_wallets {
    ($self:ident, $create_signer:expr, $signers:ident) => {
        let mut $signers = vec![];

        // One signer per explicit derivation path.
        if let Some(hd_paths) = &$self.hd_paths {
            for path in hd_paths {
                let hw = $create_signer(Some(path), 0).await?;
                $signers.push(hw);
            }
        }

        // One signer per mnemonic index.
        if let Some(mnemonic_indexes) = &$self.mnemonic_indexes {
            for index in mnemonic_indexes {
                let hw = $create_signer(None, *index).await?;
                $signers.push(hw);
            }
        }

        // Fall back to a single signer at the default index.
        if $signers.is_empty() {
            let hw = $create_signer(None, 0).await?;
            $signers.push(hw);
        }
    };
}
/// The wallet options can either be:
/// 1. Ledger
/// 2. Trezor
/// 3. Mnemonics (via file path)
/// 4. Keystores (via file path)
/// 5. Private Keys (cleartext in CLI)
/// 6. Private Keys (interactively via secure prompt)
/// 7. AWS KMS
/// 8. Turnkey
// NOTE: the `///` doc comments on the fields below double as clap help text —
// editing them changes the CLI's `--help` output.
#[derive(Builder, Clone, Debug, Default, Serialize, Parser)]
#[command(next_help_heading = "Wallet options", about = None, long_about = None)]
pub struct MultiWalletOpts {
    /// Open an interactive prompt to enter your private key.
    ///
    /// Takes a value for the number of keys to enter.
    #[arg(long, help_heading = "Wallet options - raw", default_value = "0", value_name = "NUM")]
    pub interactives: u32,

    /// Open an interactive prompt to enter your private key.
    #[arg(long, short, help_heading = "Wallet options - raw", conflicts_with = "interactives")]
    pub interactive: bool,

    /// Use the provided private keys.
    #[arg(long, help_heading = "Wallet options - raw", value_name = "RAW_PRIVATE_KEYS")]
    #[builder(default = "None")]
    pub private_keys: Option<Vec<String>>,

    /// Use the provided private key.
    #[arg(
        long,
        help_heading = "Wallet options - raw",
        conflicts_with = "private_keys",
        value_name = "RAW_PRIVATE_KEY"
    )]
    #[builder(default = "None")]
    pub private_key: Option<String>,

    /// Use the mnemonic phrases of mnemonic files at the specified paths.
    #[arg(long, alias = "mnemonic-paths", help_heading = "Wallet options - raw")]
    #[builder(default = "None")]
    pub mnemonics: Option<Vec<String>>,

    /// Use a BIP39 passphrases for the mnemonic.
    #[arg(long, help_heading = "Wallet options - raw", value_name = "PASSPHRASE")]
    #[builder(default = "None")]
    pub mnemonic_passphrases: Option<Vec<String>>,

    /// The wallet derivation path.
    ///
    /// Works with both --mnemonic-path and hardware wallets.
    #[arg(
        long = "mnemonic-derivation-paths",
        alias = "hd-paths",
        help_heading = "Wallet options - raw",
        value_name = "PATH"
    )]
    #[builder(default = "None")]
    pub hd_paths: Option<Vec<String>>,

    /// Use the private key from the given mnemonic index.
    ///
    /// Can be used with --mnemonics, --ledger, --aws and --trezor.
    #[arg(
        long,
        conflicts_with = "hd_paths",
        help_heading = "Wallet options - raw",
        default_value = "0",
        value_name = "INDEXES"
    )]
    pub mnemonic_indexes: Option<Vec<u32>>,

    /// Use the keystore by its filename in the given folder.
    #[arg(
        long = "keystore",
        visible_alias = "keystores",
        help_heading = "Wallet options - keystore",
        value_name = "PATHS",
        env = "ETH_KEYSTORE"
    )]
    #[builder(default = "None")]
    pub keystore_paths: Option<Vec<String>>,

    /// Use a keystore from the default keystores folder (~/.foundry/keystores) by its filename.
    #[arg(
        long = "account",
        visible_alias = "accounts",
        help_heading = "Wallet options - keystore",
        value_name = "ACCOUNT_NAMES",
        env = "ETH_KEYSTORE_ACCOUNT",
        conflicts_with = "keystore_paths"
    )]
    #[builder(default = "None")]
    pub keystore_account_names: Option<Vec<String>>,

    /// The keystore password.
    ///
    /// Used with --keystore.
    #[arg(
        long = "password",
        help_heading = "Wallet options - keystore",
        requires = "keystore_paths",
        value_name = "PASSWORDS"
    )]
    #[builder(default = "None")]
    pub keystore_passwords: Option<Vec<String>>,

    /// The keystore password file path.
    ///
    /// Used with --keystore.
    #[arg(
        long = "password-file",
        help_heading = "Wallet options - keystore",
        requires = "keystore_paths",
        value_name = "PATHS",
        env = "ETH_PASSWORD"
    )]
    #[builder(default = "None")]
    pub keystore_password_files: Option<Vec<String>>,

    /// Use a Ledger hardware wallet.
    #[arg(long, short, help_heading = "Wallet options - hardware wallet")]
    pub ledger: bool,

    /// Use a Trezor hardware wallet.
    #[arg(long, short, help_heading = "Wallet options - hardware wallet")]
    pub trezor: bool,

    /// Use AWS Key Management Service.
    ///
    /// Ensure either one of AWS_KMS_KEY_IDS (comma-separated) or AWS_KMS_KEY_ID environment
    /// variables are set.
    // Hidden from --help unless the corresponding cargo feature is enabled.
    #[arg(long, help_heading = "Wallet options - remote", hide = !cfg!(feature = "aws-kms"))]
    pub aws: bool,

    /// Use Google Cloud Key Management Service.
    ///
    /// Ensure the following environment variables are set: GCP_PROJECT_ID, GCP_LOCATION,
    /// GCP_KEY_RING, GCP_KEY_NAME, GCP_KEY_VERSION.
    ///
    /// See: <https://cloud.google.com/kms/docs>
    #[arg(long, help_heading = "Wallet options - remote", hide = !cfg!(feature = "gcp-kms"))]
    pub gcp: bool,

    /// Use Turnkey.
    ///
    /// Ensure the following environment variables are set: TURNKEY_API_PRIVATE_KEY,
    /// TURNKEY_ORGANIZATION_ID, TURNKEY_ADDRESS.
    ///
    /// See: <https://docs.turnkey.com/getting-started/quickstart>
    #[arg(long, help_heading = "Wallet options - remote", hide = !cfg!(feature = "turnkey"))]
    pub turnkey: bool,
}
impl MultiWalletOpts {
    /// Returns [MultiWallet] container configured with provided options.
    ///
    /// Collects signers from every configured source (hardware wallets, remote
    /// KMS providers, keystores, raw keys, mnemonics) plus pending interactive
    /// prompts, which are unlocked lazily by [`MultiWallet`].
    pub async fn get_multi_wallet(&self) -> Result<MultiWallet> {
        let mut pending = Vec::new();
        let mut signers: Vec<WalletSigner> = Vec::new();
        if let Some(ledgers) = self.ledgers().await? {
            signers.extend(ledgers);
        }
        if let Some(trezors) = self.trezors().await? {
            signers.extend(trezors);
        }
        if let Some(aws_signers) = self.aws_signers().await? {
            signers.extend(aws_signers);
        }
        if let Some(gcp_signer) = self.gcp_signers().await? {
            signers.extend(gcp_signer);
        }
        if let Some(turnkey_signers) = self.turnkey_signers()? {
            signers.extend(turnkey_signers);
        }
        if let Some((pending_keystores, unlocked)) = self.keystores()? {
            pending.extend(pending_keystores);
            signers.extend(unlocked);
        }
        if let Some(pks) = self.private_keys()? {
            signers.extend(pks);
        }
        if let Some(mnemonics) = self.mnemonics()? {
            signers.extend(mnemonics);
        }
        if self.interactive {
            pending.push(PendingSigner::Interactive);
        }
        if self.interactives > 0 {
            pending.extend(std::iter::repeat_n(
                PendingSigner::Interactive,
                self.interactives as usize,
            ));
        }
        Ok(MultiWallet::new(pending, signers))
    }

    /// Returns a signer for each raw private key provided via `--private-key` /
    /// `--private-keys`, or `Ok(None)` if none were given.
    pub fn private_keys(&self) -> Result<Option<Vec<WalletSigner>>> {
        let mut pks = vec![];
        if let Some(private_key) = &self.private_key {
            pks.push(private_key);
        }
        if let Some(private_keys) = &self.private_keys {
            for pk in private_keys {
                pks.push(pk);
            }
        }
        if !pks.is_empty() {
            let wallets = pks
                .into_iter()
                .map(|pk| utils::create_private_key_signer(pk))
                .collect::<Result<Vec<_>>>()?;
            Ok(Some(wallets))
        } else {
            Ok(None)
        }
    }

    /// Resolves keystore file paths: explicit `--keystore` paths take precedence,
    /// otherwise `--account` names are resolved against the default keystore dir.
    fn keystore_paths(&self) -> Result<Option<Vec<PathBuf>>> {
        if let Some(keystore_paths) = &self.keystore_paths {
            return Ok(Some(keystore_paths.iter().map(PathBuf::from).collect()));
        }
        if let Some(keystore_account_names) = &self.keystore_account_names {
            let default_keystore_dir = Config::foundry_keystores_dir()
                .ok_or_else(|| eyre::eyre!("Could not find the default keystore directory."))?;
            return Ok(Some(
                keystore_account_names
                    .iter()
                    .map(|keystore_name| default_keystore_dir.join(keystore_name))
                    .collect(),
            ));
        }
        Ok(None)
    }

    /// Returns all wallets read from the provided keystores arguments
    ///
    /// Returns `Ok(None)` if no keystore provided.
    pub fn keystores(&self) -> Result<Option<(Vec<PendingSigner>, Vec<WalletSigner>)>> {
        if let Some(keystore_paths) = self.keystore_paths()? {
            let mut pending = Vec::new();
            let mut signers = Vec::new();
            // Passwords / password files are matched to keystores positionally;
            // keystores beyond the provided passwords become pending (prompted).
            let mut passwords_iter =
                self.keystore_passwords.iter().flat_map(|passwords| passwords.iter());
            let mut password_files_iter = self
                .keystore_password_files
                .iter()
                .flat_map(|password_files| password_files.iter());
            for path in &keystore_paths {
                let (maybe_signer, maybe_pending) = utils::create_keystore_signer(
                    path,
                    passwords_iter.next().map(|password| password.as_str()),
                    password_files_iter.next().map(|password_file| password_file.as_str()),
                )?;
                if let Some(pending_signer) = maybe_pending {
                    pending.push(pending_signer);
                } else if let Some(signer) = maybe_signer {
                    signers.push(signer);
                }
            }
            return Ok(Some((pending, signers)));
        }
        Ok(None)
    }

    /// Returns a signer per configured mnemonic, pairing each positionally with
    /// the optional passphrase, derivation path and index lists.
    pub fn mnemonics(&self) -> Result<Option<Vec<WalletSigner>>> {
        if let Some(ref mnemonics) = self.mnemonics {
            let mut wallets = vec![];
            let mut hd_paths_iter =
                self.hd_paths.iter().flat_map(|paths| paths.iter().map(String::as_str));
            let mut passphrases_iter = self
                .mnemonic_passphrases
                .iter()
                .flat_map(|passphrases| passphrases.iter().map(String::as_str));
            let mut indexes_iter =
                self.mnemonic_indexes.iter().flat_map(|indexes| indexes.iter().copied());
            for mnemonic in mnemonics {
                let wallet = utils::create_mnemonic_signer(
                    mnemonic,
                    passphrases_iter.next(),
                    hd_paths_iter.next(),
                    indexes_iter.next().unwrap_or(0),
                )?;
                wallets.push(wallet);
            }
            return Ok(Some(wallets));
        }
        Ok(None)
    }

    /// Returns Ledger signers if `--ledger` is set, `Ok(None)` otherwise.
    pub async fn ledgers(&self) -> Result<Option<Vec<WalletSigner>>> {
        if self.ledger {
            let mut args = self.clone();
            if let Some(paths) = &args.hd_paths {
                if paths.len() > 1 {
                    eyre::bail!("Ledger only supports one signer.");
                }
                // An explicit derivation path overrides any mnemonic index.
                args.mnemonic_indexes = None;
            }
            create_hw_wallets!(args, utils::create_ledger_signer, wallets);
            return Ok(Some(wallets));
        }
        Ok(None)
    }

    /// Returns Trezor signers if `--trezor` is set, `Ok(None)` otherwise.
    pub async fn trezors(&self) -> Result<Option<Vec<WalletSigner>>> {
        if self.trezor {
            let mut args = self.clone();
            if args.hd_paths.is_some() {
                // An explicit derivation path overrides any mnemonic index.
                args.mnemonic_indexes = None;
            }
            create_hw_wallets!(args, utils::create_trezor_signer, wallets);
            return Ok(Some(wallets));
        }
        Ok(None)
    }

    /// Returns AWS KMS signers if `--aws` is set, one per key ID taken from
    /// `AWS_KMS_KEY_IDS` (comma-separated) or, failing that, `AWS_KMS_KEY_ID`.
    pub async fn aws_signers(&self) -> Result<Option<Vec<WalletSigner>>> {
        #[cfg(feature = "aws-kms")]
        if self.aws {
            let mut wallets = vec![];
            let aws_keys = std::env::var("AWS_KMS_KEY_IDS")
                // `or_else` keeps the fallback lookup lazy (clippy: or_fun_call).
                .or_else(|_| std::env::var("AWS_KMS_KEY_ID"))?
                .split(',')
                .map(|k| k.to_string())
                .collect::<Vec<_>>();
            for key in aws_keys {
                let aws_signer = WalletSigner::from_aws(key).await?;
                wallets.push(aws_signer)
            }
            return Ok(Some(wallets));
        }
        Ok(None)
    }

    /// Returns a list of GCP signers if the GCP flag is set.
    ///
    /// The GCP signers are created from the following environment variables:
    /// - GCP_PROJECT_ID: The GCP project ID. e.g. `my-project-123456`.
    /// - GCP_LOCATION: The GCP location. e.g. `us-central1`.
    /// - GCP_KEY_RING: The GCP key ring name. e.g. `my-key-ring`.
    /// - GCP_KEY_NAME: The GCP key name. e.g. `my-key`.
    /// - GCP_KEY_VERSION: The GCP key version. e.g. `1`.
    ///
    /// For more information on GCP KMS, see the [official documentation](https://cloud.google.com/kms/docs).
    pub async fn gcp_signers(&self) -> Result<Option<Vec<WalletSigner>>> {
        #[cfg(feature = "gcp-kms")]
        if self.gcp {
            let mut wallets = vec![];
            let project_id = std::env::var("GCP_PROJECT_ID")?;
            let location = std::env::var("GCP_LOCATION")?;
            let key_ring = std::env::var("GCP_KEY_RING")?;
            let key_name = std::env::var("GCP_KEY_NAME")?;
            let key_version = std::env::var("GCP_KEY_VERSION")?;
            let gcp_signer = WalletSigner::from_gcp(
                project_id,
                location,
                key_ring,
                key_name,
                key_version.parse()?,
            )
            .await?;
            wallets.push(gcp_signer);
            return Ok(Some(wallets));
        }
        Ok(None)
    }

    /// Returns a Turnkey signer if `--turnkey` is set, configured from the
    /// TURNKEY_API_PRIVATE_KEY, TURNKEY_ORGANIZATION_ID and TURNKEY_ADDRESS
    /// environment variables.
    pub fn turnkey_signers(&self) -> Result<Option<Vec<WalletSigner>>> {
        #[cfg(feature = "turnkey")]
        if self.turnkey {
            let api_private_key = std::env::var("TURNKEY_API_PRIVATE_KEY")?;
            let organization_id = std::env::var("TURNKEY_ORGANIZATION_ID")?;
            let address = std::env::var("TURNKEY_ADDRESS")?.parse()?;
            let signer = WalletSigner::from_turnkey(api_private_key, organization_id, address)?;
            return Ok(Some(vec![signer]));
        }
        Ok(None)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use alloy_primitives::address;
    use std::path::Path;

    #[test]
    fn parse_keystore_args() {
        // Explicit CLI flag wins.
        let args: MultiWalletOpts =
            MultiWalletOpts::parse_from(["foundry-cli", "--keystores", "my/keystore/path"]);
        assert_eq!(args.keystore_paths, Some(vec!["my/keystore/path".to_string()]));

        // Falls back to the ETH_KEYSTORE env var when no flag is given.
        // SAFETY-adjacent note: env mutation is process-global; the var is
        // removed again below to avoid leaking into other tests.
        unsafe {
            std::env::set_var("ETH_KEYSTORE", "MY_KEYSTORE");
        }
        let args: MultiWalletOpts = MultiWalletOpts::parse_from(["foundry-cli"]);
        assert_eq!(args.keystore_paths, Some(vec!["MY_KEYSTORE".to_string()]));
        unsafe {
            std::env::remove_var("ETH_KEYSTORE");
        }
    }

    #[test]
    fn parse_keystore_password_file() {
        // Uses a fixture keystore shipped with the `cast` crate's tests.
        let keystore =
            Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/../cast/tests/fixtures/keystore"));
        let keystore_file = keystore
            .join("UTC--2022-12-20T10-30-43.591916000Z--ec554aeafe75601aaab43bd4621a22284db566c2");
        let keystore_password_file = keystore.join("password-ec554").into_os_string();
        let args: MultiWalletOpts = MultiWalletOpts::parse_from([
            "foundry-cli",
            "--keystores",
            keystore_file.to_str().unwrap(),
            "--password-file",
            keystore_password_file.to_str().unwrap(),
        ]);
        assert_eq!(
            args.keystore_password_files,
            Some(vec![keystore_password_file.to_str().unwrap().to_string()])
        );
        // With a password file present the keystore unlocks immediately
        // (no pending/interactive signer).
        let (_, unlocked) = args.keystores().unwrap().unwrap();
        assert_eq!(unlocked.len(), 1);
        assert_eq!(unlocked[0].address(), address!("0xec554aeafe75601aaab43bd4621a22284db566c2"));
    }

    // https://github.com/foundry-rs/foundry/issues/5179
    #[test]
    fn should_not_require_the_mnemonics_flag_with_mnemonic_indexes() {
        let wallet_options = vec![
            ("ledger", "--mnemonic-indexes", 1),
            ("trezor", "--mnemonic-indexes", 2),
            ("aws", "--mnemonic-indexes", 10),
            ("turnkey", "--mnemonic-indexes", 11),
        ];
        for test_case in wallet_options {
            let args: MultiWalletOpts = MultiWalletOpts::parse_from([
                "foundry-cli",
                &format!("--{}", test_case.0),
                test_case.1,
                &test_case.2.to_string(),
            ]);
            match test_case.0 {
                "ledger" => assert!(args.ledger),
                "trezor" => assert!(args.trezor),
                "aws" => assert!(args.aws),
                "turnkey" => assert!(args.turnkey),
                _ => panic!("Should have matched one of the previous wallet options"),
            }
            assert_eq!(
                args.mnemonic_indexes.expect("--mnemonic-indexes should have been set")[0],
                test_case.2
            )
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/wallets/src/wallet_raw/mod.rs | crates/wallets/src/wallet_raw/mod.rs | use crate::{PendingSigner, WalletSigner, utils};
use clap::Parser;
use eyre::Result;
use serde::Serialize;
/// A wrapper for the raw data options for `Wallet`, extracted to also be used standalone.
/// The raw wallet options can either be:
/// 1. Private Key (cleartext in CLI)
/// 2. Private Key (interactively via secure prompt)
/// 3. Mnemonic (via file path)
// NOTE: the `///` doc comments on the fields below double as clap help text —
// editing them changes the CLI's `--help` output.
#[derive(Clone, Debug, Default, Serialize, Parser)]
#[command(next_help_heading = "Wallet options - raw", about = None, long_about = None)]
pub struct RawWalletOpts {
    /// Open an interactive prompt to enter your private key.
    #[arg(long, short)]
    pub interactive: bool,

    /// Use the provided private key.
    #[arg(long, value_name = "RAW_PRIVATE_KEY")]
    pub private_key: Option<String>,

    /// Use the mnemonic phrase of mnemonic file at the specified path.
    #[arg(long, alias = "mnemonic-path")]
    pub mnemonic: Option<String>,

    /// Use a BIP39 passphrase for the mnemonic.
    #[arg(long, value_name = "PASSPHRASE")]
    pub mnemonic_passphrase: Option<String>,

    /// The wallet derivation path.
    ///
    /// Works with both --mnemonic-path and hardware wallets.
    #[arg(long = "mnemonic-derivation-path", alias = "hd-path", value_name = "PATH")]
    pub hd_path: Option<String>,

    /// Use the private key from the given mnemonic index.
    ///
    /// Used with --mnemonic-path.
    #[arg(long, conflicts_with = "hd_path", default_value_t = 0, value_name = "INDEX")]
    pub mnemonic_index: u32,
}
impl RawWalletOpts {
    /// Returns signer configured by provided parameters.
    ///
    /// Resolution order: interactive prompt, then raw private key, then mnemonic.
    /// `Ok(None)` when no option was supplied.
    pub fn signer(&self) -> Result<Option<WalletSigner>> {
        if self.interactive {
            let unlocked = PendingSigner::Interactive.unlock()?;
            return Ok(Some(unlocked));
        }
        match (&self.private_key, &self.mnemonic) {
            (Some(private_key), _) => {
                Ok(Some(utils::create_private_key_signer(private_key)?))
            }
            (None, Some(mnemonic)) => Ok(Some(utils::create_mnemonic_signer(
                mnemonic,
                self.mnemonic_passphrase.as_deref(),
                self.hd_path.as_deref(),
                self.mnemonic_index,
            )?)),
            (None, None) => Ok(None),
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/lib.rs | crates/lint/src/lib.rs | #![doc = include_str!("../README.md")]
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![allow(elided_lifetimes_in_paths)]
pub mod linter;
pub mod sol;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/linter/early.rs | crates/lint/src/linter/early.rs | use super::LintContext;
use solar::{
ast::{self as ast, visit::Visit},
interface::data_structures::Never,
};
use std::ops::ControlFlow;
/// Trait for lints that operate directly on the AST.
/// Its methods mirror `ast::visit::Visit`, with the addition of `LintContext`.
///
/// All methods default to no-ops, so a pass only implements the node types it
/// cares about.
pub trait EarlyLintPass<'ast>: Send + Sync {
    fn check_expr(&mut self, _ctx: &LintContext, _expr: &'ast ast::Expr<'ast>) {}
    fn check_item_struct(&mut self, _ctx: &LintContext, _struct: &'ast ast::ItemStruct<'ast>) {}
    fn check_item_function(&mut self, _ctx: &LintContext, _func: &'ast ast::ItemFunction<'ast>) {}
    fn check_variable_definition(
        &mut self,
        _ctx: &LintContext,
        _var: &'ast ast::VariableDefinition<'ast>,
    ) {
    }
    fn check_import_directive(
        &mut self,
        _ctx: &LintContext,
        _import: &'ast ast::ImportDirective<'ast>,
    ) {
    }
    fn check_using_directive(
        &mut self,
        _ctx: &LintContext,
        _using: &'ast ast::UsingDirective<'ast>,
    ) {
    }
    fn check_item_contract(
        &mut self,
        _ctx: &LintContext,
        _contract: &'ast ast::ItemContract<'ast>,
    ) {
    }
    fn check_doc_comment(&mut self, _ctx: &LintContext, _cmnt: &'ast ast::DocComment) {}
    // TODO: Add methods for each required AST node type

    /// Should be called after the source unit has been visited. Enables lints that require
    /// knowledge of the entire AST to perform their analysis.
    ///
    /// # Performance
    ///
    /// Since a full-AST analysis can be computationally expensive, implementations
    /// should guard their logic by first checking if the relevant lint is enabled
    /// using [`LintContext::is_lint_enabled`]. This avoids performing costly work
    /// if the user has disabled the lint.
    ///
    /// ### Example
    /// ```rust,ignore
    /// fn check_full_source_unit(&mut self, ctx: &LintContext<'ast, '_>, ast: &'ast ast::SourceUnit<'ast>) {
    ///     // Check if the lint is enabled before performing expensive work.
    ///     if ctx.is_lint_enabled(MY_EXPENSIVE_LINT.id) {
    ///         // ... perform computation and emit diagnostics ...
    ///     }
    /// }
    /// ```
    fn check_full_source_unit(
        &mut self,
        _ctx: &LintContext<'ast, '_>,
        _ast: &'ast ast::SourceUnit<'ast>,
    ) {
    }
}
/// Visitor struct for `EarlyLintPass`es
///
/// Walks the AST once and forwards each visited node to every registered pass.
pub struct EarlyLintVisitor<'a, 's, 'ast> {
    /// Shared lint context handed to every pass callback.
    pub ctx: &'a LintContext<'s, 'a>,
    /// The lint passes to dispatch to, run in order.
    pub passes: &'a mut [Box<dyn EarlyLintPass<'ast> + 's>],
}
impl<'a, 's, 'ast> EarlyLintVisitor<'a, 's, 'ast>
where
    's: 'ast,
{
    /// Creates a visitor that dispatches AST nodes to the given `passes`.
    pub fn new(
        ctx: &'a LintContext<'s, 'a>,
        passes: &'a mut [Box<dyn EarlyLintPass<'ast> + 's>],
    ) -> Self {
        Self { ctx, passes }
    }

    /// Extends the [`Visit`] trait functionality with a hook that can run after
    /// the initial traversal: invokes the whole-AST check of every pass.
    pub fn post_source_unit(&mut self, ast: &'ast ast::SourceUnit<'ast>) {
        for pass in &mut *self.passes {
            pass.check_full_source_unit(self.ctx, ast);
        }
    }
}
// Each `visit_*` method first notifies every registered pass, then recurses
// into the node via the default `walk_*` helper so child nodes are visited too.
impl<'s, 'ast> Visit<'ast> for EarlyLintVisitor<'_, 's, 'ast>
where
    's: 'ast,
{
    // Traversal never breaks early: lints only emit diagnostics.
    type BreakValue = Never;

    fn visit_doc_comment(&mut self, cmnt: &'ast ast::DocComment) -> ControlFlow<Self::BreakValue> {
        for pass in self.passes.iter_mut() {
            pass.check_doc_comment(self.ctx, cmnt)
        }
        self.walk_doc_comment(cmnt)
    }

    fn visit_expr(&mut self, expr: &'ast ast::Expr<'ast>) -> ControlFlow<Self::BreakValue> {
        for pass in self.passes.iter_mut() {
            pass.check_expr(self.ctx, expr)
        }
        self.walk_expr(expr)
    }

    fn visit_variable_definition(
        &mut self,
        var: &'ast ast::VariableDefinition<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        for pass in self.passes.iter_mut() {
            pass.check_variable_definition(self.ctx, var)
        }
        self.walk_variable_definition(var)
    }

    fn visit_item_struct(
        &mut self,
        strukt: &'ast ast::ItemStruct<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        for pass in self.passes.iter_mut() {
            pass.check_item_struct(self.ctx, strukt)
        }
        self.walk_item_struct(strukt)
    }

    fn visit_item_function(
        &mut self,
        func: &'ast ast::ItemFunction<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        for pass in self.passes.iter_mut() {
            pass.check_item_function(self.ctx, func)
        }
        self.walk_item_function(func)
    }

    fn visit_import_directive(
        &mut self,
        import: &'ast ast::ImportDirective<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        for pass in self.passes.iter_mut() {
            pass.check_import_directive(self.ctx, import);
        }
        self.walk_import_directive(import)
    }

    fn visit_using_directive(
        &mut self,
        using: &'ast ast::UsingDirective<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        for pass in self.passes.iter_mut() {
            pass.check_using_directive(self.ctx, using);
        }
        self.walk_using_directive(using)
    }

    fn visit_item_contract(
        &mut self,
        contract: &'ast ast::ItemContract<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        for pass in self.passes.iter_mut() {
            pass.check_item_contract(self.ctx, contract);
        }
        self.walk_item_contract(contract)
    }

    // TODO: Add methods for each required AST node type, mirroring `solar::ast::visit::Visit`
    // method sigs + adding `LintContext`
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/linter/mod.rs | crates/lint/src/linter/mod.rs | mod early;
mod late;
pub use early::{EarlyLintPass, EarlyLintVisitor};
pub use late::{LateLintPass, LateLintVisitor};
use foundry_common::comments::inline_config::InlineConfig;
use foundry_compilers::Language;
use foundry_config::{DenyLevel, lint::Severity};
use solar::{
interface::{
Session, Span,
diagnostics::{
Applicability, DiagBuilder, DiagId, DiagMsg, MultiSpan, Style, SuggestionStyle,
},
},
sema::Compiler,
};
use std::path::PathBuf;
/// Trait representing a generic linter for analyzing and reporting issues in smart contract source
/// code files.
///
/// A linter can be implemented for any smart contract language supported by Foundry.
pub trait Linter: Send + Sync {
    /// The target [`Language`].
    type Language: Language;
    /// The [`Lint`] type.
    type Lint: Lint;

    /// Run all lints.
    ///
    /// The `compiler` should have already been configured with all the sources necessary,
    /// as well as having performed parsing and lowering.
    ///
    /// Should return an error based on the configured [`DenyLevel`] and the emitted diagnostics.
    fn lint(&self, input: &[PathBuf], deny: DenyLevel, compiler: &mut Compiler)
    -> eyre::Result<()>;
}
/// A single lint rule: a unique ID plus severity and user-facing description.
pub trait Lint {
    /// Unique identifier of the lint (also used by inline disable directives).
    fn id(&self) -> &'static str;
    /// Severity the lint's diagnostics are reported with.
    fn severity(&self) -> Severity;
    /// Short human-readable description of what the lint detects.
    fn description(&self) -> &'static str;
    /// Help text; rendered as a hyperlink unless a JSON emitter is in use.
    fn help(&self) -> &'static str;
}
/// Shared state passed to every lint pass for emitting diagnostics.
pub struct LintContext<'s, 'c> {
    // Solar diagnostics session used to emit lints.
    sess: &'s Session,
    // Whether to include the lint's description in emitted diagnostics.
    with_description: bool,
    // Whether a JSON emitter is in use (suppresses ANSI hyperlinks in help text).
    with_json_emitter: bool,
    /// Linter configuration (inline disables and lint-specific settings).
    pub config: LinterConfig<'c>,
    // IDs of the lints that are currently enabled.
    active_lints: Vec<&'static str>,
}
/// Configuration shared by the lint passes.
pub struct LinterConfig<'s> {
    /// In-source (inline) per-lint disable directives.
    pub inline: &'s InlineConfig<Vec<String>>,
    /// Identifier names exempted from mixed-case naming checks.
    pub mixed_case_exceptions: &'s [String],
}
impl<'s, 'c> LintContext<'s, 'c> {
    /// Creates a new lint context.
    pub fn new(
        sess: &'s Session,
        with_description: bool,
        with_json_emitter: bool,
        config: LinterConfig<'c>,
        active_lints: Vec<&'static str>,
    ) -> Self {
        Self { sess, with_description, with_json_emitter, config, active_lints }
    }

    /// Returns the underlying diagnostics session.
    pub fn session(&self) -> &'s Session {
        self.sess
    }

    // Helper method to check if a lint id is enabled.
    //
    // For performance reasons, some passes check several lints at once. Thus, this method is
    // required to avoid unintended warnings.
    pub fn is_lint_enabled(&self, id: &'static str) -> bool {
        self.active_lints.contains(&id)
    }

    /// Helper method to emit diagnostics easily from passes
    pub fn emit<L: Lint>(&self, lint: &'static L, span: Span) {
        // Skip lints disabled inline at this span or globally deactivated.
        if self.config.inline.is_id_disabled(span, lint.id()) || !self.is_lint_enabled(lint.id()) {
            return;
        }

        let desc = if self.with_description { lint.description() } else { "" };
        let mut diag: DiagBuilder<'_, ()> = self
            .sess
            .dcx
            .diag(lint.severity().into(), desc)
            .code(DiagId::new_str(lint.id()))
            .span(MultiSpan::from_span(span));

        // Avoid ANSI characters when using a JSON emitter
        if self.with_json_emitter {
            diag = diag.help(lint.help());
        } else {
            diag = diag.help(hyperlink(lint.help()));
        }

        diag.emit();
    }

    /// Emit a diagnostic with a code suggestion.
    ///
    /// If no span is provided for [`SuggestionKind::Fix`], it will use the lint's span.
    pub fn emit_with_suggestion<L: Lint>(
        &self,
        lint: &'static L,
        span: Span,
        suggestion: Suggestion,
    ) {
        // Skip lints disabled inline at this span or globally deactivated.
        if self.config.inline.is_id_disabled(span, lint.id()) || !self.is_lint_enabled(lint.id()) {
            return;
        }

        let desc = if self.with_description { lint.description() } else { "" };
        let mut diag: DiagBuilder<'_, ()> = self
            .sess
            .dcx
            .diag(lint.severity().into(), desc)
            .code(DiagId::new_str(lint.id()))
            .span(MultiSpan::from_span(span));

        diag = match suggestion.kind {
            // Fixes render as a diff against the (optional) fix span.
            SuggestionKind::Fix { span: fix_span, applicability, style } => diag
                .span_suggestion_with_style(
                    fix_span.unwrap_or(span),
                    suggestion.desc.unwrap_or_default(),
                    suggestion.content,
                    applicability,
                    style,
                ),
            // Examples render as a plain note.
            SuggestionKind::Example => {
                if let Some(note) = suggestion.to_note() {
                    diag.note(note.iter().map(|l| l.0.as_str()).collect::<String>())
                } else {
                    diag
                }
            }
        };

        // Avoid ANSI characters when using a JSON emitter
        if self.with_json_emitter {
            diag = diag.help(lint.help());
        } else {
            diag = diag.help(hyperlink(lint.help()));
        }

        diag.emit();
    }

    /// Gets the "raw" source code (snippet) of the given span.
    pub fn span_to_snippet(&self, span: Span) -> Option<String> {
        self.sess.source_map().span_to_snippet(span).ok()
    }

    /// Gets the number of leading whitespaces (indentation) of the line where the span begins.
    pub fn get_span_indentation(&self, span: Span) -> usize {
        if !span.is_dummy() {
            // Get the line text and compute the indentation prior to the span's position.
            let loc = self.sess.source_map().lookup_char_pos(span.lo());
            if let Some(line_text) = loc.file.get_line(loc.line) {
                let col_offset = loc.col.to_usize();
                if col_offset <= line_text.len() {
                    let prev_text = &line_text[..col_offset];
                    // Count only the *leading* whitespace of the line. `trim()` would
                    // also count whitespace immediately preceding the span (e.g. after
                    // code earlier on the same line), over-reporting the indentation.
                    return prev_text.len() - prev_text.trim_start().len();
                }
            }
        }

        0
    }
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum SuggestionKind {
    /// A standalone block of code. Used for showing examples without suggesting a fix.
    ///
    /// Multi-line strings should include newlines.
    Example,
    /// A proposed code change, displayed as a diff. Used to suggest replacements, showing the code
    /// to be removed (from `span`) and the code to be added (from `add`).
    Fix {
        /// The `Span` of the source code to be removed. Note that, if uninformed,
        /// `LintContext::emit_with_suggestion` falls back to the lint span.
        span: Option<Span>,
        /// The applicability of the suggested fix.
        applicability: Applicability,
        /// The style of the suggested fix.
        style: SuggestionStyle,
    },
}
/// An emittable diagnostic suggestion.
///
/// Depending on its [`SuggestionKind`] will be emitted as a simple note (examples), or a fix
/// suggestion.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Suggestion {
    /// An optional description displayed above the code block.
    desc: Option<&'static str>,
    /// The actual suggestion.
    content: String,
    /// The suggestion type and its specific data.
    kind: SuggestionKind,
}
impl Suggestion {
    /// Creates a new [`SuggestionKind::Example`] suggestion.
    pub fn example(content: String) -> Self {
        Self { desc: None, content, kind: SuggestionKind::Example }
    }

    /// Creates a new [`SuggestionKind::Fix`] suggestion.
    ///
    /// When possible, will attempt to inline the suggestion.
    pub fn fix(content: String, applicability: Applicability) -> Self {
        let kind = SuggestionKind::Fix {
            span: None,
            applicability,
            style: SuggestionStyle::ShowCode,
        };
        Self { desc: None, content, kind }
    }

    /// Sets the description for the suggestion.
    pub fn with_desc(mut self, desc: &'static str) -> Self {
        self.desc = Some(desc);
        self
    }

    /// Sets the span for a [`SuggestionKind::Fix`] suggestion. No-op for examples.
    pub fn with_span(mut self, span: Span) -> Self {
        match &mut self.kind {
            SuggestionKind::Fix { span: slot, .. } => *slot = Some(span),
            SuggestionKind::Example => {}
        }
        self
    }

    /// Sets the style for a [`SuggestionKind::Fix`] suggestion. No-op for examples.
    pub fn with_style(mut self, style: SuggestionStyle) -> Self {
        match &mut self.kind {
            SuggestionKind::Fix { style: slot, .. } => *slot = style,
            SuggestionKind::Example => {}
        }
        self
    }

    /// Renders an example suggestion as styled note lines; returns `None` for fixes.
    fn to_note(&self) -> Option<Vec<(DiagMsg, Style)>> {
        if matches!(self.kind, SuggestionKind::Fix { .. }) {
            return None;
        }
        // Header: the description (when present) followed by a separator, otherwise
        // a single near-empty line so the code block is offset from the note label.
        let mut lines = match self.desc {
            Some(desc) => {
                vec![(DiagMsg::from(desc), Style::NoStyle), (DiagMsg::from("\n\n"), Style::NoStyle)]
            }
            None => vec![(DiagMsg::from(" \n"), Style::NoStyle)],
        };
        for line in self.content.lines() {
            lines.push((DiagMsg::from(format!("{line}\n")), Style::NoStyle));
        }
        lines.push((DiagMsg::from("\n"), Style::NoStyle));
        Some(lines)
    }
}
/// Creates a hyperlink of the input url.
///
/// Wraps `url` in OSC-8 terminal escape sequences so supporting terminals render
/// it as a clickable link; the visible text is the url itself.
fn hyperlink(url: &'static str) -> String {
    const OPEN: &str = "\x1b]8;;";
    const CLOSE: &str = "\x1b\\";
    format!("{OPEN}{url}{CLOSE}{url}{OPEN}{CLOSE}")
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/linter/late.rs | crates/lint/src/linter/late.rs | use solar::{interface::data_structures::Never, sema::hir};
use std::ops::ControlFlow;
use super::LintContext;
/// Trait for lints that operate on the HIR (High-level Intermediate Representation).
/// Its methods mirror `hir::visit::Visit`, with the addition of `LintContext`.
///
/// All hooks default to no-ops, so passes only override the node kinds they care about.
pub trait LateLintPass<'hir>: Send + Sync {
    /// Called for each nested source unit, before its contents are walked.
    fn check_nested_source(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _id: hir::SourceId,
    ) {
    }
    /// Called for each nested item id.
    fn check_nested_item(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _id: &'hir hir::ItemId,
    ) {
    }
    /// Called for each nested contract id.
    fn check_nested_contract(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _id: &'hir hir::ContractId,
    ) {
    }
    /// Called for each nested function id.
    fn check_nested_function(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _id: &'hir hir::FunctionId,
    ) {
    }
    /// Called for each nested variable id.
    fn check_nested_var(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _id: &'hir hir::VariableId,
    ) {
    }
    /// Called for each item.
    fn check_item(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _item: hir::Item<'hir, 'hir>,
    ) {
    }
    /// Called for each contract definition.
    fn check_contract(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _contract: &'hir hir::Contract<'hir>,
    ) {
    }
    /// Called for each function definition.
    fn check_function(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _func: &'hir hir::Function<'hir>,
    ) {
    }
    /// Called for each modifier invocation.
    fn check_modifier(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _mod: &'hir hir::Modifier<'hir>,
    ) {
    }
    /// Called for each variable declaration.
    fn check_var(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _var: &'hir hir::Variable<'hir>,
    ) {
    }
    /// Called for each expression.
    fn check_expr(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _expr: &'hir hir::Expr<'hir>,
    ) {
    }
    /// Called for each set of call arguments.
    fn check_call_args(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _args: &'hir hir::CallArgs<'hir>,
    ) {
    }
    /// Called for each statement.
    fn check_stmt(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _stmt: &'hir hir::Stmt<'hir>,
    ) {
    }
    /// Called for each type annotation.
    fn check_ty(
        &mut self,
        _ctx: &LintContext,
        _hir: &'hir hir::Hir<'hir>,
        _ty: &'hir hir::Type<'hir>,
    ) {
    }
}
/// Visitor struct for `LateLintPass`es
pub struct LateLintVisitor<'a, 's, 'hir> {
    /// Shared context through which passes emit diagnostics.
    ctx: &'a LintContext<'s, 'a>,
    /// The passes to run on each visited HIR node.
    passes: &'a mut [Box<dyn LateLintPass<'hir> + 's>],
    /// The HIR being traversed.
    hir: &'hir hir::Hir<'hir>,
}
impl<'a, 's, 'hir> LateLintVisitor<'a, 's, 'hir>
where
    's: 'hir,
{
    /// Creates a visitor that runs `passes` over `hir`, emitting through `ctx`.
    pub fn new(
        ctx: &'a LintContext<'s, 'a>,
        passes: &'a mut [Box<dyn LateLintPass<'hir> + 's>],
        hir: &'hir hir::Hir<'hir>,
    ) -> Self {
        Self { ctx, passes, hir }
    }
}
/// Walks the HIR, invoking every pass' matching `check_*` hook on each node
/// before recursing via the default `walk_*` implementation.
impl<'s, 'hir> hir::Visit<'hir> for LateLintVisitor<'_, 's, 'hir>
where
    's: 'hir,
{
    type BreakValue = Never;

    fn hir(&self) -> &'hir hir::Hir<'hir> {
        self.hir
    }

    fn visit_nested_source(&mut self, id: hir::SourceId) -> ControlFlow<Self::BreakValue> {
        self.passes.iter_mut().for_each(|pass| pass.check_nested_source(self.ctx, self.hir, id));
        self.walk_nested_source(id)
    }

    fn visit_contract(
        &mut self,
        contract: &'hir hir::Contract<'hir>,
    ) -> ControlFlow<Self::BreakValue> {
        self.passes.iter_mut().for_each(|pass| pass.check_contract(self.ctx, self.hir, contract));
        self.walk_contract(contract)
    }

    fn visit_function(&mut self, func: &'hir hir::Function<'hir>) -> ControlFlow<Self::BreakValue> {
        self.passes.iter_mut().for_each(|pass| pass.check_function(self.ctx, self.hir, func));
        self.walk_function(func)
    }

    fn visit_item(&mut self, item: hir::Item<'hir, 'hir>) -> ControlFlow<Self::BreakValue> {
        self.passes.iter_mut().for_each(|pass| pass.check_item(self.ctx, self.hir, item));
        self.walk_item(item)
    }

    fn visit_var(&mut self, var: &'hir hir::Variable<'hir>) -> ControlFlow<Self::BreakValue> {
        self.passes.iter_mut().for_each(|pass| pass.check_var(self.ctx, self.hir, var));
        self.walk_var(var)
    }

    fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) -> ControlFlow<Self::BreakValue> {
        self.passes.iter_mut().for_each(|pass| pass.check_expr(self.ctx, self.hir, expr));
        self.walk_expr(expr)
    }

    fn visit_stmt(&mut self, stmt: &'hir hir::Stmt<'hir>) -> ControlFlow<Self::BreakValue> {
        self.passes.iter_mut().for_each(|pass| pass.check_stmt(self.ctx, self.hir, stmt));
        self.walk_stmt(stmt)
    }

    fn visit_ty(&mut self, ty: &'hir hir::Type<'hir>) -> ControlFlow<Self::BreakValue> {
        self.passes.iter_mut().for_each(|pass| pass.check_ty(self.ctx, self.hir, ty));
        self.walk_ty(ty)
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/macros.rs | crates/lint/src/sol/macros.rs | /// Macro for defining lints and relevant metadata for the Solidity linter.
///
/// # Parameters
///
/// Each lint requires the following input fields:
/// - `$id`: Identifier of the generated `SolLint` constant.
/// - `$severity`: The `Severity` of the lint (e.g. `High`, `Med`, `Low`, `Info`, `Gas`).
/// - `$str_id`: A unique identifier used to reference a specific lint during configuration.
/// - `$desc`: A short description of the lint.
///
/// # Note
/// Each lint must have a `help` section in the foundry book. This help field is auto-generated by
/// the macro. Because of that, to ensure that new lint rules have their corresponding docs in the
/// book, the existence of the lint rule's help section is validated with a unit test.
#[macro_export]
macro_rules! declare_forge_lint {
    ($id:ident, $severity:expr, $str_id:expr, $desc:expr) => {
        // Declare the static `Lint` metadata
        pub static $id: SolLint = SolLint {
            id: $str_id,
            severity: $severity,
            description: $desc,
            help: concat!("https://book.getfoundry.sh/reference/forge/forge-lint#", $str_id),
        };
    };
    // NOTE: a second arm with the exact same 4-argument pattern used to live here;
    // macro arms are matched in declaration order, so it could never be reached
    // (and its body forwarded five arguments, which no arm accepts). It has been
    // removed as dead code.
}
/// Registers Solidity linter passes that can have both early and late variants.
///
/// # Parameters
///
/// Each pass is declared with:
/// - `$pass_id`: Identifier of the generated struct that will implement the pass trait(s).
/// - `$pass_type`: Either `early`, `late`, or `both` to indicate which traits to implement.
/// - `$lints`: A parenthesized, comma-separated list of `SolLint` constants.
///
/// # Outputs
///
/// - Structs for each linting pass
/// - Helper methods to create early and late passes with required lifetimes
/// - `const REGISTERED_LINTS` containing all registered lint objects
#[macro_export]
macro_rules! register_lints {
    // 1. Internal rule for declaring structs and their associated lints.
    ( @declare_structs $( ($pass_id:ident, $pass_type:ident, ($($lint:expr),* $(,)?)) ),* $(,)? ) => {
        $(
            #[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
            pub struct $pass_id;
            impl $pass_id {
                /// Static slice of lints associated with this pass.
                const LINTS: &'static [SolLint] = &[$($lint),*];
                register_lints!(@early_impl $pass_id, $pass_type);
                register_lints!(@late_impl $pass_id, $pass_type);
            }
        )*
    };
    // 2. Internal rule for declaring the const array of ALL lints.
    ( @declare_consts $( ($pass_id:ident, $pass_type:ident, ($($lint:expr),* $(,)?)) ),* $(,)? ) => {
        pub const REGISTERED_LINTS: &[SolLint] = &[
            $(
                $($lint,)*
            )*
        ];
    };
    // 3. Internal rule for declaring the helper functions.
    ( @declare_funcs $( ($pass_id:ident, $pass_type:ident, $lints:tt) ),* $(,)? ) => {
        pub fn create_early_lint_passes<'ast>() -> Vec<(Box<dyn EarlyLintPass<'ast>>, &'static [SolLint])> {
            [
                $(
                    register_lints!(@early_create $pass_id, $pass_type),
                )*
            ]
            .into_iter()
            .flatten()
            .collect()
        }
        pub fn create_late_lint_passes<'hir>() -> Vec<(Box<dyn LateLintPass<'hir>>, &'static [SolLint])> {
            [
                $(
                    register_lints!(@late_create $pass_id, $pass_type),
                )*
            ]
            .into_iter()
            .flatten()
            .collect()
        }
    };
    // --- HELPERS ------------------------------------------------------------
    // `late`-only passes get no early constructor; any other marker (`early`/`both`) does.
    (@early_impl $_pass_id:ident, late) => {};
    (@early_impl $pass_id:ident, $other:ident) => {
        pub fn as_early_lint_pass<'a>() -> Box<dyn EarlyLintPass<'a>> {
            Box::new(Self::default())
        }
    };
    // `early`-only passes get no late constructor.
    (@late_impl $_pass_id:ident, early) => {};
    (@late_impl $pass_id:ident, $other:ident) => {
        pub fn as_late_lint_pass<'hir>() -> Box<dyn LateLintPass<'hir>> {
            Box::new(Self::default())
        }
    };
    // `@*_create` expand to `Option`s so the arrays above can be `flatten()`ed,
    // skipping passes that lack the corresponding variant.
    (@early_create $_pass_id:ident, late) => { None };
    (@early_create $pass_id:ident, $_other:ident) => {
        Some(($pass_id::as_early_lint_pass(), $pass_id::LINTS))
    };
    (@late_create $_pass_id:ident, early) => { None };
    (@late_create $pass_id:ident, $_other:ident) => {
        Some(($pass_id::as_late_lint_pass(), $pass_id::LINTS))
    };
    // --- ENTRY POINT ---------------------------------------------------------
    ( $($tokens:tt)* ) => {
        register_lints! { @declare_structs $($tokens)* }
        register_lints! { @declare_consts $($tokens)* }
        register_lints! { @declare_funcs $($tokens)* }
    };
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/mod.rs | crates/lint/src/sol/mod.rs | use crate::linter::{
EarlyLintPass, EarlyLintVisitor, LateLintPass, LateLintVisitor, Lint, LintContext, Linter,
LinterConfig,
};
use foundry_common::{
comments::{
Comments,
inline_config::{InlineConfig, InlineConfigItem},
},
errors::convert_solar_errors,
sh_warn,
};
use foundry_compilers::{ProjectPathsConfig, solc::SolcLanguage};
use foundry_config::{DenyLevel, lint::Severity};
use rayon::prelude::*;
use solar::{
ast::{self as ast, visit::Visit as _},
interface::{
Session,
diagnostics::{self, HumanEmitter, JsonEmitter},
},
sema::{
Compiler, Gcx,
hir::{self, Visit as _},
},
};
use std::{
path::{Path, PathBuf},
sync::LazyLock,
};
use thiserror::Error;
#[macro_use]
pub mod macros;
pub mod codesize;
pub mod gas;
pub mod high;
pub mod info;
pub mod med;
/// Ids of every lint registered across all severity modules, used to validate
/// inline `forge-lint:` configuration directives.
static ALL_REGISTERED_LINTS: LazyLock<Vec<&'static str>> = LazyLock::new(|| {
    [
        high::REGISTERED_LINTS,
        med::REGISTERED_LINTS,
        info::REGISTERED_LINTS,
        gas::REGISTERED_LINTS,
        codesize::REGISTERED_LINTS,
    ]
    .into_iter()
    .flatten()
    .map(|lint| lint.id())
    .collect()
});
/// Linter implementation to analyze Solidity source code responsible for identifying
/// vulnerabilities, gas optimizations, and best practices.
#[derive(Debug)]
pub struct SolidityLinter<'a> {
    /// Project paths; also used to tell test/script sources apart.
    path_config: ProjectPathsConfig,
    /// Only run lints with one of these severities (`None` = all).
    severity: Option<Vec<Severity>>,
    /// Allow-list of lints to run (`None` = all registered lints).
    lints_included: Option<Vec<SolLint>>,
    /// Deny-list of lints to skip.
    lints_excluded: Option<Vec<SolLint>>,
    /// Whether diagnostics include the lint's description.
    with_description: bool,
    /// Whether diagnostics are emitted as JSON instead of human-readable text.
    with_json_emitter: bool,
    // lint-specific configuration
    /// Identifier names exempted from mixed-case naming lints.
    mixed_case_exceptions: &'a [String],
}
impl<'a> SolidityLinter<'a> {
    /// Creates a linter with default settings: descriptions on, human (non-JSON)
    /// output, no severity/lint filters, and no mixed-case exceptions.
    pub fn new(path_config: ProjectPathsConfig) -> Self {
        Self {
            path_config,
            with_description: true,
            severity: None,
            lints_included: None,
            lints_excluded: None,
            with_json_emitter: false,
            mixed_case_exceptions: &[],
        }
    }

    /// Restricts output to the given severities (`None` = all).
    pub fn with_severity(mut self, severity: Option<Vec<Severity>>) -> Self {
        self.severity = severity;
        self
    }

    /// Sets the allow-list of lints to run (`None` = all registered lints).
    pub fn with_lints(mut self, lints: Option<Vec<SolLint>>) -> Self {
        self.lints_included = lints;
        self
    }

    /// Sets the deny-list of lints to skip.
    pub fn without_lints(mut self, lints: Option<Vec<SolLint>>) -> Self {
        self.lints_excluded = lints;
        self
    }

    /// Whether emitted diagnostics include the lint's description.
    pub fn with_description(mut self, with: bool) -> Self {
        self.with_description = with;
        self
    }

    /// Whether diagnostics are emitted as JSON instead of human-readable text.
    pub fn with_json_emitter(mut self, with: bool) -> Self {
        self.with_json_emitter = with;
        self
    }

    /// Sets identifier names exempted from mixed-case naming lints.
    pub fn with_mixed_case_exceptions(mut self, exceptions: &'a [String]) -> Self {
        self.mixed_case_exceptions = exceptions;
        self
    }

    /// Builds the per-run lint configuration from the parsed inline config.
    fn config(&'a self, inline: &'a InlineConfig<Vec<String>>) -> LinterConfig<'a> {
        LinterConfig { inline, mixed_case_exceptions: self.mixed_case_exceptions }
    }

    /// Returns `true` if `lint` passes the severity and include/exclude filters.
    fn include_lint(&self, lint: SolLint) -> bool {
        self.severity.as_ref().is_none_or(|sev| sev.contains(&lint.severity()))
            && self.lints_included.as_ref().is_none_or(|incl| incl.contains(&lint))
            && !self.lints_excluded.as_ref().is_some_and(|excl| excl.contains(&lint))
    }

    /// Keeps only the passes that have at least one enabled lint, returning them
    /// together with the ids of all enabled lints.
    ///
    /// Shared by the early (AST) and late (HIR) pipelines, which differ only in
    /// the pass type `P`.
    fn filter_passes<P>(
        &self,
        passes_and_lints: Vec<(P, &'static [SolLint])>,
    ) -> (Vec<P>, Vec<&'static str>) {
        let mut passes = Vec::new();
        let mut ids = Vec::new();
        for (pass, lints) in passes_and_lints {
            let included: Vec<_> =
                lints.iter().filter(|lint| self.include_lint(**lint)).map(|lint| lint.id).collect();
            // A pass whose lints are all disabled is dropped entirely, so passes
            // that check several lints at once never run for nothing.
            if !included.is_empty() {
                passes.push(pass);
                ids.extend(included);
            }
        }
        (passes, ids)
    }

    /// Runs all enabled early (AST-based) passes over a single source.
    fn process_source_ast<'gcx>(
        &self,
        sess: &'gcx Session,
        ast: &'gcx ast::SourceUnit<'gcx>,
        path: &Path,
        inline_config: &InlineConfig<Vec<String>>,
    ) -> Result<(), diagnostics::ErrorGuaranteed> {
        // Declare all available passes and lints
        let mut passes_and_lints = Vec::new();
        passes_and_lints.extend(high::create_early_lint_passes());
        passes_and_lints.extend(med::create_early_lint_passes());
        passes_and_lints.extend(info::create_early_lint_passes());
        // Do not apply 'gas' and 'codesize' severity rules on tests and scripts
        if !self.path_config.is_test_or_script(path) {
            passes_and_lints.extend(gas::create_early_lint_passes());
            passes_and_lints.extend(codesize::create_early_lint_passes());
        }
        // Filter passes based on linter config
        let (mut passes, lints) = self.filter_passes(passes_and_lints);
        // Initialize and run the early lint visitor
        let ctx = LintContext::new(
            sess,
            self.with_description,
            self.with_json_emitter,
            self.config(inline_config),
            lints,
        );
        let mut early_visitor = EarlyLintVisitor::new(&ctx, &mut passes);
        _ = early_visitor.visit_source_unit(ast);
        early_visitor.post_source_unit(ast);
        Ok(())
    }

    /// Runs all enabled late (HIR-based) passes over a single source.
    fn process_source_hir<'gcx>(
        &self,
        gcx: Gcx<'gcx>,
        source_id: hir::SourceId,
        path: &Path,
        inline_config: &InlineConfig<Vec<String>>,
    ) -> Result<(), diagnostics::ErrorGuaranteed> {
        // Declare all available passes and lints
        let mut passes_and_lints = Vec::new();
        passes_and_lints.extend(high::create_late_lint_passes());
        passes_and_lints.extend(med::create_late_lint_passes());
        passes_and_lints.extend(info::create_late_lint_passes());
        // Do not apply 'gas' and 'codesize' severity rules on tests and scripts
        if !self.path_config.is_test_or_script(path) {
            passes_and_lints.extend(gas::create_late_lint_passes());
            passes_and_lints.extend(codesize::create_late_lint_passes());
        }
        // Filter passes based on config
        let (mut passes, lints) = self.filter_passes(passes_and_lints);
        // Run late lint visitor
        let ctx = LintContext::new(
            gcx.sess,
            self.with_description,
            self.with_json_emitter,
            self.config(inline_config),
            lints,
        );
        let mut late_visitor = LateLintVisitor::new(&ctx, &mut passes, &gcx.hir);
        // Visit this specific source
        let _ = late_visitor.visit_nested_source(source_id);
        Ok(())
    }
}
impl<'a> Linter for SolidityLinter<'a> {
    type Language = SolcLanguage;
    type Lint = SolLint;

    /// Lints every file in `input`: swaps in the configured emitter, lowers the ASTs
    /// if needed, runs early + late passes in parallel per file, restores the previous
    /// emitter, and finally fails according to `deny` and the diagnostic counts.
    fn lint(
        &self,
        input: &[PathBuf],
        deny: DenyLevel,
        compiler: &mut Compiler,
    ) -> eyre::Result<()> {
        // Bail out early if compilation already produced errors.
        convert_solar_errors(compiler.dcx())?;
        let ui_testing = std::env::var_os("FOUNDRY_LINT_UI_TESTING").is_some();
        let sm = compiler.sess().clone_source_map();
        // Swap in the lint emitter (JSON or human), keeping the previous one so it
        // can be restored after linting.
        let prev_emitter = compiler.dcx().set_emitter(if self.with_json_emitter {
            let writer = Box::new(std::io::BufWriter::new(std::io::stderr()));
            let json_emitter = JsonEmitter::new(writer, sm).rustc_like(true).ui_testing(ui_testing);
            Box::new(json_emitter)
        } else {
            Box::new(HumanEmitter::stderr(Default::default()).source_map(Some(sm)))
        });
        let sess = compiler.sess_mut();
        sess.dcx.set_flags_mut(|f| f.track_diagnostics = false);
        if ui_testing {
            sess.opts.unstable.ui_testing = true;
            sess.reconfigure();
        }
        compiler.enter_mut(|compiler| -> eyre::Result<()> {
            // Late passes need the HIR, so lower the ASTs if that hasn't happened yet.
            if compiler.gcx().stage() < Some(solar::config::CompilerStage::Lowering) {
                let _ = compiler.lower_asts();
            }
            let gcx = compiler.gcx();
            input.par_iter().for_each(|path| {
                let path = &self.path_config.root.join(path);
                let Some((_, ast_source)) = gcx.get_ast_source(path) else {
                    // issue a warning rather than panicking, in case that some (but not all) of the
                    // input files have old solidity versions which are not supported by solar.
                    _ = sh_warn!("AST source not found for {}", path.display());
                    return;
                };
                let Some(ast) = &ast_source.ast else {
                    panic!("AST missing for {}", path.display());
                };
                // Parse inline config.
                let file = &ast_source.file;
                let comments = Comments::new(file, gcx.sess.source_map(), false, false, None);
                let inline_config = parse_inline_config(gcx.sess, &comments, ast);
                // Early lints.
                let _ = self.process_source_ast(gcx.sess, ast, path, &inline_config);
                // Late lints.
                let Some((hir_source_id, _)) = gcx.get_hir_source(path) else {
                    panic!("HIR source not found for {}", path.display());
                };
                let _ = self.process_source_hir(gcx, hir_source_id, path, &inline_config);
            });
            convert_solar_errors(compiler.dcx())
        })?;
        // Restore the emitter and UI-testing flags that were swapped above.
        let sess = compiler.sess_mut();
        sess.dcx.set_emitter(prev_emitter);
        if ui_testing {
            sess.opts.unstable.ui_testing = false;
            sess.reconfigure();
        }
        // Handle diagnostics and fail if necessary.
        const MSG: &str = "aborting due to ";
        match (deny, compiler.dcx().warn_count(), compiler.dcx().note_count()) {
            // Deny warnings.
            (DenyLevel::Warnings, w, n) if w > 0 => {
                if n > 0 {
                    Err(eyre::eyre!("{MSG}{w} linter warning(s); {n} note(s) were also emitted\n"))
                } else {
                    Err(eyre::eyre!("{MSG}{w} linter warning(s)\n"))
                }
            }
            // Deny any diagnostic.
            (DenyLevel::Notes, w, n) if w > 0 || n > 0 => match (w, n) {
                (w, n) if w > 0 && n > 0 => {
                    Err(eyre::eyre!("{MSG}{w} linter warning(s) and {n} note(s)\n"))
                }
                (w, 0) => Err(eyre::eyre!("{MSG}{w} linter warning(s)\n")),
                (0, n) => Err(eyre::eyre!("{MSG}{n} linter note(s)\n")),
                _ => unreachable!(),
            },
            // Otherwise, succeed.
            _ => Ok(()),
        }
    }
}
/// Collects `forge-lint:` directives from `comments` into an [`InlineConfig`],
/// warning (but not failing) on directives that do not parse.
fn parse_inline_config<'ast>(
    sess: &Session,
    comments: &Comments,
    ast: &'ast ast::SourceUnit<'ast>,
) -> InlineConfig<Vec<String>> {
    let items = comments.iter().filter_map(|comment| {
        // Strip the comment delimiters, leaving only the comment's text.
        let mut text = comment.lines.first()?.as_str();
        if let Some(prefix) = comment.prefix() {
            text = text.strip_prefix(prefix).unwrap_or(text);
        }
        if let Some(suffix) = comment.suffix() {
            text = text.strip_suffix(suffix).unwrap_or(text);
        }
        // Only comments of the shape `forge-lint: <directive>` are configuration.
        let directive = text.trim_start().strip_prefix("forge-lint:")?.trim();
        match InlineConfigItem::parse(directive, &ALL_REGISTERED_LINTS) {
            Ok(item) => Some((comment.span, item)),
            Err(err) => {
                sess.dcx.warn(err.to_string()).span(comment.span).emit();
                None
            }
        }
    });
    InlineConfig::from_ast(items, ast, sess.source_map())
}
/// Errors that can occur when resolving a [`SolLint`] from its string id.
#[derive(Error, Debug)]
pub enum SolLintError {
    /// The given string does not match any registered lint id.
    #[error("Unknown lint ID: {0}")]
    InvalidId(String),
}
/// A single Solidity lint: its unique id, description, help link, and severity.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub struct SolLint {
    /// Unique string identifier, used in configuration and as the diagnostic code.
    id: &'static str,
    /// Short human-readable description of the lint.
    description: &'static str,
    /// URL of the lint's section in the Foundry book.
    help: &'static str,
    /// Severity class of the lint.
    severity: Severity,
}
// Trivial field accessors satisfying the generic `Lint` interface.
impl Lint for SolLint {
    fn id(&self) -> &'static str {
        self.id
    }

    fn severity(&self) -> Severity {
        self.severity
    }

    fn description(&self) -> &'static str {
        self.description
    }

    fn help(&self) -> &'static str {
        self.help
    }
}
impl<'a> TryFrom<&'a str> for SolLint {
    type Error = SolLintError;

    /// Resolves a lint from its string id by searching every severity module's
    /// registered lints, in severity order.
    fn try_from(value: &'a str) -> Result<Self, Self::Error> {
        high::REGISTERED_LINTS
            .iter()
            .chain(med::REGISTERED_LINTS)
            .chain(info::REGISTERED_LINTS)
            .chain(gas::REGISTERED_LINTS)
            .chain(codesize::REGISTERED_LINTS)
            .find(|lint| lint.id() == value)
            .copied()
            .ok_or_else(|| SolLintError::InvalidId(value.to_string()))
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/med/unsafe_typecast.rs | crates/lint/src/sol/med/unsafe_typecast.rs | use super::UnsafeTypecast;
use crate::{
linter::{LateLintPass, LintContext, Suggestion},
sol::{Severity, SolLint},
};
use solar::{
ast::{LitKind, StrKind},
sema::hir::{self, ElementaryType, ExprKind, ItemId, Res, TypeKind},
};
// Lint metadata for `unsafe-typecast`; the help link is derived from the id.
declare_forge_lint!(
    UNSAFE_TYPECAST,
    Severity::Med,
    "unsafe-typecast",
    "typecasts that can truncate values should be checked"
);
/// Flags elementary type casts `Type(value)` whose inferred source type could lose
/// data or precision when converted to `Type`; emits an example suggestion showing
/// how to acknowledge and silence the lint inline.
impl<'hir> LateLintPass<'hir> for UnsafeTypecast {
    fn check_expr(
        &mut self,
        ctx: &LintContext,
        hir: &'hir hir::Hir<'hir>,
        expr: &'hir hir::Expr<'hir>,
    ) {
        // Check for type cast expressions: Type(value)
        if let ExprKind::Call(call, args, _) = &expr.kind
            && let ExprKind::Type(hir::Type { kind: TypeKind::Elementary(ty), .. }) = &call.kind
            && args.len() == 1
            && let Some(call_arg) = args.exprs().next()
            && is_unsafe_typecast_hir(hir, call_arg, ty)
        {
            ctx.emit_with_suggestion(
                &UNSAFE_TYPECAST,
                expr.span,
                Suggestion::example(
                    format!(
                        "// casting to '{abi_ty}' is safe because [explain why]\n// forge-lint: disable-next-line(unsafe-typecast)",
                        abi_ty = ty.to_abi_str()
                    )).with_desc("consider disabling this lint if you're certain the cast is safe"));
        }
    }
}
/// Determines if a typecast is potentially unsafe (could lose data or precision).
///
/// Infers the possible source type(s) of `source_expr` and reports `true` if any
/// of them would make a cast to `target_type` lossy.
fn is_unsafe_typecast_hir(
    hir: &hir::Hir<'_>,
    source_expr: &hir::Expr<'_>,
    target_type: &hir::ElementaryType,
) -> bool {
    let mut source_types = Vec::<ElementaryType>::new();
    infer_source_types(Some(&mut source_types), hir, source_expr);
    // `any` on an empty collection is `false`, covering the "nothing inferred" case.
    source_types
        .into_iter()
        .any(|source_ty| is_unsafe_elementary_typecast(&source_ty, target_type))
}
/// Infers the elementary source type(s) of an expression.
///
/// This function traverses an expression tree to find the original "source" types.
/// For cast chains, it returns the ultimate source type, not intermediate cast results.
/// For binary operations, it collects types from both sides into the `output` vector.
///
/// # Returns
/// An `Option<ElementaryType>` containing the inferred type of the expression if it can be
/// resolved to a single source (like variables, literals, or unary expressions).
/// Returns `None` for complex expressions (like binary operations).
fn infer_source_types(
    mut output: Option<&mut Vec<ElementaryType>>,
    hir: &hir::Hir<'_>,
    expr: &hir::Expr<'_>,
) -> Option<ElementaryType> {
    // Records an inferred type into `output` (when collecting) and returns it.
    let mut track = |ty: ElementaryType| -> Option<ElementaryType> {
        if let Some(output) = output.as_mut() {
            output.push(ty);
        }
        Some(ty)
    };
    match &expr.kind {
        // A type cast call: `Type(val)`
        ExprKind::Call(call_expr, args, ..) => {
            // Check if the called expression is a type, which indicates a cast.
            if let ExprKind::Type(hir::Type { kind: TypeKind::Elementary(..), .. }) =
                &call_expr.kind
                && let Some(inner) = args.exprs().next()
            {
                // Recurse to find the original (inner-most) source type.
                return infer_source_types(output, hir, inner);
            }
            None
        }
        // Identifiers (variables): use the declared type of the resolved variable.
        ExprKind::Ident(resolutions) => {
            if let Some(Res::Item(ItemId::Variable(var_id))) = resolutions.first() {
                let variable = hir.variable(*var_id);
                if let TypeKind::Elementary(elem_type) = &variable.ty.kind {
                    return track(*elem_type);
                }
            }
            None
        }
        // Handle literal values
        ExprKind::Lit(hir::Lit { kind, .. }) => match kind {
            LitKind::Str(StrKind::Hex, ..) => track(ElementaryType::Bytes),
            LitKind::Str(..) => track(ElementaryType::String),
            LitKind::Address(_) => track(ElementaryType::Address(false)),
            LitKind::Bool(_) => track(ElementaryType::Bool),
            // Unnecessary to check numbers as assigning literal values that cannot fit into a type
            // throws a compiler error. Reference: <https://solang.readthedocs.io/en/latest/language/types.html>
            _ => None,
        },
        // Unary operations: Recurse to find the source type of the inner expression.
        ExprKind::Unary(_, inner_expr) => infer_source_types(output, hir, inner_expr),
        // Binary operations
        ExprKind::Binary(lhs, _, rhs) => {
            if let Some(mut output) = output {
                // Recurse on both sides to find and collect all source types.
                infer_source_types(Some(&mut output), hir, lhs);
                infer_source_types(Some(&mut output), hir, rhs);
            }
            None
        }
        // Complex expressions are not evaluated
        _ => None,
    }
}
/// Checks if a type cast from source_type to target_type is unsafe.
///
/// "Unsafe" means the conversion may truncate, lose the sign, or otherwise lose
/// information; combinations not listed (e.g. widening casts of the same
/// signedness) are considered safe.
fn is_unsafe_elementary_typecast(
    source_type: &ElementaryType,
    target_type: &ElementaryType,
) -> bool {
    match (source_type, target_type) {
        // Numeric downcasts (smaller target size)
        (ElementaryType::UInt(source_size), ElementaryType::UInt(target_size))
        | (ElementaryType::Int(source_size), ElementaryType::Int(target_size)) => {
            source_size.bits() > target_size.bits()
        }
        // Signed to unsigned conversion (potential loss of sign)
        (ElementaryType::Int(_), ElementaryType::UInt(_)) => true,
        // Unsigned to signed conversion with same or smaller size
        (ElementaryType::UInt(source_size), ElementaryType::Int(target_size)) => {
            source_size.bits() >= target_size.bits()
        }
        // Fixed bytes to smaller fixed bytes
        (ElementaryType::FixedBytes(source_size), ElementaryType::FixedBytes(target_size)) => {
            source_size.bytes() > target_size.bytes()
        }
        // Dynamic bytes to fixed bytes (potential truncation)
        (ElementaryType::Bytes, ElementaryType::FixedBytes(_))
        | (ElementaryType::String, ElementaryType::FixedBytes(_)) => true,
        // Address to smaller uint (truncation) - address is 160 bits
        (ElementaryType::Address(_), ElementaryType::UInt(target_size)) => target_size.bits() < 160,
        // Address to int (sign issues)
        (ElementaryType::Address(_), ElementaryType::Int(_)) => true,
        _ => false,
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/med/div_mul.rs | crates/lint/src/sol/med/div_mul.rs | use super::DivideBeforeMultiply;
use crate::{
linter::{EarlyLintPass, LintContext},
sol::{Severity, SolLint},
};
use solar::{
ast::{BinOp, BinOpKind, Expr, ExprKind},
interface::SpannedOption,
};
// Lint metadata for `divide-before-multiply`; the help link is derived from the id.
declare_forge_lint!(
    DIVIDE_BEFORE_MULTIPLY,
    Severity::Med,
    "divide-before-multiply",
    "multiplication should occur before division to avoid loss of precision"
);
impl<'ast> EarlyLintPass<'ast> for DivideBeforeMultiply {
    fn check_expr(&mut self, ctx: &LintContext, expr: &'ast Expr<'ast>) {
        // Flags `(... / ...) * ...`: only the *left* operand of a multiplication is
        // inspected, so divisions on the right-hand side are not reported here.
        if let ExprKind::Binary(left_expr, BinOp { kind: BinOpKind::Mul, .. }, _) = &expr.kind
            && contains_division(left_expr)
        {
            ctx.emit(&DIVIDE_BEFORE_MULTIPLY, expr.span);
        }
    }
}
/// Returns `true` if `expr` is a division, or a tuple that contains a division
/// among its (present) elements. Other expression kinds are not descended into.
fn contains_division<'ast>(expr: &'ast Expr<'ast>) -> bool {
    match &expr.kind {
        ExprKind::Binary(_, BinOp { kind: BinOpKind::Div, .. }, _) => true,
        ExprKind::Tuple(elements) => elements.iter().any(|element| {
            matches!(element.as_ref(), SpannedOption::Some(inner) if contains_division(inner))
        }),
        _ => false,
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/med/mod.rs | crates/lint/src/sol/med/mod.rs | use crate::sol::{EarlyLintPass, LateLintPass, SolLint};
mod div_mul;
use div_mul::DIVIDE_BEFORE_MULTIPLY;
mod unsafe_typecast;
use unsafe_typecast::UNSAFE_TYPECAST;
// Wire up the medium-severity passes: `DivideBeforeMultiply` runs on the AST
// (early), `UnsafeTypecast` runs on the HIR (late).
register_lints!(
    (DivideBeforeMultiply, early, (DIVIDE_BEFORE_MULTIPLY)),
    (UnsafeTypecast, late, (UNSAFE_TYPECAST))
);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/high/incorrect_shift.rs | crates/lint/src/sol/high/incorrect_shift.rs | use super::IncorrectShift;
use crate::{
linter::{EarlyLintPass, LintContext},
sol::{Severity, SolLint},
};
use solar::ast::{BinOp, BinOpKind, Expr, ExprKind};
// Lint metadata for `incorrect-shift`; the help link is derived from the id.
declare_forge_lint!(
    INCORRECT_SHIFT,
    Severity::High,
    "incorrect-shift",
    "the order of args in a shift operation is incorrect"
);
impl<'ast> EarlyLintPass<'ast> for IncorrectShift {
    fn check_expr(&mut self, ctx: &LintContext, expr: &'ast Expr<'ast>) {
        // Flags `<literal> << <non-literal>` / `<literal> >> <non-literal>`,
        // a common sign that the shift operands were written in reverse order.
        if let ExprKind::Binary(
            left_expr,
            BinOp { kind: BinOpKind::Shl | BinOpKind::Shr, .. },
            right_expr,
        ) = &expr.kind
            && contains_incorrect_shift(left_expr, right_expr)
        {
            ctx.emit(&INCORRECT_SHIFT, expr.span);
        }
    }
}
// TODO: come up with a better heuristic. Treat initial impl as a PoC.
/// Flags a potential reversed shift: a literal on the left-hand side combined with
/// a non-literal right-hand side (e.g. `2 << amount` where `amount << 2` was meant).
fn contains_incorrect_shift<'ast>(
    left_expr: &'ast Expr<'ast>,
    right_expr: &'ast Expr<'ast>,
) -> bool {
    matches!(left_expr.kind, ExprKind::Lit(..)) && !matches!(right_expr.kind, ExprKind::Lit(..))
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/high/mod.rs | crates/lint/src/sol/high/mod.rs | use crate::sol::{EarlyLintPass, LateLintPass, SolLint};
mod incorrect_shift;
mod unchecked_calls;
use incorrect_shift::INCORRECT_SHIFT;
use unchecked_calls::{ERC20_UNCHECKED_TRANSFER, UNCHECKED_CALL};
register_lints!(
(IncorrectShift, early, (INCORRECT_SHIFT)),
(UncheckedCall, early, (UNCHECKED_CALL)),
(UncheckedTransferERC20, late, (ERC20_UNCHECKED_TRANSFER))
);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/high/unchecked_calls.rs | crates/lint/src/sol/high/unchecked_calls.rs | use super::{UncheckedCall, UncheckedTransferERC20};
use crate::{
linter::{EarlyLintPass, LateLintPass, LintContext},
sol::{Severity, SolLint},
};
use solar::{
ast::{Expr, ExprKind, ItemFunction, Stmt, StmtKind, visit::Visit},
interface::kw,
sema::hir::{self},
};
use std::ops::ControlFlow;
declare_forge_lint!(
UNCHECKED_CALL,
Severity::High,
"unchecked-call",
"Low-level calls should check the success return value"
);
declare_forge_lint!(
ERC20_UNCHECKED_TRANSFER,
Severity::High,
"erc20-unchecked-transfer",
"ERC20 'transfer' and 'transferFrom' calls should check the return value"
);
// -- ERC20 UNCHECKED TRANSFERS -------------------------------------------------------------------
/// Checks that calls to functions with the same signature as the ERC20 transfer methods, and which
/// return a boolean are not ignored.
///
/// WARN: can issue false positives, as it doesn't check that the contract being called sticks to
/// the full ERC20 specification.
impl<'hir> LateLintPass<'hir> for UncheckedTransferERC20 {
    fn check_stmt(
        &mut self,
        ctx: &LintContext,
        hir: &'hir hir::Hir<'hir>,
        stmt: &'hir hir::Stmt<'hir>,
    ) {
        // A transfer whose return value is ignored appears as a bare
        // expression statement; any other statement kind consumes the result.
        let hir::StmtKind::Expr(expr) = &stmt.kind else { return };
        if is_erc20_transfer_call(hir, expr) {
            ctx.emit(&ERC20_UNCHECKED_TRANSFER, expr.span);
        }
    }
}
/// Checks if an expression is an ERC20 `transfer` or `transferFrom` call.
/// * `function transfer(address to, uint256 amount) external returns bool;`
/// * `function transferFrom(address from, address to, uint256 amount) external returns bool;`
///
/// Validates the method name, the params (count + types), and the returns (count + types).
fn is_erc20_transfer_call(hir: &hir::Hir<'_>, expr: &hir::Expr<'_>) -> bool {
    // True when `var_id`'s declared type is the elementary type whose ABI
    // name equals `type_str` (e.g. "address", "uint256", "bool").
    let is_type = |var_id: hir::VariableId, type_str: &str| {
        matches!(
            &hir.variable(var_id).ty.kind,
            hir::TypeKind::Elementary(ty) if ty.to_abi_str() == type_str
        )
    };
    // Ensure the expression is a call to a contract member function.
    let hir::ExprKind::Call(
        hir::Expr { kind: hir::ExprKind::Member(contract_expr, func_ident), .. },
        hir::CallArgs { kind: hir::CallArgsKind::Unnamed(args), .. },
        ..,
    ) = &expr.kind
    else {
        return false;
    };
    // Determine the expected ERC20 signature from the call name + arg count.
    let (expected_params, expected_returns): (&[&str], &[&str]) = match func_ident.as_str() {
        "transferFrom" if args.len() == 3 => (&["address", "address", "uint256"], &["bool"]),
        "transfer" if args.len() == 2 => (&["address", "uint256"], &["bool"]),
        _ => return false,
    };
    // Resolve the contract being called so its declarations can be inspected.
    let Some(cid) = (match &contract_expr.kind {
        // Call to pre-instantiated contract variable
        hir::ExprKind::Ident([hir::Res::Item(hir::ItemId::Variable(id)), ..]) => {
            if let hir::TypeKind::Custom(hir::ItemId::Contract(cid)) = hir.variable(*id).ty.kind {
                Some(cid)
            } else {
                None
            }
        }
        // Call to address wrapped by the contract interface, e.g. `IERC20(addr).transfer(..)`
        hir::ExprKind::Call(
            hir::Expr {
                kind: hir::ExprKind::Ident([hir::Res::Item(hir::ItemId::Contract(cid))]),
                ..
            },
            ..,
        ) => Some(*cid),
        _ => None,
    }) else {
        return false;
    };
    // Try to find a function in the contract that matches the expected signature.
    hir.contract_item_ids(cid).any(|item| {
        let Some(fid) = item.as_function() else { return false };
        let func = hir.function(fid);
        func.name.is_some_and(|name| name.as_str() == func_ident.as_str())
            && func.kind.is_function()
            && func.mutates_state()
            && func.parameters.len() == expected_params.len()
            && func.returns.len() == expected_returns.len()
            && func.parameters.iter().zip(expected_params).all(|(id, &ty)| is_type(*id, ty))
            && func.returns.iter().zip(expected_returns).all(|(id, &ty)| is_type(*id, ty))
    })
}
// -- UNCHECKED LOW-LEVEL CALLS -------------------------------------------------------------------
impl<'ast> EarlyLintPass<'ast> for UncheckedCall {
    /// Walks each function body looking for low-level calls whose success
    /// value is discarded.
    fn check_item_function(&mut self, ctx: &LintContext, func: &'ast ItemFunction<'ast>) {
        let Some(body) = &func.body else { return };
        let mut checker = UncheckedCallChecker { ctx };
        let _ = checker.visit_block(body);
    }
}
/// Visitor that detects unchecked low-level calls within function bodies.
///
/// Similar to unchecked transfers, unchecked calls appear as standalone expression
/// statements. When the success value is checked (in require, if, etc.), the call
/// is part of a larger expression and won't be flagged.
struct UncheckedCallChecker<'a, 's> {
    /// Lint context used to emit diagnostics while walking the function body.
    ctx: &'a LintContext<'s, 'a>,
}
impl<'ast> Visit<'ast> for UncheckedCallChecker<'_, '_> {
    type BreakValue = ();
    fn visit_stmt(&mut self, stmt: &'ast Stmt<'ast>) -> ControlFlow<Self::BreakValue> {
        match &stmt.kind {
            // Check standalone expression statements: `target.call(data);`
            StmtKind::Expr(expr) => {
                if is_low_level_call(expr) {
                    self.ctx.emit(&UNCHECKED_CALL, expr.span);
                } else if let ExprKind::Assign(lhs, _, rhs) = &expr.kind {
                    // Check assignments to existing vars: `(, existingVar) = target.call(data);`
                    // Only flagged when the success slot of the tuple is empty.
                    if is_low_level_call(rhs) && is_unchecked_tuple_assignment(lhs) {
                        self.ctx.emit(&UNCHECKED_CALL, expr.span);
                    }
                }
            }
            // Check multi-variable declarations: `(bool success, ) = target.call(data);`
            // Flagged when the first (success) variable is omitted.
            StmtKind::DeclMulti(vars, expr) => {
                if is_low_level_call(expr) && vars.first().is_none_or(|v| v.is_none()) {
                    self.ctx.emit(&UNCHECKED_CALL, stmt.span);
                }
            }
            _ => {}
        }
        // Recurse into nested statements (if/for bodies, blocks, etc.).
        self.walk_stmt(stmt)
    }
}
/// Checks if an expression is a low-level call that should be checked.
///
/// Detects patterns like:
/// - `target.call(...)`
/// - `target.delegatecall(...)`
/// - `target.staticcall(...)`
/// - `target.call{value: x}(...)`
/// Checks if an expression is a low-level call that should be checked.
///
/// Detects `target.call(...)`, `target.delegatecall(...)`,
/// `target.staticcall(...)`, including forms with call options such as
/// `target.call{value: x}(...)`.
fn is_low_level_call(expr: &Expr<'_>) -> bool {
    let ExprKind::Call(call_expr, _) = &expr.kind else { return false };
    // Peel off `{value: ...}`-style call options to reach the member access.
    let callee = match &call_expr.kind {
        ExprKind::CallOptions(inner, _) => inner,
        _ => call_expr,
    };
    match &callee.kind {
        ExprKind::Member(_, member) => {
            matches!(member.name, kw::Call | kw::Delegatecall | kw::Staticcall)
        }
        _ => false,
    }
}
/// Checks if a tuple assignment doesn't properly check the success value.
///
/// Returns true if the first variable (success) is None: `(, bytes memory data) =
/// target.call(...)`
/// Returns `true` when a tuple assignment omits the success slot, i.e. the
/// first element is empty: `(, bytes memory data) = target.call(...)`.
fn is_unchecked_tuple_assignment(expr: &Expr<'_>) -> bool {
    match &expr.kind {
        ExprKind::Tuple(elements) => elements.first().is_none_or(|e| e.is_none()),
        _ => false,
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/codesize/unwrapped_modifier_logic.rs | crates/lint/src/sol/codesize/unwrapped_modifier_logic.rs | use super::UnwrappedModifierLogic;
use crate::{
linter::{LateLintPass, LintContext, Suggestion},
sol::{Severity, SolLint},
};
use solar::{
ast,
sema::hir::{self, Res},
};
declare_forge_lint!(
UNWRAPPED_MODIFIER_LOGIC,
Severity::CodeSize,
"unwrapped-modifier-logic",
"wrap modifier logic to reduce code size"
);
impl<'hir> LateLintPass<'hir> for UnwrappedModifierLogic {
    /// Flags modifiers whose pre-/post-placeholder logic should be moved into
    /// internal helper functions to reduce deployed bytecode size.
    fn check_function(
        &mut self,
        ctx: &LintContext,
        hir: &'hir hir::Hir<'hir>,
        func: &'hir hir::Function<'hir>,
    ) {
        // Only check modifiers with a body and a name
        let body = match (func.kind, &func.body, func.name) {
            (ast::FunctionKind::Modifier, Some(body), Some(_)) => body,
            _ => return,
        };
        // Split statements into before and after the placeholder `_`.
        // When no placeholder exists, every statement counts as "before".
        let stmts = body.stmts[..].as_ref();
        let (before, after) = stmts
            .iter()
            .position(|s| matches!(s.kind, hir::StmtKind::Placeholder))
            .map_or((stmts, &[][..]), |idx| (&stmts[..idx], &stmts[idx + 1..]));
        // Generate a fix suggestion if the modifier logic should be wrapped.
        if let Some(suggestion) = self.get_snippet(ctx, hir, func, before, after) {
            ctx.emit_with_suggestion(
                &UNWRAPPED_MODIFIER_LOGIC,
                func.span.to(func.body_span),
                suggestion,
            );
        }
    }
}
impl UnwrappedModifierLogic {
    /// Returns `true` if an expr is not a built-in ('require' or 'assert') call or a lib function.
    fn is_valid_expr(&self, hir: &hir::Hir<'_>, expr: &hir::Expr<'_>) -> bool {
        if let hir::ExprKind::Call(func_expr, _, _) = &expr.kind {
            if let hir::ExprKind::Ident(resolutions) = &func_expr.kind {
                return !resolutions.iter().any(|r| matches!(r, Res::Builtin(_)));
            }
            // Member calls are only valid when the receiver resolves to a library.
            if let hir::ExprKind::Member(base, _) = &func_expr.kind
                && let hir::ExprKind::Ident(resolutions) = &base.kind
            {
                return resolutions.iter().any(|r| {
                    matches!(r, Res::Item(hir::ItemId::Contract(id)) if hir.contract(*id).kind == ast::ContractKind::Library)
                });
            }
        }
        false
    }
    /// Checks if a block of statements is complex and should be wrapped in a helper function.
    ///
    /// This is always 'false' when the modifier contains assembly. We assume that if devs know
    /// how to use assembly, they will also know how to reduce the codesize of their contracts,
    /// and they have a good reason to use it on their modifiers.
    ///
    /// This is 'true' if the block contains:
    /// 1. Any statement that is not a placeholder or a valid expression.
    /// 2. More than one simple call expression.
    fn stmts_require_wrapping(&self, hir: &hir::Hir<'_>, stmts: &[hir::Stmt<'_>]) -> bool {
        let (mut res, mut has_valid_stmt) = (false, false);
        for stmt in stmts {
            match &stmt.kind {
                hir::StmtKind::Placeholder => continue,
                hir::StmtKind::Expr(expr) => {
                    // An invalid call, or a second simple call, triggers wrapping.
                    if !self.is_valid_expr(hir, expr) || has_valid_stmt {
                        res = true;
                    }
                    has_valid_stmt = true;
                }
                // HIR doesn't support assembly yet:
                // <https://github.com/paradigmxyz/solar/blob/d25bf38a5accd11409318e023f701313d98b9e1e/crates/sema/src/hir/mod.rs#L977-L982>
                hir::StmtKind::Err(_) => return false,
                _ => res = true,
            }
        }
        res
    }
    /// Builds the replacement snippet that wraps complex modifier logic into
    /// `_modifierName[Before|After](..)` internal helper functions.
    ///
    /// Returns `None` when neither the before- nor the after-placeholder
    /// statements require wrapping.
    fn get_snippet<'a>(
        &self,
        ctx: &LintContext,
        hir: &hir::Hir<'_>,
        func: &hir::Function<'_>,
        before: &'a [hir::Stmt<'a>],
        after: &'a [hir::Stmt<'a>],
    ) -> Option<Suggestion> {
        let wrap_before = !before.is_empty() && self.stmts_require_wrapping(hir, before);
        let wrap_after = !after.is_empty() && self.stmts_require_wrapping(hir, after);
        if !(wrap_before || wrap_after) {
            return None;
        }
        // `unwrap` is sound: `check_function` only passes named modifiers.
        let binding = func.name.unwrap();
        let modifier_name = binding.name.as_str();
        let mut param_list = vec![];
        let mut param_decls = vec![];
        for var_id in func.parameters {
            let var = hir.variable(*var_id);
            let ty = ctx
                .span_to_snippet(var.ty.span)
                .unwrap_or_else(|| "/* unknown type */".to_string());
            // solidity functions should always have named parameters
            if let Some(ident) = var.name {
                param_list.push(ident.to_string());
                param_decls.push(format!("{ty} {ident}"));
            }
        }
        let param_list = param_list.join(", ");
        let param_decls = param_decls.join(", ");
        let body_indent = " ".repeat(ctx.get_span_indentation(
            before.first().or(after.first()).map(|stmt| stmt.span).unwrap_or(func.span),
        ));
        // New modifier body: helper call(s) around the placeholder `_`.
        let body = match (wrap_before, wrap_after) {
            (true, true) => format!(
                "{body_indent}_{modifier_name}Before({param_list});\n{body_indent}_;\n{body_indent}_{modifier_name}After({param_list});"
            ),
            (true, false) => {
                format!("{body_indent}_{modifier_name}({param_list});\n{body_indent}_;")
            }
            (false, true) => {
                format!("{body_indent}_;\n{body_indent}_{modifier_name}({param_list});")
            }
            _ => unreachable!(),
        };
        let mod_indent = " ".repeat(ctx.get_span_indentation(func.span));
        let mut replacement =
            format!("modifier {modifier_name}({param_decls}) {{\n{body}\n{mod_indent}}}");
        // Renders one internal helper function holding the wrapped statements.
        let build_func = |stmts: &[hir::Stmt<'_>], suffix: &str| {
            let body_stmts = stmts
                .iter()
                .filter_map(|s| ctx.span_to_snippet(s.span))
                .map(|code| format!("\n{body_indent}{code}"))
                .collect::<String>();
            format!(
                "\n\n{mod_indent}function _{modifier_name}{suffix}({param_decls}) internal {{{body_stmts}\n{mod_indent}}}"
            )
        };
        if wrap_before {
            replacement.push_str(&build_func(before, if wrap_after { "Before" } else { "" }));
        }
        if wrap_after {
            replacement.push_str(&build_func(after, if wrap_before { "After" } else { "" }));
        }
        Some(
            Suggestion::fix(
                replacement,
                ast::interface::diagnostics::Applicability::MachineApplicable,
            )
            .with_desc("wrap modifier logic to reduce code size"),
        )
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/codesize/mod.rs | crates/lint/src/sol/codesize/mod.rs | use crate::sol::{EarlyLintPass, LateLintPass, SolLint};
mod unwrapped_modifier_logic;
use unwrapped_modifier_logic::UNWRAPPED_MODIFIER_LOGIC;
register_lints!((UnwrappedModifierLogic, late, (UNWRAPPED_MODIFIER_LOGIC)));
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/gas/keccak.rs | crates/lint/src/sol/gas/keccak.rs | use super::AsmKeccak256;
use crate::{
linter::{LateLintPass, LintContext},
sol::{Severity, SolLint},
};
use solar::{
ast::{self as ast, Span},
interface::kw,
sema::hir::{self},
};
declare_forge_lint!(
ASM_KECCAK256,
Severity::Gas,
"asm-keccak256",
"use of inefficient hashing mechanism; consider using inline assembly"
);
impl<'hir> LateLintPass<'hir> for AsmKeccak256 {
    /// Scans statements for `keccak256(..)` calls and emits the lint for each
    /// occurrence (skipping constant declarations).
    fn check_stmt(
        &mut self,
        ctx: &LintContext,
        hir: &'hir hir::Hir<'hir>,
        stmt: &'hir hir::Stmt<'hir>,
    ) {
        // Shared helper: lint `expr` if it is a single-arg `keccak256` call,
        // recording assignment target / return position for future fixes.
        let check_expr_and_emit_lint =
            |expr: &'hir hir::Expr<'hir>, assign: Option<ast::Ident>, is_return: bool| {
                if let Some(hash_arg) = extract_keccak256_arg(expr) {
                    self.emit_lint(
                        ctx,
                        hir,
                        stmt.span,
                        expr,
                        hash_arg,
                        AsmContext { _assign: assign, _is_return: is_return },
                    );
                }
            };
        match stmt.kind {
            hir::StmtKind::DeclSingle(var_id) => {
                let var = hir.variable(var_id);
                if let Some(init) = var.initializer {
                    // Constants should be optimized by the compiler, so no gas savings apply.
                    if !matches!(var.mutability, Some(hir::VarMut::Constant)) {
                        check_expr_and_emit_lint(init, var.name, false);
                    }
                }
            }
            // Expressions that don't (directly) assign to a variable
            hir::StmtKind::Expr(expr)
            | hir::StmtKind::Emit(expr)
            | hir::StmtKind::Revert(expr)
            | hir::StmtKind::DeclMulti(_, expr)
            | hir::StmtKind::If(expr, ..) => check_expr_and_emit_lint(expr, None, false),
            hir::StmtKind::Return(Some(expr)) => check_expr_and_emit_lint(expr, None, true),
            _ => (),
        }
    }
}
impl AsmKeccak256 {
    /// Emits lints (when possible with fix suggestions) for inefficient `keccak256` calls.
    ///
    /// The underscore-prefixed parameters carry the surrounding context (HIR,
    /// statement span, hashed argument, assignment/return position); they are
    /// currently unused but reserved for generating assembly fix suggestions.
    fn emit_lint(
        &self,
        ctx: &LintContext,
        _hir: &hir::Hir<'_>,
        _stmt_span: Span,
        call: &hir::Expr<'_>,
        _hash: &hir::Expr<'_>,
        _asm_ctx: AsmContext,
    ) {
        ctx.emit(&ASM_KECCAK256, call.span);
    }
}
/// If the expression is a call to `keccak256` with one argument, returns that argument.
/// If the expression is a call to the `keccak256` builtin with exactly one
/// argument, returns that argument; otherwise `None`.
fn extract_keccak256_arg<'hir>(expr: &'hir hir::Expr<'hir>) -> Option<&'hir hir::Expr<'hir>> {
    // The expression must be a call with positional (unnamed) arguments.
    let hir::ExprKind::Call(
        callee,
        hir::CallArgs { kind: hir::CallArgsKind::Unnamed(args), .. },
        ..,
    ) = &expr.kind
    else {
        return None;
    };
    // The callee must resolve directly to the `keccak256` builtin.
    let hir::ExprKind::Ident([hir::Res::Builtin(builtin)]) = callee.kind else { return None };
    if !matches!(builtin.name(), kw::Keccak256) {
        return None;
    }
    // Only the single-argument form `keccak256(x)` is of interest.
    if args.len() == 1 { Some(&args[0]) } else { None }
}
// -- HELPER FUNCTIONS AND STRUCTS ----------------------------------------------------------------
#[derive(Debug, Clone, Copy)]
struct AsmContext {
    /// Variable the hash result is assigned to, if any (reserved for future fixes).
    _assign: Option<ast::Ident>,
    /// Whether the hash is the value of a `return` statement (reserved for future fixes).
    _is_return: bool,
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/gas/mod.rs | crates/lint/src/sol/gas/mod.rs | use crate::sol::{EarlyLintPass, LateLintPass, SolLint};
mod keccak;
use keccak::ASM_KECCAK256;
register_lints!((AsmKeccak256, late, (ASM_KECCAK256)),);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/info/unsafe_cheatcodes.rs | crates/lint/src/sol/info/unsafe_cheatcodes.rs | use super::UnsafeCheatcodes;
use crate::{
linter::{EarlyLintPass, LintContext},
sol::{Severity, SolLint},
};
use solar::ast::{Expr, ExprKind};
declare_forge_lint!(
UNSAFE_CHEATCODE_USAGE,
Severity::Info,
"unsafe-cheatcode",
"usage of unsafe cheatcodes that can perform dangerous operations"
);
/// Cheatcode names flagged by the lint: each can reach outside the EVM
/// sandbox (subprocesses, filesystem, environment variables, key derivation).
const UNSAFE_CHEATCODES: [&str; 9] = [
    "ffi",
    "readFile",
    "readLine",
    "writeFile",
    "writeLine",
    "removeFile",
    "closeFile",
    "setEnv",
    "deriveKey",
];
impl<'ast> EarlyLintPass<'ast> for UnsafeCheatcodes {
    /// Flags member calls whose method name matches one of the unsafe
    /// cheatcodes, e.g. `vm.ffi(...)`.
    fn check_expr(&mut self, ctx: &LintContext, expr: &'ast Expr<'ast>) {
        let ExprKind::Call(callee, _) = &expr.kind else { return };
        let ExprKind::Member(_, member) = &callee.kind else { return };
        if UNSAFE_CHEATCODES.contains(&member.as_str()) {
            ctx.emit(&UNSAFE_CHEATCODE_USAGE, member.span);
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/info/named_struct_fields.rs | crates/lint/src/sol/info/named_struct_fields.rs | use solar::sema::hir::{CallArgs, CallArgsKind, Expr, ExprKind, ItemId, Res};
use crate::{
linter::{LateLintPass, LintContext, Suggestion},
sol::{Severity, SolLint, info::NamedStructFields},
};
declare_forge_lint!(
NAMED_STRUCT_FIELDS,
Severity::Info,
"named-struct-fields",
"prefer initializing structs with named fields"
);
impl<'hir> LateLintPass<'hir> for NamedStructFields {
    /// Flags positional struct initializations and, where source snippets for
    /// the struct name and every argument are available, suggests the
    /// named-field form `Name({ field: value, .. })`.
    fn check_expr(
        &mut self,
        ctx: &LintContext,
        hir: &'hir solar::sema::hir::Hir<'hir>,
        expr: &'hir solar::sema::hir::Expr<'hir>,
    ) {
        // Only calls of a struct constructor with positional (unnamed) args.
        let ExprKind::Call(
            Expr { kind: ExprKind::Ident([Res::Item(ItemId::Struct(struct_id))]), span, .. },
            CallArgs { kind: CallArgsKind::Unnamed(args), .. },
            _,
        ) = &expr.kind
        else {
            return;
        };
        let strukt = hir.strukt(*struct_id);
        let fields = &strukt.fields;
        // Basic sanity conditions for a consistent auto-fix
        if fields.len() != args.len() || fields.is_empty() {
            // Emit without suggestion
            ctx.emit(&NAMED_STRUCT_FIELDS, expr.span);
            return;
        }
        // Get struct name snippet and emit without suggestion if we can't get it
        let Some(struct_name_snippet) = ctx.span_to_snippet(*span) else {
            // Emit without suggestion if we can't get the struct name snippet
            ctx.emit(&NAMED_STRUCT_FIELDS, expr.span);
            return;
        };
        // Collect field names and corresponding argument source snippets
        let mut field_assignments = Vec::new();
        for (field_id, arg) in fields.iter().zip(args.iter()) {
            let field = hir.variable(*field_id);
            let Some((arg_snippet, field_name)) =
                ctx.span_to_snippet(arg.span).zip(field.name.map(|n| n.to_string()))
            else {
                // Emit without suggestion if we can't get argument snippet
                ctx.emit(&NAMED_STRUCT_FIELDS, expr.span);
                return;
            };
            field_assignments.push(format!("{field_name}: {arg_snippet}"));
        }
        ctx.emit_with_suggestion(
            &NAMED_STRUCT_FIELDS,
            expr.span,
            Suggestion::fix(
                format!("{}({{ {} }})", struct_name_snippet, field_assignments.join(", ")),
                solar::interface::diagnostics::Applicability::MachineApplicable,
            )
            .with_desc("consider using named fields"),
        );
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/info/imports.rs | crates/lint/src/sol/info/imports.rs | use solar::{
ast::{self as ast, SourceUnit, Span, Symbol, visit::Visit},
data_structures::map::FxIndexSet,
interface::SourceMap,
};
use std::ops::ControlFlow;
use super::Imports;
use crate::{
linter::{EarlyLintPass, LintContext},
sol::{Severity, SolLint},
};
declare_forge_lint!(
UNUSED_IMPORT,
Severity::Info,
"unused-import",
"unused imports should be removed"
);
declare_forge_lint!(
UNALIASED_PLAIN_IMPORT,
Severity::Info,
"unaliased-plain-import",
"use named imports '{A, B}' or alias 'import \"..\" as X'"
);
impl<'ast> EarlyLintPass<'ast> for Imports {
    /// Flags `import "File.sol";` — a plain import without an alias pulls
    /// every symbol of the file into scope.
    fn check_import_directive(
        &mut self,
        ctx: &LintContext,
        import: &'ast ast::ImportDirective<'ast>,
    ) {
        if matches!(&import.items, ast::ImportItems::Plain(_)) && import.source_alias().is_none() {
            ctx.emit(&UNALIASED_PLAIN_IMPORT, import.path.span);
        }
    }

    /// Runs the unused-import analysis over the whole source unit.
    fn check_full_source_unit(&mut self, ctx: &LintContext<'ast, '_>, ast: &'ast SourceUnit<'ast>) {
        // Disabled lints are filtered inside `ctx.emit()`, but we check
        // upfront to avoid the expensive full-tree traversal when unnecessary.
        if !ctx.is_lint_enabled(UNUSED_IMPORT.id) {
            return;
        }
        let mut checker = UnusedChecker::new(ctx.session().source_map());
        let _ = checker.visit_source_unit(ast);
        checker.check_unused_imports(ast, ctx);
        checker.clear();
    }
}
/// Visitor that collects all used symbols in a source unit.
struct UnusedChecker<'ast> {
    /// Every symbol observed in a use position during the AST walk.
    used_symbols: FxIndexSet<Symbol>,
    /// Needed to resolve doc-comment spans for `@inheritdoc` detection.
    source_map: &'ast SourceMap,
}
impl<'ast> UnusedChecker<'ast> {
    fn new(source_map: &'ast SourceMap) -> Self {
        Self { source_map, used_symbols: Default::default() }
    }
    /// Resets collected state so the checker can be reused.
    fn clear(&mut self) {
        self.used_symbols.clear();
    }
    /// Mark a symbol as used in a source.
    fn mark_symbol_used(&mut self, symbol: Symbol) {
        self.used_symbols.insert(symbol);
    }
    /// Check for unused imports and emit warnings.
    ///
    /// An import is unused when none of the names it binds (the alias for
    /// plain/glob imports; each — possibly aliased — symbol for named
    /// imports) appears in `used_symbols`.
    fn check_unused_imports(&self, ast: &SourceUnit<'_>, ctx: &LintContext) {
        for item in ast.items.iter() {
            let span = item.span;
            let ast::ItemKind::Import(import) = &item.kind else { continue };
            match &import.items {
                ast::ImportItems::Plain(_) | ast::ImportItems::Glob(_) => {
                    // Plain imports without an alias bind no trackable symbol,
                    // so only the aliased forms are checked here.
                    if let Some(alias) = import.source_alias()
                        && !self.used_symbols.contains(&alias.name)
                    {
                        self.unused_import(ctx, span);
                    }
                }
                ast::ImportItems::Aliases(symbols) => {
                    for &(orig, alias) in symbols.iter() {
                        // The effective binding is the alias when present.
                        let name = alias.unwrap_or(orig);
                        if !self.used_symbols.contains(&name.name) {
                            self.unused_import(ctx, orig.span.to(name.span));
                        }
                    }
                }
            }
        }
    }
    fn unused_import(&self, ctx: &LintContext, span: Span) {
        ctx.emit(&UNUSED_IMPORT, span);
    }
}
impl<'ast> Visit<'ast> for UnusedChecker<'ast> {
    type BreakValue = solar::data_structures::Never;
    fn visit_item(&mut self, item: &'ast ast::Item<'ast>) -> ControlFlow<Self::BreakValue> {
        // Skip import items: their symbols are bindings, not uses.
        if let ast::ItemKind::Import(_) = &item.kind {
            return ControlFlow::Continue(());
        }
        self.walk_item(item)
    }
    fn visit_using_directive(
        &mut self,
        using: &'ast ast::UsingDirective<'ast>,
    ) -> ControlFlow<Self::BreakValue> {
        // `using X for T` counts the first path segment as a use of `X`.
        match &using.list {
            ast::UsingList::Single(path) => {
                self.mark_symbol_used(path.first().name);
            }
            ast::UsingList::Multiple(items) => {
                for (path, _) in items.iter() {
                    self.mark_symbol_used(path.first().name);
                }
            }
        }
        self.walk_using_directive(using)
    }
    fn visit_expr(&mut self, expr: &'ast ast::Expr<'ast>) -> ControlFlow<Self::BreakValue> {
        if let ast::ExprKind::Ident(id) = expr.kind {
            self.mark_symbol_used(id.name);
        }
        self.walk_expr(expr)
    }
    fn visit_path(&mut self, path: &'ast ast::PathSlice) -> ControlFlow<Self::BreakValue> {
        // Every segment counts: `A.B.c` uses `A`, `B`, and `c`.
        for id in path.segments() {
            self.mark_symbol_used(id.name);
        }
        self.walk_path(path)
    }
    fn visit_ty(&mut self, ty: &'ast ast::Type<'ast>) -> ControlFlow<Self::BreakValue> {
        if let ast::TypeKind::Custom(path) = &ty.kind {
            self.mark_symbol_used(path.first().name);
        }
        self.walk_ty(ty)
    }
    fn visit_doc_comment(
        &mut self,
        cmnt: &'ast solar::ast::DocComment,
    ) -> ControlFlow<Self::BreakValue> {
        // `/// @inheritdoc Base` is a use of `Base`, even though it is a comment.
        if let Ok(snip) = self.source_map.span_to_snippet(cmnt.span) {
            for line in snip.lines() {
                if let Some((_, relevant)) = line.split_once("@inheritdoc") {
                    self.mark_symbol_used(Symbol::intern(relevant.trim()));
                }
            }
        }
        ControlFlow::Continue(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/info/pascal_case.rs | crates/lint/src/sol/info/pascal_case.rs | use super::PascalCaseStruct;
use crate::{
linter::{EarlyLintPass, LintContext, Suggestion},
sol::{Severity, SolLint},
};
use solar::ast::ItemStruct;
declare_forge_lint!(
PASCAL_CASE_STRUCT,
Severity::Info,
"pascal-case-struct",
"structs should use PascalCase"
);
impl<'ast> EarlyLintPass<'ast> for PascalCaseStruct {
    /// Emits a machine-applicable rename when a struct name is not PascalCase.
    fn check_item_struct(&mut self, ctx: &LintContext, strukt: &'ast ItemStruct<'ast>) {
        let Some(expected) = check_pascal_case(strukt.name.as_str()) else { return };
        let suggestion = Suggestion::fix(
            expected,
            solar::interface::diagnostics::Applicability::MachineApplicable,
        )
        .with_desc("consider using");
        ctx.emit_with_suggestion(&PASCAL_CASE_STRUCT, strukt.name.span, suggestion);
    }
}
/// If the string `s` is not PascalCase, returns a `Some(String)` with the
/// suggested conversion. Otherwise, returns `None`.
/// If the string `s` is not PascalCase, returns `Some(String)` with the
/// suggested conversion; otherwise `None`.
pub fn check_pascal_case(s: &str) -> Option<String> {
    // Single-character (and empty) names are exempt from the convention.
    if s.len() < 2 {
        return None;
    }
    let expected = heck::AsPascalCase(s).to_string();
    (s != expected).then_some(expected)
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/info/mod.rs | crates/lint/src/sol/info/mod.rs | use crate::sol::{EarlyLintPass, LateLintPass, SolLint};
mod mixed_case;
use mixed_case::{MIXED_CASE_FUNCTION, MIXED_CASE_VARIABLE};
mod pascal_case;
use pascal_case::PASCAL_CASE_STRUCT;
mod screaming_snake_case;
use screaming_snake_case::{SCREAMING_SNAKE_CASE_CONSTANT, SCREAMING_SNAKE_CASE_IMMUTABLE};
mod imports;
use imports::{UNALIASED_PLAIN_IMPORT, UNUSED_IMPORT};
mod named_struct_fields;
use named_struct_fields::NAMED_STRUCT_FIELDS;
mod unsafe_cheatcodes;
use unsafe_cheatcodes::UNSAFE_CHEATCODE_USAGE;
register_lints!(
(PascalCaseStruct, early, (PASCAL_CASE_STRUCT)),
(MixedCaseVariable, early, (MIXED_CASE_VARIABLE)),
(MixedCaseFunction, early, (MIXED_CASE_FUNCTION)),
(ScreamingSnakeCase, early, (SCREAMING_SNAKE_CASE_CONSTANT, SCREAMING_SNAKE_CASE_IMMUTABLE)),
(Imports, early, (UNALIASED_PLAIN_IMPORT, UNUSED_IMPORT)),
(NamedStructFields, late, (NAMED_STRUCT_FIELDS)),
(UnsafeCheatcodes, early, (UNSAFE_CHEATCODE_USAGE))
);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/info/mixed_case.rs | crates/lint/src/sol/info/mixed_case.rs | use super::{MixedCaseFunction, MixedCaseVariable};
use crate::{
linter::{EarlyLintPass, LintContext, Suggestion},
sol::{Severity, SolLint, info::screaming_snake_case::check_screaming_snake_case},
};
use solar::ast::{FunctionHeader, ItemFunction, VariableDefinition, Visibility};
declare_forge_lint!(
MIXED_CASE_FUNCTION,
Severity::Info,
"mixed-case-function",
"function names should use mixedCase"
);
impl<'ast> EarlyLintPass<'ast> for MixedCaseFunction {
    /// Emits a machine-applicable rename for function names that are not
    /// mixedCase, skipping SCREAMING_SNAKE_CASE constant getters.
    fn check_item_function(&mut self, ctx: &LintContext, func: &'ast ItemFunction<'ast>) {
        let Some(name) = func.header.name else { return };
        let Some(expected) =
            check_mixed_case(name.as_str(), true, ctx.config.mixed_case_exceptions)
        else {
            return;
        };
        // SCREAMING_SNAKE_CASE getters mirroring a constant are a common,
        // accepted exception to the mixedCase rule.
        if is_constant_getter(&func.header) {
            return;
        }
        ctx.emit_with_suggestion(
            &MIXED_CASE_FUNCTION,
            name.span,
            Suggestion::fix(
                expected,
                solar::interface::diagnostics::Applicability::MachineApplicable,
            )
            .with_desc("consider using"),
        );
    }
}
declare_forge_lint!(
MIXED_CASE_VARIABLE,
Severity::Info,
"mixed-case-variable",
"mutable variables should use mixedCase"
);
impl<'ast> EarlyLintPass<'ast> for MixedCaseVariable {
    /// Emits a machine-applicable rename for mutable variables that are not
    /// mixedCase.
    fn check_variable_definition(
        &mut self,
        ctx: &LintContext,
        var: &'ast VariableDefinition<'ast>,
    ) {
        // Constants/immutables are covered by the SCREAMING_SNAKE_CASE lints.
        if var.mutability.is_some() {
            return;
        }
        let Some(name) = var.name else { return };
        let Some(expected) =
            check_mixed_case(name.as_str(), false, ctx.config.mixed_case_exceptions)
        else {
            return;
        };
        ctx.emit_with_suggestion(
            &MIXED_CASE_VARIABLE,
            name.span,
            Suggestion::fix(
                expected,
                solar::interface::diagnostics::Applicability::MachineApplicable,
            )
            .with_desc("consider using"),
        );
    }
}
/// If the string `s` is not mixedCase, returns a `Some(String)` with the
/// suggested conversion. Otherwise, returns `None`.
///
/// To avoid false positives:
/// - lowercase strings like `fn increment()` or `uint256 counter`, are treated as mixedCase.
/// - test functions starting with `test`, `invariant_` or `statefulFuzz` are ignored.
/// - user-defined patterns like `ERC20` are allowed.
fn check_mixed_case(s: &str, is_fn: bool, allowed_patterns: &[String]) -> Option<String> {
    // Single-character (and empty) names are exempt from the convention.
    if s.len() <= 1 {
        return None;
    }
    // Exception for test, invariant, and stateful fuzzing functions.
    if is_fn
        && (s.starts_with("test") || s.starts_with("invariant_") || s.starts_with("statefulFuzz"))
    {
        return None;
    }
    // Exception for user-defined infix patterns (e.g. `ERC20` in `deployERC20Token`).
    for pattern in allowed_patterns {
        if let Some(pos) = s.find(pattern.as_str()) {
            let (pre, post) = s.split_at(pos);
            let post = &post[pattern.len()..];
            // Check if the part before the pattern is valid lowerCamelCase.
            let is_pre_valid = pre == heck::AsLowerCamelCase(pre).to_string();
            // Check if the part after is valid UpperCamelCase (allowing leading numbers).
            let post_trimmed = post.trim_start_matches(|c: char| c.is_numeric());
            let is_post_valid = post_trimmed == heck::AsUpperCamelCase(post_trimmed).to_string();
            if is_pre_valid && is_post_valid {
                return None;
            }
        }
    }
    // Generate the expected mixedCase version, preserving a single
    // leading/trailing underscore that `heck` would otherwise strip.
    let suggestion = format!(
        "{prefix}{name}{suffix}",
        prefix = if s.starts_with('_') { "_" } else { "" },
        name = heck::AsLowerCamelCase(s),
        suffix = if s.ends_with('_') { "_" } else { "" }
    );
    // If the original string already matches the suggestion, it's valid.
    if s == suggestion { None } else { Some(suggestion) }
}
/// Checks if a function getter is a valid constant getter with a heuristic:
/// * name is `SCREAMING_SNAKE_CASE`
/// * external view visibility and mutability.
/// * zero parameters.
/// * exactly one return value.
/// * return value is an elementary or a custom type
fn is_constant_getter(header: &FunctionHeader<'_>) -> bool {
    header.visibility().is_some_and(|v| matches!(v, Visibility::External))
        && header.state_mutability().is_view()
        && header.parameters.is_empty()
        && header.returns().len() == 1
        && header
            .returns()
            .first()
            .is_some_and(|ret| ret.ty.kind.is_elementary() || ret.ty.kind.is_custom())
        // `unwrap` assumes the function is named — holds for the current
        // caller, which checks `func.header.name` before calling this.
        && check_screaming_snake_case(header.name.unwrap().as_str()).is_none()
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/lint/src/sol/info/screaming_snake_case.rs | crates/lint/src/sol/info/screaming_snake_case.rs | use super::ScreamingSnakeCase;
use crate::{
linter::{EarlyLintPass, LintContext, Suggestion},
sol::{Severity, SolLint},
};
use solar::ast::{VarMut, VariableDefinition};
declare_forge_lint!(
SCREAMING_SNAKE_CASE_CONSTANT,
Severity::Info,
"screaming-snake-case-const",
"constants should use SCREAMING_SNAKE_CASE"
);
declare_forge_lint!(
SCREAMING_SNAKE_CASE_IMMUTABLE,
Severity::Info,
"screaming-snake-case-immutable",
"immutables should use SCREAMING_SNAKE_CASE"
);
impl<'ast> EarlyLintPass<'ast> for ScreamingSnakeCase {
    /// Emits a machine-applicable rename for constants and immutables whose
    /// names are not SCREAMING_SNAKE_CASE.
    fn check_variable_definition(
        &mut self,
        ctx: &LintContext,
        var: &'ast VariableDefinition<'ast>,
    ) {
        // Only named constants/immutables are subject to the convention.
        let (Some(name), Some(mutability)) = (var.name, var.mutability) else { return };
        let Some(expected) = check_screaming_snake_case(name.as_str()) else { return };
        let suggestion = Suggestion::fix(
            expected,
            solar::interface::diagnostics::Applicability::MachineApplicable,
        )
        .with_desc("consider using");
        // Constants and immutables report through distinct lints.
        let lint = match mutability {
            VarMut::Constant => &SCREAMING_SNAKE_CASE_CONSTANT,
            VarMut::Immutable => &SCREAMING_SNAKE_CASE_IMMUTABLE,
        };
        ctx.emit_with_suggestion(lint, name.span, suggestion);
    }
}
/// If the string `s` is not SCREAMING_SNAKE_CASE, returns a `Some(String)` with the suggested
/// conversion. Otherwise, returns `None`.
pub fn check_screaming_snake_case(s: &str) -> Option<String> {
    // Single characters (and empty strings) are always accepted.
    if s.len() <= 1 {
        return None;
    }
    // `heck` drops leading/trailing underscores, so preserve them manually.
    let prefix = if s.starts_with('_') { "_" } else { "" };
    let suffix = if s.ends_with('_') { "_" } else { "" };
    let expected = format!("{prefix}{}{suffix}", heck::AsShoutySnakeCase(s));
    if expected == s { None } else { Some(expected) }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/macros/src/lib.rs | crates/macros/src/lib.rs | //! # foundry-macros
//!
//! Internal Foundry proc-macros.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[macro_use]
extern crate proc_macro_error2;
use proc_macro::TokenStream;
use syn::{DeriveInput, Error, parse_macro_input};
mod cheatcodes;
mod console_fmt;
/// Derives the `ConsoleFmt` trait for the annotated struct or enum.
#[proc_macro_derive(ConsoleFmt)]
pub fn console_fmt(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    console_fmt::console_fmt(&input).into()
}
/// Derives cheatcode metadata and trait implementations for the annotated item.
///
/// Derivation failures are converted into compile errors.
#[proc_macro_derive(Cheatcode, attributes(cheatcode))]
#[proc_macro_error]
pub fn cheatcode(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    cheatcodes::derive_cheatcode(&input).unwrap_or_else(Error::into_compile_error).into()
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/macros/src/console_fmt.rs | crates/macros/src/console_fmt.rs | use proc_macro2::{Delimiter, Group, Ident, TokenStream};
use quote::{format_ident, quote};
use syn::{
Data, DataEnum, DataStruct, DeriveInput, Fields, Member, Token, Type, punctuated::Punctuated,
};
/// Generates the `impl ConsoleFmt for <type>` block for the given derive input.
pub fn console_fmt(input: &DeriveInput) -> TokenStream {
    let name = &input.ident;
    // Dispatch on the item kind; unions have no sensible formatting semantics.
    let body = match &input.data {
        Data::Struct(data) => derive_struct(data),
        Data::Enum(data) => derive_enum(data),
        Data::Union(_) => return quote!(compile_error!("Unions are unsupported");),
    };
    quote! {
        impl ConsoleFmt for #name {
            #body
        }
    }
}
/// Emits the `fmt` method for a struct; field-less structs format to an empty string.
fn derive_struct(s: &DataStruct) -> TokenStream {
    let body = match impl_struct(s) {
        Some(tokens) => tokens,
        None => quote!(String::new()),
    };
    quote! {
        fn fmt(&self, _spec: FormatSpec) -> String {
            #body
        }
    }
}
/// Builds the `console_format` call for a struct's fields.
///
/// Returns `None` for structs without fields (including unit structs), in which
/// case the caller formats to an empty string.
fn impl_struct(s: &DataStruct) -> Option<TokenStream> {
    // `Fields::Unit` has no fields, so `is_empty` covers both the unit and the
    // zero-field cases; the previous extra `matches!(s.fields, Fields::Unit)`
    // check was unreachable.
    if s.fields.is_empty() {
        return None;
    }
    let fields = s.fields.iter().collect::<Vec<_>>();
    // The first field's type name decides whether it is treated as the format string.
    let first_ty = match &fields.first().unwrap().ty {
        Type::Path(path) => path.path.segments.last().unwrap().ident.to_string(),
        _ => String::new(),
    };
    let members = s.fields.members().collect::<Vec<_>>();
    let args: Punctuated<TokenStream, Token![,]> = members
        .into_iter()
        .map(|member| match member {
            Member::Named(ident) => quote!(&self.#ident),
            // For Tuple structs generated by the sol!.
            // These are generated only in case of a single unnamed field, hence it is safe to
            // hardcode the index to `.0`.
            Member::Unnamed(_) => quote!(&self.0),
        })
        .collect();
    let imp = if first_ty == "String" {
        // console_format(arg1, [...rest])
        let mut args = args.pairs();
        let first = args.next().unwrap();
        let first = first.value();
        quote! {
            console_format((#first).as_str(), &[#(#args)*])
        }
    } else {
        // console_format("", [...args])
        quote! {
            console_format("", &[#args])
        }
    };
    Some(imp)
}
/// Delegates to variants.
fn derive_enum(e: &DataEnum) -> TokenStream {
    // Build one match arm per variant, forwarding `fmt` to its single field.
    let arms = e.variants.iter().map(|variant| {
        let name = &variant.ident;
        // Field list plus the delimiter needed to destructure the variant.
        let (fields, delimiter) = match &variant.fields {
            Fields::Named(fields) => (fields.named.iter(), Delimiter::Brace),
            Fields::Unnamed(fields) => (fields.unnamed.iter(), Delimiter::Parenthesis),
            // Unit variants contribute no arm; they hit the catch-all below.
            Fields::Unit => return quote!(),
        };
        // Name unnamed fields `__var_{i}` so they can be bound in the pattern.
        let fields: Punctuated<Ident, Token![,]> = fields
            .enumerate()
            .map(|(i, field)| {
                field.ident.as_ref().cloned().unwrap_or_else(|| format_ident!("__var_{i}"))
            })
            .collect();
        // Only single-field variants are supported by this derive.
        if fields.len() != 1 {
            unimplemented!("Enum variant with more than 1 field")
        }
        let field = fields.into_iter().next().unwrap();
        let fields = Group::new(delimiter, quote!(#field));
        quote! {
            Self::#name #fields => ConsoleFmt::fmt(#field, spec),
        }
    });
    quote! {
        fn fmt(&self, spec: FormatSpec) -> String {
            match self {
                #(#arms)*
                // Catch-all for unit variants; unreachable when every variant
                // carries a field, hence the `expect` attribute.
                #[expect(unreachable_code)]
                _ => String::new(),
            }
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/macros/src/cheatcodes.rs | crates/macros/src/cheatcodes.rs | use proc_macro2::{Ident, Span, TokenStream};
use quote::quote;
use syn::{Attribute, Data, DataStruct, DeriveInput, Error, Result};
// TODO: `proc_macro_error2` only emits warnings when feature "nightly" is enabled, which we can't
// practically enable.
// Forwards to `emit_error!`; see the TODO above — true warnings require the
// nightly-only feature of `proc_macro_error2`.
macro_rules! emit_warning {
    ($($t:tt)*) => {
        proc_macro_error2::emit_error! { $($t)* }
    };
}
/// Entry point for the `Cheatcode` derive.
///
/// Dispatches on the item's shape and name suffix:
/// * `*Call` structs — full `CheatcodeDef` implementation.
/// * `*Return` structs — ignored (empty output).
/// * other structs — struct/error/event metadata constants.
/// * `*Calls` enums — the aggregated `CHEATCODES` constant and dispatch macro.
/// * `*Errors`/`*Events` enums — aggregated `VM_ERRORS`/`VM_EVENTS` constants.
/// * other enums — an `ENUM` metadata constant.
pub fn derive_cheatcode(input: &DeriveInput) -> Result<TokenStream> {
    let name = &input.ident;
    let name_s = name.to_string();
    match &input.data {
        Data::Struct(s) if name_s.ends_with("Call") => derive_call(name, s, &input.attrs),
        Data::Struct(_) if name_s.ends_with("Return") => Ok(TokenStream::new()),
        Data::Struct(s) => derive_struct(name, s, &input.attrs),
        Data::Enum(e) if name_s.ends_with("Calls") => derive_calls_enum(name, e),
        Data::Enum(e) if name_s.ends_with("Errors") => derive_errors_events_enum(name, e, false),
        Data::Enum(e) if name_s.ends_with("Events") => derive_errors_events_enum(name, e, true),
        Data::Enum(e) => derive_enum(name, e, &input.attrs),
        Data::Union(_) => Err(Error::new(name.span(), "unions are not supported")),
    }
}
/// Implements `CheatcodeDef` for a function call struct.
///
/// Reads `#[cheatcode(group = ..., status = ..., safety = ...)]` attributes and the
/// `sol!`-generated docstring to assemble the static `Cheatcode` metadata.
fn derive_call(name: &Ident, data: &DataStruct, attrs: &[Attribute]) -> Result<TokenStream> {
    // Parse the `#[cheatcode(...)]` attribute list; each key may appear at most once.
    let mut group = None::<Ident>;
    let mut status = None::<TokenStream>;
    let mut safety = None::<Ident>;
    for attr in attrs.iter().filter(|a| a.path().is_ident("cheatcode")) {
        attr.meta.require_list()?.parse_nested_meta(|meta| {
            let path = meta.path.get_ident().ok_or_else(|| meta.error("expected ident"))?;
            let path_s = path.to_string();
            match path_s.as_str() {
                "group" if group.is_none() => group = Some(meta.value()?.parse()?),
                "status" if status.is_none() => status = Some(meta.value()?.parse()?),
                "safety" if safety.is_none() => safety = Some(meta.value()?.parse()?),
                _ => return Err(meta.error("unexpected attribute")),
            };
            Ok(())
        })?;
    }
    // `group` is mandatory; `status` defaults to `Stable`.
    let group = group.ok_or_else(|| {
        syn::Error::new(name.span(), "missing #[cheatcode(group = ...)] attribute")
    })?;
    let status = status.unwrap_or_else(|| quote!(Stable));
    // Without an explicit `safety`, fall back to the group's default at runtime.
    let safety = if let Some(safety) = safety {
        quote!(Safety::#safety)
    } else {
        quote! {
            match Group::#group.safety() {
                Some(s) => s,
                None => panic_unknown_safety(),
            }
        }
    };
    check_named_fields(data, name);
    // The cheatcode id is the struct name without the `Call` suffix.
    let id = name.to_string();
    let id = id.strip_suffix("Call").expect("function struct ends in Call");
    // Extract signature, selector, declaration and description from the docstring.
    let doc = get_docstring(attrs);
    let (signature, selector, declaration, description) = func_docstring(&doc);
    // Lint: parameter data locations (the part before ` returns `) must not use `memory`.
    let mut params = declaration;
    if let Some(ret) = params.find(" returns ") {
        params = &params[..ret];
    }
    if params.contains(" memory ") {
        emit_warning!(
            name.span(),
            "parameter data locations must be `calldata` instead of `memory`"
        );
    }
    let (visibility, mutability) = parse_function_attrs(declaration, name.span())?;
    let visibility = Ident::new(visibility, Span::call_site());
    let mutability = Ident::new(mutability, Span::call_site());
    if description.is_empty() {
        emit_warning!(name.span(), "missing documentation for a cheatcode")
    }
    // Normalize the docstring's one-space continuation indentation.
    let description = description.replace("\n ", "\n");
    Ok(quote! {
        impl CheatcodeDef for #name {
            const CHEATCODE: &'static Cheatcode<'static> = &Cheatcode {
                func: Function {
                    id: #id,
                    description: #description,
                    declaration: #declaration,
                    visibility: Visibility::#visibility,
                    mutability: Mutability::#mutability,
                    signature: #signature,
                    selector: #selector,
                    selector_bytes: <Self as ::alloy_sol_types::SolCall>::SELECTOR,
                },
                group: Group::#group,
                status: Status::#status,
                safety: #safety,
            };
        }
    })
}
/// Generates the `CHEATCODES` constant and implements `CheatcodeImpl` dispatch for an enum.
fn derive_calls_enum(name: &Ident, input: &syn::DataEnum) -> Result<TokenStream> {
    if input.variants.iter().any(|v| v.fields.len() != 1) {
        return Err(syn::Error::new(name.span(), "expected all variants to have a single field"));
    }
    // keep original order for matching
    let variant_names = input.variants.iter().map(|v| &v.ident);
    // Sort a copy alphabetically so `CHEATCODES` has a stable, sorted order.
    let mut variants = input.variants.iter().collect::<Vec<_>>();
    variants.sort_by(|a, b| a.ident.cmp(&b.ident));
    let variant_tys = variants.iter().map(|v| {
        assert_eq!(v.fields.len(), 1);
        &v.fields.iter().next().unwrap().ty
    });
    Ok(quote! {
        /// All the cheatcodes in [this contract](self).
        pub const CHEATCODES: &'static [&'static Cheatcode<'static>] = &[#(<#variant_tys as CheatcodeDef>::CHEATCODE,)*];
        /// Internal macro to implement the `Cheatcode` trait for the Vm calls enum.
        #[doc(hidden)]
        #[macro_export]
        macro_rules! vm_calls {
            ($mac:ident) => {
                $mac!(#(#variant_names),*)
            };
        }
    })
}
/// Generates the aggregated `VM_ERRORS` or `VM_EVENTS` constant for an
/// errors/events enum (selected by the `events` flag).
fn derive_errors_events_enum(
    name: &Ident,
    input: &syn::DataEnum,
    events: bool,
) -> Result<TokenStream> {
    if input.variants.iter().any(|v| v.fields.len() != 1) {
        return Err(syn::Error::new(name.span(), "expected all variants to have a single field"));
    }
    // Select constant name, associated-const name, metadata type and doc word.
    let (ident, ty_assoc_name, ty, doc) = if events {
        ("VM_EVENTS", "EVENT", "Event", "events")
    } else {
        ("VM_ERRORS", "ERROR", "Error", "custom errors")
    };
    let ident = Ident::new(ident, Span::call_site());
    let ty_assoc_name = Ident::new(ty_assoc_name, Span::call_site());
    let ty = Ident::new(ty, Span::call_site());
    let doc = format!("All the {doc} in [this contract](self).");
    // Sort variants alphabetically for a stable output order.
    let mut variants = input.variants.iter().collect::<Vec<_>>();
    variants.sort_by(|a, b| a.ident.cmp(&b.ident));
    let variant_tys = variants.iter().map(|v| {
        assert_eq!(v.fields.len(), 1);
        &v.fields.iter().next().unwrap().ty
    });
    Ok(quote! {
        #[doc = #doc]
        pub const #ident: &'static [&'static #ty<'static>] = &[#(#variant_tys::#ty_assoc_name,)*];
    })
}
/// Generates a `STRUCT`/`ERROR`/`EVENT` metadata constant for a `sol!`-generated
/// struct, classified by marker text in its docstring.
fn derive_struct(
    name: &Ident,
    input: &syn::DataStruct,
    attrs: &[Attribute],
) -> Result<TokenStream> {
    let name_s = name.to_string();
    let doc = get_docstring(attrs);
    let doc = doc.trim();
    // Classify the item from the `sol!`-generated docstring marker text.
    let kind = match () {
        () if doc.contains("Custom error ") => StructKind::Error,
        () if doc.contains("Event ") => StructKind::Event,
        _ => StructKind::Struct,
    };
    // Split off the Solidity definition embedded in the ```solidity fence.
    let (doc, def) = doc.split_once("```solidity\n").expect("bad docstring");
    let mut doc = doc.trim_end();
    let def_end = def.rfind("```").expect("bad docstring");
    let def = def[..def_end].trim();
    // Drop the auto-generated marker sentence from the description.
    match kind {
        StructKind::Error => doc = &doc[..doc.find("Custom error ").expect("bad doc")],
        StructKind::Event => doc = &doc[..doc.find("Event ").expect("bad doc")],
        StructKind::Struct => {}
    }
    let doc = doc.trim();
    if doc.is_empty() {
        // Pick the right article ("a"/"an") for the warning message.
        let n = match kind {
            StructKind::Error => "n",
            StructKind::Event => "n",
            StructKind::Struct => "",
        };
        emit_warning!(name.span(), "missing documentation for a{n} {}", kind.as_str());
    }
    if kind == StructKind::Struct {
        check_named_fields(input, name);
    }
    let def = match kind {
        StructKind::Struct => {
            // Recover each field's Solidity type by scanning the definition text.
            let fields = input.fields.iter().map(|f| {
                let name = f.ident.as_ref().expect("field has no name").to_string();
                let to_find = format!("{name};");
                let ty_end = def.find(&to_find).expect("field not found in def");
                let ty = &def[..ty_end];
                // The type starts right after the previous `;` (or the opening `{`).
                let ty_start = ty.rfind(';').or_else(|| ty.find('{')).expect("bad struct def") + 1;
                let ty = ty[ty_start..].trim();
                if ty.is_empty() {
                    panic!("bad struct def: {def:?}")
                }
                let doc = get_docstring(&f.attrs);
                let doc = doc.trim();
                quote! {
                    StructField {
                        name: #name,
                        ty: #ty,
                        description: #doc,
                    }
                }
            });
            quote! {
                /// The struct definition.
                pub const STRUCT: &'static Struct<'static> = &Struct {
                    name: #name_s,
                    description: #doc,
                    fields: Cow::Borrowed(&[#(#fields),*]),
                };
            }
        }
        StructKind::Error => {
            quote! {
                /// The custom error definition.
                pub const ERROR: &'static Error<'static> = &Error {
                    name: #name_s,
                    description: #doc,
                    declaration: #def,
                };
            }
        }
        StructKind::Event => {
            quote! {
                /// The event definition.
                pub const EVENT: &'static Event<'static> = &Event {
                    name: #name_s,
                    description: #doc,
                    declaration: #def,
                };
            }
        }
    };
    Ok(quote! {
        impl #name {
            #def
        }
    })
}
/// Which kind of `sol!`-generated item a struct's docstring describes.
#[derive(Clone, Copy, PartialEq, Eq)]
enum StructKind {
    Struct,
    Error,
    Event,
}
impl StructKind {
    /// Lowercase, human-readable name used in diagnostics.
    fn as_str(self) -> &'static str {
        match self {
            StructKind::Struct => "struct",
            StructKind::Error => "error",
            StructKind::Event => "event",
        }
    }
}
/// Generates the `ENUM` metadata constant for a `sol!`-generated enum.
fn derive_enum(name: &Ident, input: &syn::DataEnum, attrs: &[Attribute]) -> Result<TokenStream> {
    let name_s = name.to_string();
    // The description is everything before the ```solidity fence.
    let doc = get_docstring(attrs);
    let doc_end = doc.find("```solidity").expect("bad docstring");
    let doc = doc[..doc_end].trim();
    if doc.is_empty() {
        emit_warning!(name.span(), "missing documentation for an enum");
    }
    // Variants with explicit discriminants are skipped.
    let variants = input.variants.iter().filter(|v| v.discriminant.is_none()).map(|v| {
        let name = v.ident.to_string();
        let doc = get_docstring(&v.attrs);
        let doc = doc.trim();
        if doc.is_empty() {
            emit_warning!(v.ident.span(), "missing documentation for a variant");
        }
        quote! {
            EnumVariant {
                name: #name,
                description: #doc,
            }
        }
    });
    Ok(quote! {
        impl #name {
            /// The enum definition.
            pub const ENUM: &'static Enum<'static> = &Enum {
                name: #name_s,
                description: #doc,
                variants: Cow::Borrowed(&[#(#variants),*]),
            };
        }
    })
}
/// Emits one warning per unnamed parameter in a cheatcode struct.
fn check_named_fields(data: &DataStruct, ident: &Ident) {
    data.fields
        .iter()
        .filter(|field| field.ident.is_none())
        .for_each(|_| emit_warning!(ident, "all params must be named"));
}
/// Flattens all the `#[doc = "..."]` attributes into a single string.
fn get_docstring(attrs: &[Attribute]) -> String {
    let mut doc = String::new();
    for attr in attrs.iter().filter(|attr| attr.path().is_ident("doc")) {
        // Only string-literal name-value docs (`#[doc = "..."]`) contribute.
        let syn::Meta::NameValue(syn::MetaNameValue {
            value: syn::Expr::Lit(syn::ExprLit { lit: syn::Lit::Str(s), .. }),
            ..
        }) = &attr.meta
        else {
            continue;
        };
        let value = s.value();
        if value.is_empty() {
            continue;
        }
        // Separate individual doc lines with newlines.
        if !doc.is_empty() {
            doc.push('\n');
        }
        doc.push_str(&value);
    }
    doc
}
/// Returns `(signature, hex_selector, declaration, description)` from a given `sol!`-generated
/// docstring for a function.
///
/// # Examples
///
/// The following docstring (string literals are joined with newlines):
/// ```text
/// "Function with signature `foo(uint256)` and selector `0x1234abcd`."
/// "```solidity"
/// "function foo(uint256 x) external view returns (bool y);"
/// "```"
/// "Description of the function."
/// ```
///
/// Will return:
/// ```text
/// (
///     "foo(uint256)",
///     "0x1234abcd",
///     "function foo(uint256 x) external view returns (bool y);",
///     "Description of the function."
/// )
/// ```
fn func_docstring(doc: &str) -> (&str, &str, &str, &str) {
    // Find the `sol!`-generated sentence; any text before it is a fallback description.
    let expected_start = "Function with signature `";
    let start = doc.find(expected_start).expect("no auto docstring");
    let (descr_before, auto) = doc.split_at(start);
    let mut lines = auto.lines();
    let mut next = || lines.next().expect("unexpected end of docstring");
    // Expected layout: signature line, ```solidity, the declaration, closing ```.
    let sig_line = next();
    let example_start = next();
    assert_eq!(example_start, "```solidity");
    let declaration = next();
    let example_end = next();
    assert_eq!(example_end, "```");
    // The signature is the backtick-delimited text after the expected prefix.
    let n = expected_start.len();
    let mut sig_end = n;
    sig_end += sig_line[n..].find('`').unwrap();
    let sig = &sig_line[n..sig_end];
    assert!(!sig.starts_with('`') && !sig.ends_with('`'));
    // The selector is the last backtick-delimited `0x...` chunk on the same line.
    let selector_end = sig_line.rfind('`').unwrap();
    let selector = sig_line[sig_end..selector_end].strip_prefix("` and selector `").unwrap();
    assert!(!selector.starts_with('`') && !selector.ends_with('`'));
    assert!(selector.starts_with("0x"));
    // Prefer text after the closing fence; otherwise the text before the sentence.
    let description = match doc.find("```\n") {
        Some(i) => &doc[i + 4..],
        None => descr_before,
    };
    (sig, selector, declaration, description.trim())
}
/// Returns `(visibility, mutability)` from a given Solidity function declaration.
fn parse_function_attrs(f: &str, span: Span) -> Result<(&str, &str)> {
let Some(ext_start) = f.find("external") else {
return Err(Error::new(span, "functions must have `external` visibility"));
};
let visibility = "External";
let f = &f[ext_start..];
let mutability = if f.contains("view") {
"View"
} else if f.contains("pure") {
"Pure"
} else {
"None"
};
Ok((visibility, mutability))
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script-sequence/src/reader.rs | crates/script-sequence/src/reader.rs | use crate::{ScriptSequence, TransactionWithMetadata};
use alloy_network::AnyTransactionReceipt;
use eyre::{Result, bail};
use foundry_common::fs;
use revm_inspectors::tracing::types::CallKind;
use std::path::{Component, Path, PathBuf};
/// This type reads broadcast files in the
/// `project_root/broadcast/{contract_name}.s.sol/{chain_id}/` directory.
///
/// It consists of methods that filter and search for transactions in the broadcast files that
/// match a `transactionType` if provided.
///
/// Note:
///
/// It only returns transactions for which there exists a corresponding receipt in the broadcast.
#[derive(Debug, Clone)]
pub struct BroadcastReader {
    /// Contract (script) name used to filter transactions.
    contract_name: String,
    /// Chain id that sequences must match.
    chain_id: u64,
    /// Transaction kinds to keep; empty means no type filtering.
    tx_type: Vec<CallKind>,
    /// Root directory containing the broadcast artifacts.
    broadcast_path: PathBuf,
}
impl BroadcastReader {
    /// Create a new `BroadcastReader` instance.
    ///
    /// Errors if `broadcast_path` does not exist or is not a directory.
    pub fn new(contract_name: String, chain_id: u64, broadcast_path: &Path) -> Result<Self> {
        if !broadcast_path.is_dir() {
            bail!("broadcast dir does not exist or is not a directory");
        }
        Ok(Self {
            contract_name,
            chain_id,
            tx_type: Default::default(),
            broadcast_path: broadcast_path.to_path_buf(),
        })
    }
    /// Set the transaction type to filter by.
    ///
    /// May be called multiple times; a transaction matches if its kind is any added type.
    pub fn with_tx_type(mut self, tx_type: CallKind) -> Self {
        self.tx_type.push(tx_type);
        self
    }
    /// Read all broadcast files in the broadcast directory.
    ///
    /// Example structure:
    ///
    /// project-root/broadcast/{script_name}.s.sol/{chain_id}/*.json
    /// project-root/broadcast/multi/{multichain_script_name}.s.sol-{timestamp}/deploy.json
    pub fn read(&self) -> eyre::Result<Vec<ScriptSequence>> {
        // 1. Recursively read all .json files in the broadcast directory
        let mut broadcasts = vec![];
        for entry in walkdir::WalkDir::new(&self.broadcast_path).into_iter() {
            let entry = entry?;
            let path = entry.path();
            if path.is_file() && path.extension().is_some_and(|ext| ext == "json") {
                // Ignore -latest to avoid duplicating broadcast entries
                if path.components().any(|c| c.as_os_str().to_string_lossy().contains("-latest")) {
                    continue;
                }
                // Detect Multichain broadcasts using "multi" in the path
                if path.components().any(|c| c == Component::Normal("multi".as_ref())) {
                    // Parse as MultiScriptSequence
                    let broadcast = fs::read_json_file::<serde_json::Value>(path)?;
                    // Missing or malformed `deployments` entries are skipped silently.
                    let multichain_deployments = broadcast
                        .get("deployments")
                        .and_then(|deployments| {
                            serde_json::from_value::<Vec<ScriptSequence>>(deployments.clone()).ok()
                        })
                        .unwrap_or_default();
                    broadcasts.extend(multichain_deployments);
                    continue;
                }
                let broadcast = fs::read_json_file::<ScriptSequence>(path)?;
                broadcasts.push(broadcast);
            }
        }
        // 2. Apply chain/name/type filters and sort by descending timestamp.
        let broadcasts = self.filter_and_sort(broadcasts);
        Ok(broadcasts)
    }
    /// Attempts to read the latest broadcast in the broadcast directory.
    ///
    /// Note that `read` skips `*-latest.json` files, so "latest" here means the
    /// timestamped broadcast with the greatest `timestamp` value.
    pub fn read_latest(&self) -> eyre::Result<ScriptSequence> {
        let broadcasts = self.read()?;
        // Find the broadcast with the latest timestamp
        let target = broadcasts
            .into_iter()
            .max_by_key(|broadcast| broadcast.timestamp)
            .ok_or_else(|| eyre::eyre!("No broadcasts found"))?;
        Ok(target)
    }
    /// Applies the filters and sorts the broadcasts by descending timestamp.
    pub fn filter_and_sort(&self, broadcasts: Vec<ScriptSequence>) -> Vec<ScriptSequence> {
        // Apply the filters
        let mut seqs = broadcasts
            .into_iter()
            .filter(|broadcast| {
                if broadcast.chain != self.chain_id {
                    return false;
                }
                // Keep the sequence if any transaction matches both the contract-name
                // filter and (when configured) the tx-type filter.
                broadcast.transactions.iter().any(move |tx| {
                    let name_filter =
                        tx.contract_name.as_ref().is_some_and(|cn| *cn == self.contract_name);
                    let type_filter = self.tx_type.is_empty() || self.tx_type.contains(&tx.opcode);
                    name_filter && type_filter
                })
            })
            .collect::<Vec<_>>();
        // Sort by descending timestamp
        seqs.sort_by(|a, b| b.timestamp.cmp(&a.timestamp));
        seqs
    }
    /// Search for transactions in the broadcast that match the specified `contractName` and
    /// `txType`.
    ///
    /// It cross-checks the transactions with their corresponding receipts in the broadcast and
    /// returns the result.
    ///
    /// Transactions that don't have a corresponding receipt are ignored.
    ///
    /// Sorts the transactions by descending block number.
    pub fn into_tx_receipts(
        &self,
        broadcast: ScriptSequence,
    ) -> Vec<(TransactionWithMetadata, AnyTransactionReceipt)> {
        let ScriptSequence { transactions, receipts, .. } = broadcast;
        let mut targets = Vec::new();
        for tx in transactions.into_iter().filter(|tx| {
            let name_filter = tx.contract_name.as_ref().is_some_and(|cn| *cn == self.contract_name);
            let type_filter = self.tx_type.is_empty() || self.tx_type.contains(&tx.opcode);
            name_filter && type_filter
        }) {
            // Pair the transaction with its receipt by hash; drop it if none exists.
            let maybe_receipt = receipts
                .iter()
                .find(|receipt| tx.hash.is_some_and(|hash| hash == receipt.transaction_hash));
            if let Some(receipt) = maybe_receipt {
                targets.push((tx, receipt.clone()));
            }
        }
        // Sort by descending block number
        targets.sort_by(|a, b| b.1.block_number.cmp(&a.1.block_number));
        targets
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script-sequence/src/sequence.rs | crates/script-sequence/src/sequence.rs | use crate::transaction::TransactionWithMetadata;
use alloy_network::AnyTransactionReceipt;
use alloy_primitives::{TxHash, hex, map::HashMap};
use eyre::{ContextCompat, Result, WrapErr};
use foundry_common::{SELECTOR_LEN, TransactionMaybeSigned, fs, shell};
use foundry_compilers::ArtifactId;
use foundry_config::Config;
use serde::{Deserialize, Serialize};
use std::{
collections::VecDeque,
io::{BufWriter, Write},
path::PathBuf,
time::{Duration, SystemTime, UNIX_EPOCH},
};
/// Subdirectory name used for sequences produced by dry runs.
pub const DRY_RUN_DIR: &str = "dry-run";
/// A typed value rendered as strings; used for script return values
/// (see `ScriptSequence::returns`).
#[derive(Clone, Serialize, Deserialize)]
pub struct NestedValue {
    /// The Solidity internal type of the value (e.g. `uint256`).
    pub internal_type: String,
    /// The stringified value.
    pub value: String,
}
/// Helper that saves the transactions sequence and its state on which transactions have been
/// broadcasted
#[derive(Clone, Default, Serialize, Deserialize)]
pub struct ScriptSequence {
    /// The transactions of this sequence, in order.
    pub transactions: VecDeque<TransactionWithMetadata>,
    /// Receipts for transactions that have been broadcasted.
    pub receipts: Vec<AnyTransactionReceipt>,
    // NOTE(review): presumably serialized library identifiers — confirm the format.
    pub libraries: Vec<String>,
    /// Hashes of transactions that were sent but are not yet confirmed.
    pub pending: Vec<TxHash>,
    #[serde(skip)]
    /// Contains paths to the sequence files
    /// None if sequence should not be saved to disk (e.g. part of a multi-chain sequence)
    pub paths: Option<(PathBuf, PathBuf)>,
    /// Named return values of the script run.
    pub returns: HashMap<String, NestedValue>,
    /// Save time in milliseconds since the Unix epoch (set by `save`).
    pub timestamp: u128,
    /// Chain id this sequence targets.
    pub chain: u64,
    // NOTE(review): likely the git commit hash the run was produced from — confirm.
    pub commit: Option<String>,
}
/// Sensitive values from the transactions in a script sequence
#[derive(Clone, Default, Serialize, Deserialize)]
pub struct SensitiveTransactionMetadata {
    /// The RPC endpoint the transaction was sent through.
    pub rpc: String,
}
/// Sensitive info from the script sequence which is saved into the cache folder
#[derive(Clone, Default, Serialize, Deserialize)]
pub struct SensitiveScriptSequence {
    /// One entry per transaction, in the same order as `ScriptSequence::transactions`.
    pub transactions: VecDeque<SensitiveTransactionMetadata>,
}
impl From<&ScriptSequence> for SensitiveScriptSequence {
    /// Extracts only the sensitive RPC endpoint from each transaction.
    fn from(sequence: &ScriptSequence) -> Self {
        let transactions = sequence
            .transactions
            .iter()
            .map(|tx| SensitiveTransactionMetadata { rpc: tx.rpc.clone() })
            .collect();
        Self { transactions }
    }
}
impl ScriptSequence {
    /// Loads The sequence for the corresponding json file
    pub fn load(
        config: &Config,
        sig: &str,
        target: &ArtifactId,
        chain_id: u64,
        dry_run: bool,
    ) -> Result<Self> {
        let (path, sensitive_path) = Self::get_paths(config, sig, target, chain_id, dry_run)?;
        let mut script_sequence: Self = fs::read_json_file(&path)
            .wrap_err(format!("Deployment not found for chain `{chain_id}`."))?;
        let sensitive_script_sequence: SensitiveScriptSequence = fs::read_json_file(
            &sensitive_path,
        )
        .wrap_err(format!("Deployment's sensitive details not found for chain `{chain_id}`."))?;
        // Merge the sensitive values (RPC URLs) back into the public sequence.
        script_sequence.fill_sensitive(&sensitive_script_sequence);
        script_sequence.paths = Some((path, sensitive_path));
        Ok(script_sequence)
    }
    /// Saves the transactions as file if it's a standalone deployment.
    /// `save_ts` should be set to true for checkpoint updates, which might happen many times and
    /// could result in us saving many identical files.
    pub fn save(&mut self, silent: bool, save_ts: bool) -> Result<()> {
        self.sort_receipts();
        if self.transactions.is_empty() {
            return Ok(());
        }
        self.timestamp = now().as_millis();
        let ts_name = format!("run-{}.json", self.timestamp);
        let sensitive_script_sequence = SensitiveScriptSequence::from(&*self);
        // No paths means this sequence is not saved on its own
        // (e.g. part of a multi-chain sequence).
        let Some((path, sensitive_path)) = self.paths.as_ref() else { return Ok(()) };
        // broadcast folder writes
        //../run-latest.json
        let mut writer = BufWriter::new(fs::create_file(path)?);
        serde_json::to_writer_pretty(&mut writer, &self)?;
        writer.flush()?;
        if save_ts {
            //../run-[timestamp].json
            fs::copy(path, path.with_file_name(&ts_name))?;
        }
        // cache folder writes
        //../run-latest.json
        let mut writer = BufWriter::new(fs::create_file(sensitive_path)?);
        serde_json::to_writer_pretty(&mut writer, &sensitive_script_sequence)?;
        writer.flush()?;
        if save_ts {
            //../run-[timestamp].json
            fs::copy(sensitive_path, sensitive_path.with_file_name(&ts_name))?;
        }
        if !silent {
            if shell::is_json() {
                sh_println!(
                    "{}",
                    serde_json::json!({
                        "status": "success",
                        "transactions": path.display().to_string(),
                        "sensitive": sensitive_path.display().to_string(),
                    })
                )?;
            } else {
                sh_println!("\nTransactions saved to: {}\n", path.display())?;
                sh_println!("Sensitive values saved to: {}\n", sensitive_path.display())?;
            }
        }
        Ok(())
    }
    /// Appends a receipt to the sequence.
    pub fn add_receipt(&mut self, receipt: AnyTransactionReceipt) {
        self.receipts.push(receipt);
    }
    /// Sorts all receipts with ascending transaction index
    pub fn sort_receipts(&mut self) {
        self.receipts.sort_by_key(|r| (r.block_number, r.transaction_index));
    }
    /// Records `tx_hash` as pending and stores it on the transaction at `index`.
    pub fn add_pending(&mut self, index: usize, tx_hash: TxHash) {
        if !self.pending.contains(&tx_hash) {
            self.transactions[index].hash = Some(tx_hash);
            self.pending.push(tx_hash);
        }
    }
    /// Removes `tx_hash` from the pending set.
    pub fn remove_pending(&mut self, tx_hash: TxHash) {
        self.pending.retain(|element| element != &tx_hash);
    }
    /// Gets paths in the formats
    /// `./broadcast/[contract_filename]/[chain_id]/[sig]-[timestamp].json` and
    /// `./cache/[contract_filename]/[chain_id]/[sig]-[timestamp].json`.
    pub fn get_paths(
        config: &Config,
        sig: &str,
        target: &ArtifactId,
        chain_id: u64,
        dry_run: bool,
    ) -> Result<(PathBuf, PathBuf)> {
        let mut broadcast = config.broadcast.to_path_buf();
        let mut cache = config.cache_path.to_path_buf();
        let mut common = PathBuf::new();
        let target_fname = target.source.file_name().wrap_err("No filename.")?;
        common.push(target_fname);
        common.push(chain_id.to_string());
        if dry_run {
            common.push(DRY_RUN_DIR);
        }
        broadcast.push(common.clone());
        cache.push(common);
        fs::create_dir_all(&broadcast)?;
        fs::create_dir_all(&cache)?;
        // TODO: ideally we want the name of the function here if sig is calldata
        let filename = sig_to_file_name(sig);
        // Fix: interpolate the derived `filename` (previously a hard-coded
        // placeholder was written, dropping `filename` entirely) so the sequence
        // is saved and loaded as `<function-or-selector>-latest.json`.
        let filename_with_ext = format!("{filename}-latest.json");
        broadcast.push(&filename_with_ext);
        cache.push(&filename_with_ext);
        Ok((broadcast, cache))
    }
    /// Returns the first RPC URL of this sequence.
    ///
    /// # Panics
    /// Panics if the sequence contains no transactions.
    pub fn rpc_url(&self) -> &str {
        self.transactions.front().expect("empty sequence").rpc.as_str()
    }
    /// Returns the list of the transactions without the metadata.
    pub fn transactions(&self) -> impl Iterator<Item = &TransactionMaybeSigned> {
        self.transactions.iter().map(|tx| tx.tx())
    }
    /// Copies the sensitive RPC values back onto the transactions.
    ///
    /// NOTE(review): assumes `sensitive.transactions` has at least as many
    /// entries as `self.transactions`; panics on a mismatch.
    pub fn fill_sensitive(&mut self, sensitive: &SensitiveScriptSequence) {
        self.transactions
            .iter_mut()
            .enumerate()
            .for_each(|(i, tx)| tx.rpc.clone_from(&sensitive.transactions[i].rpc));
    }
}
/// Converts the `sig` argument into the corresponding file path.
///
/// This accepts either the signature of the function or the raw calldata.
pub fn sig_to_file_name(sig: &str) -> String {
    // Function signature: keep everything before the argument list.
    if let Some(paren) = sig.find('(') {
        return sig[..paren].to_string();
    }
    // Otherwise try to interpret `sig` as hex calldata and use its selector.
    let stripped = sig.strip_prefix("0x").unwrap_or(sig);
    match hex::decode(stripped) {
        // Long enough to contain a selector: return it hex-encoded.
        Ok(calldata) if calldata.len() >= SELECTOR_LEN => hex::encode(&calldata[..SELECTOR_LEN]),
        // Invalid hex or too short: fall back to the original input.
        _ => sig.to_string(),
    }
}
/// Returns the duration elapsed since the Unix epoch.
///
/// # Panics
/// Panics if the system clock is set before the Unix epoch.
pub fn now() -> Duration {
    UNIX_EPOCH.elapsed().expect("time went backwards")
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises `sig_to_file_name` across signatures, calldata, and fallback inputs.
    #[test]
    fn can_convert_sig() {
        assert_eq!(sig_to_file_name("run()").as_str(), "run");
        assert_eq!(
            sig_to_file_name(
                "522bb704000000000000000000000000f39fd6e51aad88f6f4ce6ab8827279cfFFb92266"
            )
            .as_str(),
            "522bb704"
        );
        // valid calldata with 0x prefix
        assert_eq!(
            sig_to_file_name(
                "0x522bb704000000000000000000000000f39fd6e51aad88f6f4ce6ab8827279cfFFb92266"
            )
            .as_str(),
            "522bb704"
        );
        // short calldata: should not panic and should return input as-is
        assert_eq!(sig_to_file_name("0x1234").as_str(), "0x1234");
        assert_eq!(sig_to_file_name("123").as_str(), "123");
        // invalid hex: should return input as-is
        assert_eq!(sig_to_file_name("0xnotahex").as_str(), "0xnotahex");
        // non-hex non-signature: should return input as-is
        assert_eq!(sig_to_file_name("not_a_sig_or_hex").as_str(), "not_a_sig_or_hex");
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script-sequence/src/lib.rs | crates/script-sequence/src/lib.rs | //! Script Sequence and related types.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#[macro_use]
extern crate foundry_common;
pub mod reader;
pub mod sequence;
pub mod transaction;
pub use reader::*;
pub use sequence::*;
pub use transaction::*;
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/script-sequence/src/transaction.rs | crates/script-sequence/src/transaction.rs | use alloy_primitives::{Address, B256, Bytes};
use foundry_common::TransactionMaybeSigned;
use revm_inspectors::tracing::types::CallKind;
use serde::{Deserialize, Serialize};
/// A contract deployed as a side effect of a transaction (beyond the primary target).
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct AdditionalContract {
    /// The call kind that created the contract (serialized as `transactionType`).
    #[serde(rename = "transactionType")]
    pub opcode: CallKind,
    /// Name of the deployed contract, if known.
    pub contract_name: Option<String>,
    /// Address the contract was deployed to.
    pub address: Address,
    /// The deployment init code.
    pub init_code: Bytes,
}
/// A script transaction together with the metadata recorded in broadcast files.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TransactionWithMetadata {
    /// Transaction hash, set once the transaction has been sent.
    pub hash: Option<B256>,
    /// The call kind (serialized as `transactionType`).
    #[serde(rename = "transactionType")]
    pub opcode: CallKind,
    /// Target contract name; defaults to `Some("")` when absent in the file.
    #[serde(default = "default_string")]
    pub contract_name: Option<String>,
    /// Target contract address; defaults to `Some(Address::ZERO)` when absent.
    #[serde(default = "default_address")]
    pub contract_address: Option<Address>,
    /// Called function; defaults to `Some("")` when absent.
    #[serde(default = "default_string")]
    pub function: Option<String>,
    /// Stringified call arguments; defaults to `Some(vec![])` when absent.
    #[serde(default = "default_vec_of_strings")]
    pub arguments: Option<Vec<String>>,
    /// RPC endpoint; sensitive, stored separately and never serialized here.
    #[serde(skip)]
    pub rpc: String,
    /// The underlying (possibly signed) transaction.
    pub transaction: TransactionMaybeSigned,
    /// Contracts deployed as side effects of this transaction.
    pub additional_contracts: Vec<AdditionalContract>,
    /// Whether the gas limit was fixed by the user rather than estimated.
    pub is_fixed_gas_limit: bool,
}
/// Serde default: an empty — but present — string.
fn default_string() -> Option<String> {
    Some(String::default())
}
/// Serde default: the zero address, wrapped in `Some`.
fn default_address() -> Option<Address> {
    let zero = Address::ZERO;
    Some(zero)
}
/// Serde default: an empty — but present — argument list.
fn default_vec_of_strings() -> Option<Vec<String>> {
    Some(Vec::new())
}
impl TransactionWithMetadata {
    /// Wraps a raw transaction request, leaving every metadata field at its default value.
    pub fn from_tx_request(transaction: TransactionMaybeSigned) -> Self {
        Self {
            transaction,
            hash: Default::default(),
            opcode: Default::default(),
            contract_name: Default::default(),
            contract_address: Default::default(),
            function: Default::default(),
            arguments: Default::default(),
            is_fixed_gas_limit: Default::default(),
            additional_contracts: Default::default(),
            rpc: Default::default(),
        }
    }

    /// Returns a reference to the underlying transaction.
    pub fn tx(&self) -> &TransactionMaybeSigned {
        &self.transaction
    }

    /// Returns a mutable reference to the underlying transaction.
    pub fn tx_mut(&mut self) -> &mut TransactionMaybeSigned {
        &mut self.transaction
    }

    /// Returns `true` if this transaction was performed via the `CREATE2` opcode.
    pub fn is_create2(&self) -> bool {
        self.opcode == CallKind::Create2
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/lockfile.rs | crates/forge/src/lockfile.rs | //! foundry.lock handler type.
use alloy_primitives::map::HashMap;
use eyre::{OptionExt, Result};
use foundry_cli::utils::Git;
use serde::{Deserialize, Serialize};
use std::{
collections::{BTreeMap, hash_map::Entry},
path::{Path, PathBuf},
};
pub const FOUNDRY_LOCK: &str = "foundry.lock";
/// A type alias for a HashMap of dependencies keyed by relative path to the submodule dir.
pub type DepMap = HashMap<PathBuf, DepIdentifier>;
/// A lockfile handler that keeps track of the dependencies and their current state.
///
/// Serialized as a flat JSON map from submodule path to [`DepIdentifier`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Lockfile<'a> {
    /// A map of the dependencies keyed by relative path to the submodule dir.
    #[serde(flatten)]
    deps: DepMap,
    /// This is optional to handle no-git scenarios.
    #[serde(skip)]
    git: Option<&'a Git<'a>>,
    /// Absolute path to the lockfile.
    #[serde(skip)]
    lockfile_path: PathBuf,
}
impl<'a> Lockfile<'a> {
    /// Create a new [`Lockfile`] instance.
    ///
    /// `project_root` is the absolute path to the project root.
    ///
    /// You will need to call [`Lockfile::read`] or [`Lockfile::sync`] to load the lockfile.
    pub fn new(project_root: &Path) -> Self {
        Self { deps: HashMap::default(), git: None, lockfile_path: project_root.join(FOUNDRY_LOCK) }
    }

    /// Set the git instance to be used for submodule operations.
    ///
    /// Without a git instance, [`Lockfile::sync`] is effectively a no-op.
    pub fn with_git(mut self, git: &'a Git<'_>) -> Self {
        self.git = Some(git);
        self
    }

    /// Sync the foundry.lock file with the current state of `git submodules`.
    ///
    /// If the lockfile and git submodules are out of sync, it returns a [`DepMap`] consisting of
    /// _only_ the out-of-sync dependencies.
    ///
    /// Note: this method only updates the in-memory map; callers are expected to invoke
    /// [`Lockfile::write`] to persist any changes.
    pub fn sync(&mut self, lib: &Path) -> Result<Option<DepMap>> {
        // A missing lockfile is fine here — the map is populated from the submodule state
        // below. Any other read error is propagated.
        match self.read() {
            Ok(_) => {}
            Err(e) => {
                if !e.to_string().contains("Lockfile not found") {
                    return Err(e);
                }
            }
        }
        if let Some(git) = &self.git {
            let submodules = git.submodules()?;
            if submodules.is_empty() {
                trace!("No submodules found. Skipping sync.");
                return Ok(None);
            }
            // Branch associations are declared in `.gitmodules`, not in the submodule state.
            let modules_with_branch = git
                .read_submodules_with_branch(&Git::root_of(git.root)?, lib.file_name().unwrap())?;
            let mut out_of_sync: DepMap = HashMap::default();
            for sub in &submodules {
                let rel_path = sub.path();
                let rev = sub.rev();
                let entry = self.deps.entry(rel_path.to_path_buf());
                match entry {
                    Entry::Occupied(e) => {
                        // Known dependency: out of sync only if the checked-out rev differs.
                        if e.get().rev() != rev {
                            out_of_sync.insert(rel_path.to_path_buf(), e.get().clone());
                        }
                    }
                    Entry::Vacant(e) => {
                        // Check if there is branch specified for the submodule at rel_path in
                        // .gitmodules
                        let maybe_branch = modules_with_branch.get(rel_path).map(|b| b.to_string());
                        trace!(?maybe_branch, submodule = ?rel_path, "submodule branch");
                        if let Some(branch) = maybe_branch {
                            let dep_id = DepIdentifier::Branch {
                                name: branch,
                                rev: rev.to_string(),
                                r#override: false,
                            };
                            e.insert(dep_id.clone());
                            out_of_sync.insert(rel_path.to_path_buf(), dep_id);
                            continue;
                        }
                        // No branch configured: pin to the bare commit hash.
                        let dep_id = DepIdentifier::Rev { rev: rev.to_string(), r#override: false };
                        trace!(submodule=?rel_path, ?dep_id, "submodule dep_id");
                        e.insert(dep_id.clone());
                        out_of_sync.insert(rel_path.to_path_buf(), dep_id);
                    }
                }
            }
            return Ok(if out_of_sync.is_empty() { None } else { Some(out_of_sync) });
        }
        Ok(None)
    }

    /// Loads the lockfile from the project root.
    ///
    /// Throws an error if the lockfile does not exist.
    pub fn read(&mut self) -> Result<()> {
        // NOTE: `sync` matches on the "Lockfile not found" text below — keep them in sync.
        if !self.lockfile_path.exists() {
            return Err(eyre::eyre!("Lockfile not found at {}", self.lockfile_path.display()));
        }
        let lockfile_str = foundry_common::fs::read_to_string(&self.lockfile_path)?;
        self.deps = serde_json::from_str(&lockfile_str)?;
        trace!(lockfile = ?self.deps, "loaded lockfile");
        Ok(())
    }

    /// Writes the lockfile to the project root.
    ///
    /// Dependencies are written sorted by path so the output is deterministic.
    pub fn write(&self) -> Result<()> {
        // `deps` is a HashMap; collect into a BTreeMap for stable, ordered output.
        let ordered_deps: BTreeMap<_, _> = self.deps.clone().into_iter().collect();
        foundry_common::fs::write_pretty_json_file(&self.lockfile_path, &ordered_deps)?;
        trace!(at= ?self.lockfile_path, "wrote lockfile");
        Ok(())
    }

    /// Insert a dependency into the lockfile.
    /// If the dependency already exists, it will be updated.
    ///
    /// Note: This does not write the updated lockfile to disk, only inserts the dep in-memory.
    pub fn insert(&mut self, path: PathBuf, dep_id: DepIdentifier) {
        self.deps.insert(path, dep_id);
    }

    /// Get the [`DepIdentifier`] for a submodule at a given path.
    pub fn get(&self, path: &Path) -> Option<&DepIdentifier> {
        self.deps.get(path)
    }

    /// Removes a dependency from the lockfile.
    ///
    /// Note: This does not write the updated lockfile to disk, only removes the dep in-memory.
    pub fn remove(&mut self, path: &Path) -> Option<DepIdentifier> {
        self.deps.remove(path)
    }

    /// Override a dependency in the lockfile.
    ///
    /// Returns the overridden/previous [`DepIdentifier`].
    /// This is used in `forge update` to decide whether a dep's tag/branch/rev should be updated.
    ///
    /// Throws an error if the dependency is not found in the lockfile.
    pub fn override_dep(
        &mut self,
        dep: &Path,
        mut new_dep_id: DepIdentifier,
    ) -> Result<DepIdentifier> {
        let prev = self
            .deps
            .get_mut(dep)
            .map(|d| {
                // Mark the replacement so `forge update` knows it was explicitly overridden.
                new_dep_id.mark_override();
                std::mem::replace(d, new_dep_id)
            })
            .ok_or_eyre(format!("Dependency not found in lockfile: {}", dep.display()))?;
        Ok(prev)
    }

    /// Returns the num of dependencies in the lockfile.
    pub fn len(&self) -> usize {
        self.deps.len()
    }

    /// Returns whether the lockfile is empty.
    pub fn is_empty(&self) -> bool {
        self.deps.is_empty()
    }

    /// Returns an iterator over the lockfile.
    pub fn iter(&self) -> impl Iterator<Item = (&PathBuf, &DepIdentifier)> {
        self.deps.iter()
    }

    /// Returns an mutable iterator over the lockfile.
    pub fn iter_mut(&mut self) -> impl Iterator<Item = (&PathBuf, &mut DepIdentifier)> {
        self.deps.iter_mut()
    }

    /// Returns whether a lockfile exists on disk at the configured path.
    pub fn exists(&self) -> bool {
        self.lockfile_path.exists()
    }
}
// Implement .iter() for &LockFile
/// Identifies whether a dependency (submodule) is referenced by a branch,
/// tag or rev (commit hash).
///
/// Each enum variant consists of an `r#override` flag which is used in `forge update` to decide
/// whether to update a dep or not. This flag is skipped during serialization.
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
pub enum DepIdentifier {
    /// `name` of the branch and the `rev` it is currently pointing to.
    /// Running `forge update`, will update the `name` branch to the latest `rev`.
    #[serde(rename = "branch")]
    Branch {
        name: String,
        rev: String,
        // Runtime-only marker; never serialized.
        #[serde(skip)]
        r#override: bool,
    },
    /// Release tag `name` and the `rev` it is currently pointing to.
    /// Running `forge update` does not update the tag/rev.
    /// Dependency will remain pinned to the existing tag/rev unless r#override like so `forge
    /// update owner/dep@tag=different_tag`.
    #[serde(rename = "tag")]
    Tag {
        name: String,
        rev: String,
        #[serde(skip)]
        r#override: bool,
    },
    /// Commit hash `rev` the submodule is currently pointing to.
    /// Running `forge update` does not update the rev.
    /// Dependency will remain pinned to the existing rev unless r#override.
    ///
    /// Note: `untagged`, so this variant (de)serializes as a bare `{"rev": …}` map.
    #[serde(rename = "rev", untagged)]
    Rev {
        rev: String,
        #[serde(skip)]
        r#override: bool,
    },
}
impl DepIdentifier {
    /// Resolves the [`DepIdentifier`] for a submodule at a given path.
    /// `lib_path` is the absolute path to the submodule.
    ///
    /// Resolution precedence: tag, then branch, then raw commit hash.
    pub fn resolve_type(git: &Git<'_>, lib_path: &Path, s: &str) -> Result<Self> {
        trace!(lib_path = ?lib_path, resolving_type = ?s, "resolving submodule identifier");
        if git.has_tag(s, lib_path)? {
            let rev = git.get_rev(s, lib_path)?;
            return Ok(Self::Tag { name: s.to_string(), rev, r#override: false });
        }
        if git.has_branch(s, lib_path)? {
            let rev = git.get_rev(s, lib_path)?;
            return Ok(Self::Branch { name: s.to_string(), rev, r#override: false });
        }
        if git.has_rev(s, lib_path)? {
            return Ok(Self::Rev { rev: s.to_string(), r#override: false });
        }
        Err(eyre::eyre!("Could not resolve tag type for submodule at path {}", lib_path.display()))
    }

    /// Returns the commit hash the dependency is currently pinned to.
    pub fn rev(&self) -> &str {
        match self {
            Self::Branch { rev, .. } | Self::Tag { rev, .. } | Self::Rev { rev, .. } => rev,
        }
    }

    /// Returns the name of the dependency.
    ///
    /// For a plain [`Self::Rev`], this is the commit hash itself.
    pub fn name(&self) -> &str {
        match self {
            Self::Branch { name, .. } | Self::Tag { name, .. } => name,
            Self::Rev { rev, .. } => rev,
        }
    }

    /// Returns the name/rev to check the submodule out at.
    pub fn checkout_id(&self) -> &str {
        // Identical resolution to `name`: branches and tags check out by name, revs by hash.
        self.name()
    }

    /// Marks the dependency as overridden.
    pub fn mark_override(&mut self) {
        let (Self::Branch { r#override, .. }
        | Self::Tag { r#override, .. }
        | Self::Rev { r#override, .. }) = self;
        *r#override = true;
    }

    /// Returns whether the dependency has been overridden.
    pub fn overridden(&self) -> bool {
        match self {
            Self::Branch { r#override, .. }
            | Self::Tag { r#override, .. }
            | Self::Rev { r#override, .. } => *r#override,
        }
    }

    /// Returns whether the dependency is a branch.
    pub fn is_branch(&self) -> bool {
        matches!(self, Self::Branch { .. })
    }
}
impl std::fmt::Display for DepIdentifier {
    /// Formats as `branch=name@rev`, `tag=name@rev`, or `rev=hash`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Rev { rev, .. } => write!(f, "rev={rev}"),
            Self::Branch { name, rev, .. } => write!(f, "branch={name}@{rev}"),
            Self::Tag { name, rev, .. } => write!(f, "tag={name}@{rev}"),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::tempdir;

    /// Round-trips every [`DepIdentifier`] variant through serde and pins the exact JSON
    /// shape (`branch`/`tag` externally tagged, `rev` untagged, `r#override` skipped).
    #[test]
    fn serde_dep_identifier() {
        let branch = DepIdentifier::Branch {
            name: "main".to_string(),
            rev: "b7954c3e9ce1d487b49489f5800f52f4b77b7351".to_string(),
            r#override: false,
        };
        let tag = DepIdentifier::Tag {
            name: "v0.1.0".to_string(),
            rev: "b7954c3e9ce1d487b49489f5800f52f4b77b7351".to_string(),
            r#override: false,
        };
        let rev = DepIdentifier::Rev {
            rev: "b7954c3e9ce1d487b49489f5800f52f4b77b7351".to_string(),
            r#override: false,
        };
        let branch_str = serde_json::to_string(&branch).unwrap();
        let tag_str = serde_json::to_string(&tag).unwrap();
        let rev_str = serde_json::to_string(&rev).unwrap();
        assert_eq!(
            branch_str,
            r#"{"branch":{"name":"main","rev":"b7954c3e9ce1d487b49489f5800f52f4b77b7351"}}"#
        );
        assert_eq!(
            tag_str,
            r#"{"tag":{"name":"v0.1.0","rev":"b7954c3e9ce1d487b49489f5800f52f4b77b7351"}}"#
        );
        assert_eq!(rev_str, r#"{"rev":"b7954c3e9ce1d487b49489f5800f52f4b77b7351"}"#);
        // Deserializing the emitted JSON must reproduce the original values
        // (with `r#override` defaulting back to `false`).
        let branch_de: DepIdentifier = serde_json::from_str(&branch_str).unwrap();
        let tag_de: DepIdentifier = serde_json::from_str(&tag_str).unwrap();
        let rev_de: DepIdentifier = serde_json::from_str(&rev_str).unwrap();
        assert_eq!(branch, branch_de);
        assert_eq!(tag, tag_de);
        assert_eq!(rev, rev_de);
    }

    /// Checks that [`Lockfile::write`] emits dependencies sorted by path, and that a
    /// read-modify-write cycle preserves the ordering.
    #[test]
    fn test_write_ordered_deps() {
        let dir = tempdir().unwrap();
        let mut lockfile = Lockfile::new(dir.path());
        // Insert deliberately out of order to exercise the sort in `write`.
        lockfile.insert(
            PathBuf::from("z_dep"),
            DepIdentifier::Rev { rev: "3".to_string(), r#override: false },
        );
        lockfile.insert(
            PathBuf::from("a_dep"),
            DepIdentifier::Rev { rev: "1".to_string(), r#override: false },
        );
        lockfile.insert(
            PathBuf::from("c_dep"),
            DepIdentifier::Rev { rev: "2".to_string(), r#override: false },
        );
        let _ = lockfile.write();
        let contents = fs::read_to_string(lockfile.lockfile_path).unwrap();
        let expected = r#"{
  "a_dep": {
    "rev": "1"
  },
  "c_dep": {
    "rev": "2"
  },
  "z_dep": {
    "rev": "3"
  }
}"#;
        assert_eq!(contents.trim(), expected.trim());
        // Re-read the written file, add another dep, and check ordering still holds.
        let mut lockfile = Lockfile::new(dir.path());
        lockfile.read().unwrap();
        lockfile.insert(
            PathBuf::from("x_dep"),
            DepIdentifier::Rev { rev: "4".to_string(), r#override: false },
        );
        let _ = lockfile.write();
        let contents = fs::read_to_string(lockfile.lockfile_path).unwrap();
        let expected = r#"{
  "a_dep": {
    "rev": "1"
  },
  "c_dep": {
    "rev": "2"
  },
  "x_dep": {
    "rev": "4"
  },
  "z_dep": {
    "rev": "3"
  }
}"#;
        assert_eq!(contents.trim(), expected.trim());
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/coverage.rs | crates/forge/src/coverage.rs | //! Coverage reports.
use alloy_primitives::map::{HashMap, HashSet};
use comfy_table::{
Attribute, Cell, Color, Row, Table, modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN,
};
use evm_disassembler::disassemble_bytes;
use foundry_common::{fs, shell};
use semver::Version;
use std::{
collections::hash_map,
io::Write,
path::{Path, PathBuf},
};
pub use foundry_evm::coverage::*;
/// A coverage reporter.
pub trait CoverageReporter {
    /// Returns a debug string for the reporter.
    fn name(&self) -> &'static str;

    /// Returns `true` if the reporter needs source maps for the final report.
    ///
    /// Defaults to `false`; in this module only the bytecode reporter overrides it.
    fn needs_source_maps(&self) -> bool {
        false
    }

    /// Runs the reporter.
    fn report(&mut self, report: &CoverageReport) -> eyre::Result<()>;
}
/// A simple summary reporter that prints the coverage results in a table.
///
/// One row per file plus a final "Total" row.
pub struct CoverageSummaryReporter {
    /// The summary table.
    table: Table,
    /// The total coverage of the entire project.
    total: CoverageSummary,
}
impl Default for CoverageSummaryReporter {
    fn default() -> Self {
        let mut table = Table::new();
        // Markdown preset for markdown shells, rounded UTF-8 corners otherwise.
        if shell::is_markdown() {
            table.load_preset(ASCII_MARKDOWN);
        } else {
            table.apply_modifier(UTF8_ROUND_CORNERS);
        }
        let header = vec![
            Cell::new("File"),
            Cell::new("% Lines"),
            Cell::new("% Statements"),
            Cell::new("% Branches"),
            Cell::new("% Funcs"),
        ];
        table.set_header(header);
        Self { total: CoverageSummary::default(), table }
    }
}
impl CoverageSummaryReporter {
    /// Appends one row — name plus four formatted hit/count cells — to the summary table.
    fn add_row(&mut self, name: impl Into<Cell>, summary: CoverageSummary) {
        let mut row = Row::new();
        row.add_cell(name.into());
        row.add_cell(format_cell(summary.line_hits, summary.line_count));
        row.add_cell(format_cell(summary.statement_hits, summary.statement_count));
        row.add_cell(format_cell(summary.branch_hits, summary.branch_count));
        row.add_cell(format_cell(summary.function_hits, summary.function_count));
        self.table.add_row(row);
    }
}
impl CoverageReporter for CoverageSummaryReporter {
    fn name(&self) -> &'static str {
        "summary"
    }

    /// Accumulates a per-file row for every file in the report, then appends the
    /// project-wide "Total" row and prints the table.
    fn report(&mut self, report: &CoverageReport) -> eyre::Result<()> {
        for (file_path, file_summary) in report.summary_by_file() {
            self.total.merge(&file_summary);
            self.add_row(file_path.display(), file_summary);
        }
        self.add_row("Total", self.total.clone());
        sh_println!("\n{}", self.table)?;
        Ok(())
    }
}
/// Formats a `hits / total` pair as a percentage cell, colored by coverage level
/// (grey + dim when there is nothing to cover, red below 50%, yellow below 75%,
/// green otherwise).
fn format_cell(hits: usize, total: usize) -> Cell {
    let ratio = if total == 0 { 1. } else { hits as f64 / total as f64 };
    let color = if total == 0 {
        Color::Grey
    } else if ratio < 0.5 {
        Color::Red
    } else if ratio < 0.75 {
        Color::Yellow
    } else {
        Color::Green
    };
    let mut cell = Cell::new(format!("{:.2}% ({hits}/{total})", ratio * 100.)).fg(color);
    if total == 0 {
        cell = cell.add_attribute(Attribute::Dim);
    }
    cell
}
/// Writes the coverage report in [LCOV]'s [tracefile format].
///
/// [LCOV]: https://github.com/linux-test-project/lcov
/// [tracefile format]: https://man.archlinux.org/man/geninfo.1.en#TRACEFILE_FORMAT
pub struct LcovReporter {
    /// Output path of the tracefile.
    path: PathBuf,
    /// LCOV format version to emit; the function-record layout differs across versions.
    version: Version,
}
impl LcovReporter {
    /// Create a new LCOV reporter.
    ///
    /// `version` selects which tracefile record flavor `report` emits.
    pub fn new(path: PathBuf, version: Version) -> Self {
        Self { path, version }
    }
}
impl CoverageReporter for LcovReporter {
    fn name(&self) -> &'static str {
        "lcov"
    }

    /// Writes one LCOV record per source file, choosing the function-record format
    /// (`FN`/`FNDA` vs `FNL`/`FNA`) based on the configured LCOV version.
    fn report(&mut self, report: &CoverageReport) -> eyre::Result<()> {
        let mut out = std::io::BufWriter::new(fs::create_file(&self.path)?);
        // Global function index; LCOV >= 2.2 identifies functions by index rather than name.
        let mut fn_index = 0usize;
        for (path, items) in report.items_by_file() {
            let summary = CoverageSummary::from_items(items.iter().copied());
            // TN/SF open a new record for this source file.
            writeln!(out, "TN:")?;
            writeln!(out, "SF:{}", path.display())?;
            // At most one DA entry may be emitted per source line.
            let mut recorded_lines = HashSet::new();
            for item in items {
                let line = item.loc.lines.start;
                // `lines` is half-open, so we need to subtract 1 to get the last included line.
                let end_line = item.loc.lines.end - 1;
                let hits = item.hits;
                match item.kind {
                    CoverageItemKind::Function { ref name } => {
                        let name = format!("{}.{name}", item.loc.contract_name);
                        if self.version >= Version::new(2, 2, 0) {
                            // v2.2 changed the FN format.
                            writeln!(out, "FNL:{fn_index},{line},{end_line}")?;
                            writeln!(out, "FNA:{fn_index},{hits},{name}")?;
                            fn_index += 1;
                        } else if self.version >= Version::new(2, 0, 0) {
                            // v2.0 added end_line to FN.
                            writeln!(out, "FN:{line},{end_line},{name}")?;
                            writeln!(out, "FNDA:{hits},{name}")?;
                        } else {
                            writeln!(out, "FN:{line},{name}")?;
                            writeln!(out, "FNDA:{hits},{name}")?;
                        }
                    }
                    // Add lines / statement hits only once.
                    CoverageItemKind::Line | CoverageItemKind::Statement => {
                        if recorded_lines.insert(line) {
                            writeln!(out, "DA:{line},{hits}")?;
                        }
                    }
                    CoverageItemKind::Branch { branch_id, path_id, .. } => {
                        // `-` marks a branch that was never taken.
                        let hits_str = if hits == 0 { "-" } else { &hits.to_string() };
                        writeln!(out, "BRDA:{line},{branch_id},{path_id},{hits_str}")?;
                    }
                }
            }
            // Function summary
            writeln!(out, "FNF:{}", summary.function_count)?;
            writeln!(out, "FNH:{}", summary.function_hits)?;
            // Line summary
            writeln!(out, "LF:{}", summary.line_count)?;
            writeln!(out, "LH:{}", summary.line_hits)?;
            // Branch summary
            writeln!(out, "BRF:{}", summary.branch_count)?;
            writeln!(out, "BRH:{}", summary.branch_hits)?;
            writeln!(out, "end_of_record")?;
        }
        out.flush()?;
        sh_println!("Wrote LCOV report.")?;
        Ok(())
    }
}
/// A super verbose reporter for debugging coverage while it is still unstable.
///
/// Prints every coverage item and every creation/runtime anchor to the shell.
pub struct DebugReporter;
impl CoverageReporter for DebugReporter {
    fn name(&self) -> &'static str {
        "debug"
    }

    fn report(&mut self, report: &CoverageReport) -> eyre::Result<()> {
        // Dump every coverage item per file, rendered against the file's source text.
        for (path, items) in report.items_by_file() {
            let src = fs::read_to_string(path)?;
            sh_println!("{}:", path.display())?;
            for item in items {
                sh_println!("- {}", item.fmt_with_source(Some(&src)))?;
            }
            sh_println!()?;
        }
        // Dump creation- and runtime-code anchors along with the item each one maps to.
        for (contract_id, (cta, rta)) in &report.anchors {
            if cta.is_empty() && rta.is_empty() {
                continue;
            }
            sh_println!("Anchors for {contract_id}:")?;
            // Tag each anchor with whether it came from the runtime-code set.
            let anchors = cta
                .iter()
                .map(|anchor| (false, anchor))
                .chain(rta.iter().map(|anchor| (true, anchor)));
            for (is_runtime, anchor) in anchors {
                let kind = if is_runtime { " runtime" } else { "creation" };
                sh_println!(
                    "- {kind} {anchor}: {}",
                    report
                        .analyses
                        .get(&contract_id.version)
                        .and_then(|items| items.get(anchor.item_id))
                        .map_or_else(|| "None".to_owned(), |item| item.to_string())
                )?;
            }
            sh_println!()?;
        }
        Ok(())
    }
}
/// Reporter that disassembles instrumented bytecode and writes per-contract `.asm` dumps
/// annotated with hit counts and source locations.
pub struct BytecodeReporter {
    /// Project root; relative source paths are resolved against it.
    root: PathBuf,
    /// Directory the `.asm` files are written to.
    destdir: PathBuf,
}
impl BytecodeReporter {
    /// Creates a new bytecode reporter writing `.asm` files under `destdir`.
    pub fn new(root: PathBuf, destdir: PathBuf) -> Self {
        Self { root, destdir }
    }
}
impl CoverageReporter for BytecodeReporter {
    fn name(&self) -> &'static str {
        "bytecode"
    }

    // Source maps are required to annotate each opcode with its source location.
    fn needs_source_maps(&self) -> bool {
        true
    }

    /// Disassembles each contract's bytecode and writes a `<contract>.asm` file where every
    /// opcode is prefixed with its hit count and suffixed with its source location, when known.
    fn report(&mut self, report: &CoverageReport) -> eyre::Result<()> {
        use std::fmt::Write;

        fs::create_dir_all(&self.destdir)?;

        let no_source_elements = Vec::new();
        let mut line_number_cache = LineNumberCache::new(self.root.clone());

        for (contract_id, hits) in &report.bytecode_hits {
            let ops = disassemble_bytes(hits.bytecode().to_vec())?;
            let mut formatted = String::new();

            // Fall back to an empty source map when none was recorded for this contract.
            let source_elements =
                report.source_maps.get(contract_id).map(|sm| &sm.1).unwrap_or(&no_source_elements);

            for (code, source_element) in std::iter::zip(ops.iter(), source_elements) {
                // `[NNN]` hit marker, or blank padding of the same width when never hit.
                let hits = hits
                    .get(code.offset)
                    .map(|h| format!("[{h:03}]"))
                    .unwrap_or("     ".to_owned());
                let source_id = source_element.index();
                let source_path = source_id.and_then(|i| {
                    report.source_paths.get(&(contract_id.version.clone(), i as usize))
                });

                let code = format!("{code:?}");
                let start = source_element.offset() as usize;
                let end = (source_element.offset() + source_element.length()) as usize;

                if let Some(source_path) = source_path {
                    // Resolve byte offsets to (line, column) pairs for readability.
                    let (sline, spos) = line_number_cache.get_position(source_path, start)?;
                    let (eline, epos) = line_number_cache.get_position(source_path, end)?;
                    writeln!(
                        formatted,
                        "{} {:40} // {}: {}:{}-{}:{} ({}-{})",
                        hits,
                        code,
                        source_path.display(),
                        sline,
                        spos,
                        eline,
                        epos,
                        start,
                        end
                    )?;
                } else if let Some(source_id) = source_id {
                    // Source id known but path unresolved: emit the raw source id.
                    writeln!(formatted, "{hits} {code:40} // SRCID{source_id}: ({start}-{end})")?;
                } else {
                    writeln!(formatted, "{hits} {code:40}")?;
                }
            }
            fs::write(
                self.destdir.join(&*contract_id.contract_name).with_extension("asm"),
                formatted,
            )?;
        }

        Ok(())
    }
}
/// Cache line number offsets for source files
struct LineNumberCache {
    /// Root all relative source paths are resolved against.
    root: PathBuf,
    /// Per-file list of byte offsets at which each line starts, computed lazily.
    line_offsets: HashMap<PathBuf, Vec<usize>>,
}
impl LineNumberCache {
    /// Creates an empty cache rooted at `root`.
    pub fn new(root: PathBuf) -> Self {
        Self { root, line_offsets: HashMap::default() }
    }

    /// Maps a byte `offset` within the file at `path` (relative to `root`) to a
    /// `(line, column)` pair, lazily computing and caching the per-line start offsets.
    pub fn get_position(&mut self, path: &Path, offset: usize) -> eyre::Result<(usize, usize)> {
        let line_offsets = match self.line_offsets.entry(path.to_path_buf()) {
            hash_map::Entry::Occupied(o) => o.into_mut(),
            hash_map::Entry::Vacant(v) => {
                let text = fs::read_to_string(self.root.join(path))?;
                let mut line_offsets = vec![0];
                for line in text.lines() {
                    // Each line's start offset is its position within `text`'s buffer.
                    let line_offset = line.as_ptr() as usize - text.as_ptr() as usize;
                    line_offsets.push(line_offset);
                }
                // NOTE(review): the first line's start offset (0) is pushed on top of the
                // seeded 0, so the vector begins with a duplicate; `binary_search` returns an
                // unspecified index among equal elements, so offset 0 may resolve to line 0
                // or line 1 — confirm whether any caller depends on the first-byte mapping.
                v.insert(line_offsets)
            }
        };
        // Exact match lands on a line start; otherwise back up to the containing line.
        let lo = match line_offsets.binary_search(&offset) {
            Ok(lo) => lo,
            Err(lo) => lo - 1,
        };
        // Column is 1-based within the line.
        let pos = offset - line_offsets.get(lo).unwrap() + 1;
        Ok((lo, pos))
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/lib.rs | crates/forge/src/lib.rs | //! Forge is a fast and flexible Ethereum testing framework.
#![cfg_attr(not(test), warn(unused_crate_dependencies))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[macro_use]
extern crate foundry_common;
#[macro_use]
extern crate tracing;
// Required for optional features (aws-kms, gcp-kms, turnkey)
#[cfg(any(feature = "aws-kms", feature = "gcp-kms", feature = "turnkey"))]
use foundry_wallets as _;
pub mod args;
pub mod cmd;
pub mod opts;
pub mod coverage;
pub mod gas_report;
pub mod multi_runner;
pub use multi_runner::{MultiContractRunner, MultiContractRunnerBuilder};
mod runner;
pub use runner::ContractRunner;
mod progress;
pub mod result;
// TODO: remove
pub use foundry_common::traits::TestFilter;
pub use foundry_evm::*;
mod lockfile;
pub use lockfile::{DepIdentifier, DepMap, FOUNDRY_LOCK, Lockfile};
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/runner.rs | crates/forge/src/runner.rs | //! The Forge test runner.
use crate::{
MultiContractRunner, TestFilter,
coverage::HitMaps,
fuzz::{BaseCounterExample, FuzzTestResult},
multi_runner::{TestContract, TestRunnerConfig},
progress::{TestsProgress, start_fuzz_progress},
result::{SuiteResult, TestResult, TestSetup},
};
use alloy_dyn_abi::{DynSolValue, JsonAbiExt};
use alloy_json_abi::Function;
use alloy_primitives::{Address, Bytes, U256, address, map::HashMap};
use eyre::Result;
use foundry_common::{TestFunctionExt, TestFunctionKind, contracts::ContractsByAddress};
use foundry_compilers::utils::canonicalized;
use foundry_config::{Config, FuzzCorpusConfig};
use foundry_evm::{
constants::CALLER,
decode::RevertDecoder,
executors::{
CallResult, EvmError, Executor, ITest, RawCallResult,
fuzz::FuzzedExecutor,
invariant::{
InvariantExecutor, InvariantFuzzError, check_sequence, replay_error, replay_run,
},
},
fuzz::{
BasicTxDetails, CallDetails, CounterExample, FuzzFixtures, fixture_name,
invariant::InvariantContract, strategies::EvmFuzzState,
},
traces::{TraceKind, TraceMode, load_contracts},
};
use itertools::Itertools;
use proptest::test_runner::{RngAlgorithm, TestError, TestRng, TestRunner};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use std::{
borrow::Cow,
cmp::min,
collections::BTreeMap,
path::{Path, PathBuf},
sync::Arc,
time::Instant,
};
use tokio::signal;
use tracing::Span;
/// When running tests, we deploy all external libraries present in the project. To avoid additional
/// libraries affecting nonces of senders used in tests, we are using separate address to
/// predeploy libraries.
///
/// `address(uint160(uint256(keccak256("foundry library deployer"))))`
///
/// This account is funded and used as the sender for library deployments during test setup.
pub const LIBRARY_DEPLOYER: Address = address!("0x1F95D37F27EA0dEA9C252FC09D5A6eaA97647353");
/// A type that executes all tests of a contract
pub struct ContractRunner<'a> {
    /// The name of the contract.
    name: &'a str,
    /// The data of the contract.
    contract: &'a TestContract,
    /// The EVM executor.
    executor: Executor,
    /// Overall test run progress.
    progress: Option<&'a TestsProgress>,
    /// The handle to the tokio runtime.
    tokio_handle: &'a tokio::runtime::Handle,
    /// The span of the contract.
    span: tracing::Span,
    /// The contract-level configuration.
    ///
    /// Borrowed from the parent runner; becomes owned if inline config overrides are applied.
    tcfg: Cow<'a, TestRunnerConfig>,
    /// The parent runner.
    mcr: &'a MultiContractRunner,
}
impl<'a> std::ops::Deref for ContractRunner<'a> {
    type Target = Cow<'a, TestRunnerConfig>;

    // Deref to the test-runner config so config fields (e.g. `sender`, `evm_opts`,
    // `inline_config`) can be accessed directly on the runner.
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.tcfg
    }
}
impl<'a> ContractRunner<'a> {
/// Creates a new runner for `contract`, borrowing the shared test-runner configuration
/// from the parent [`MultiContractRunner`].
pub fn new(
    name: &'a str,
    contract: &'a TestContract,
    executor: Executor,
    progress: Option<&'a TestsProgress>,
    tokio_handle: &'a tokio::runtime::Handle,
    span: Span,
    mcr: &'a MultiContractRunner,
) -> Self {
    Self {
        name,
        contract,
        executor,
        progress,
        tokio_handle,
        span,
        // Borrowed; upgraded to an owned copy only if inline config overrides apply.
        tcfg: Cow::Borrowed(&mcr.tcfg),
        mcr,
    }
}
/// Deploys the test contract inside the runner from the sending account, and optionally runs
/// the `setUp` function on the test contract.
///
/// Setup errors whose message contains "skipped" are reported as skips, everything else as
/// failures.
pub fn setup(&mut self, call_setup: bool) -> TestSetup {
    match self._setup(call_setup) {
        Ok(setup) => setup,
        Err(err) => {
            let msg = err.to_string();
            if msg.contains("skipped") {
                TestSetup::skipped(msg)
            } else {
                TestSetup::failed(msg)
            }
        }
    }
}
/// Performs the actual setup work for [`ContractRunner::setup`]: applies inline config,
/// funds the relevant accounts, deploys project libraries and the test contract, and
/// optionally invokes `setUp()`.
///
/// Deployment and `setUp` reverts are reported via the returned [`TestSetup`]'s `reason`
/// field rather than as an `Err`.
fn _setup(&mut self, call_setup: bool) -> Result<TestSetup> {
    trace!(call_setup, "setting up");
    self.apply_contract_inline_config()?;
    // We max out their balance so that they can deploy and make calls.
    self.executor.set_balance(self.sender, U256::MAX)?;
    self.executor.set_balance(CALLER, U256::MAX)?;
    // We set the nonce of the deployer accounts to 1 to get the same addresses as DappTools.
    self.executor.set_nonce(self.sender, 1)?;
    // Deploy libraries.
    self.executor.set_balance(LIBRARY_DEPLOYER, U256::MAX)?;
    let mut result = TestSetup::default();
    for code in &self.mcr.libs_to_deploy {
        let deploy_result = self.executor.deploy(
            LIBRARY_DEPLOYER,
            code.clone(),
            U256::ZERO,
            Some(&self.mcr.revert_decoder),
        );
        // Record deployed library address.
        if let Ok(deployed) = &deploy_result {
            result.deployed_libs.push(deployed.address);
        }
        let (raw, reason) = RawCallResult::from_evm_result(deploy_result.map(Into::into))?;
        result.extend(raw, TraceKind::Deployment);
        if reason.is_some() {
            // Abort setup early: a failed library deployment is reported via `reason`.
            debug!(?reason, "deployment of library failed");
            result.reason = reason;
            return Ok(result);
        }
    }
    // Predict the test contract's address from the sender's current nonce.
    let address = self.sender.create(self.executor.get_nonce(self.sender)?);
    result.address = address;
    // Set the contracts initial balance before deployment, so it is available during
    // construction
    self.executor.set_balance(address, self.initial_balance())?;
    // Deploy the test contract
    let deploy_result = self.executor.deploy(
        self.sender,
        self.contract.bytecode.clone(),
        U256::ZERO,
        Some(&self.mcr.revert_decoder),
    );
    result.deployment_failure = deploy_result.is_err();
    if let Ok(dr) = &deploy_result {
        debug_assert_eq!(dr.address, address);
    }
    let (raw, reason) = RawCallResult::from_evm_result(deploy_result.map(Into::into))?;
    result.extend(raw, TraceKind::Deployment);
    if reason.is_some() {
        debug!(?reason, "deployment of test contract failed");
        result.reason = reason;
        return Ok(result);
    }
    // Reset `self.sender`s, `CALLER`s and `LIBRARY_DEPLOYER`'s balance to the initial balance.
    self.executor.set_balance(self.sender, self.initial_balance())?;
    self.executor.set_balance(CALLER, self.initial_balance())?;
    self.executor.set_balance(LIBRARY_DEPLOYER, self.initial_balance())?;
    self.executor.deploy_create2_deployer()?;
    // Optionally call the `setUp` function
    if call_setup {
        trace!("calling setUp");
        let res = self.executor.setup(None, address, Some(&self.mcr.revert_decoder));
        let (raw, reason) = RawCallResult::from_evm_result(res)?;
        result.extend(raw, TraceKind::Setup);
        result.reason = reason;
    }
    // Collect fuzz fixtures declared on the deployed test contract.
    result.fuzz_fixtures = self.fuzz_fixtures(address);
    Ok(result)
}
/// The configured initial balance for test accounts and the test contract.
fn initial_balance(&self) -> U256 {
    self.evm_opts.initial_balance
}
/// Configures this runner with the inline configuration for the contract.
///
/// No-op when the contract declares no inline config.
fn apply_contract_inline_config(&mut self) -> Result<()> {
    if self.inline_config.contains_contract(self.name) {
        let new_config = Arc::new(self.inline_config(None)?);
        self.tcfg.to_mut().reconfigure_with(new_config);
        // Preserve the existing tracer across the executor reconfiguration below.
        let prev_tracer = self.executor.inspector_mut().tracer.take();
        self.tcfg.configure_executor(&mut self.executor);
        // Don't set tracer here.
        self.executor.inspector_mut().tracer = prev_tracer;
    }
    Ok(())
}
/// Returns the configuration for a contract or function.
///
/// Merges any inline (in-source) configuration for `func` — or for the contract as a whole
/// when `func` is `None` — on top of the base config.
fn inline_config(&self, func: Option<&Function>) -> Result<Config> {
    let function = func.map_or("", |f| f.name.as_str());
    let merged = self.mcr.inline_config.merge(self.name, function, &self.config);
    Ok(merged.extract::<Config>()?)
}
/// Collect fixtures from test contract.
///
/// Fixtures can be defined:
/// - as storage arrays in test contract, prefixed with `fixture`
/// - as functions prefixed with `fixture` and followed by parameter name to be fuzzed
///
/// Storage array fixtures:
/// `uint256[] public fixture_amount = [1, 2, 3];`
/// define an array of uint256 values to be used for fuzzing `amount` named parameter in scope
/// of the current test.
///
/// Function fixtures:
/// `function fixture_owner() public returns (address[] memory){}`
/// returns an array of addresses to be used for fuzzing `owner` named parameter in scope of the
/// current test.
fn fuzz_fixtures(&mut self, address: Address) -> FuzzFixtures {
    let mut fixtures = HashMap::default();
    let fixture_functions = self.contract.abi.functions().filter(|func| func.is_fixture());
    for func in fixture_functions {
        if func.inputs.is_empty() {
            // Read fixtures declared as functions.
            if let Ok(CallResult { raw: _, decoded_result }) =
                self.executor.call(CALLER, address, func, &[], U256::ZERO, None)
            {
                fixtures.insert(fixture_name(func.name.clone()), decoded_result);
            }
        } else {
            // For reading fixtures from storage arrays we collect values by calling the
            // function with incremented indexes until there's an error.
            let mut vals = Vec::new();
            let mut index = 0;
            loop {
                if let Ok(CallResult { raw: _, decoded_result }) = self.executor.call(
                    CALLER,
                    address,
                    func,
                    &[DynSolValue::Uint(U256::from(index), 256)],
                    U256::ZERO,
                    None,
                ) {
                    vals.push(decoded_result);
                } else {
                    // No result returned for this index, we reached the end of storage
                    // array or the function is not a valid fixture.
                    break;
                }
                index += 1;
            }
            // Collected values are exposed as a single array fixture.
            fixtures.insert(fixture_name(func.name.clone()), DynSolValue::Array(vals));
        };
    }
    FuzzFixtures::new(fixtures)
}
/// Runs all tests for a contract whose names match the provided regular expression.
///
/// Validates `setUp`/`afterInvariant` declarations, executes setup once, then runs all
/// matching test functions (in parallel via rayon) and aggregates them into a
/// [`SuiteResult`]. Early-exits on setup failure, multiple setup functions, or
/// removed `testFail*` tests, returning a single synthetic failure in those cases.
pub fn run_tests(mut self, filter: &dyn TestFilter) -> SuiteResult {
    let start = Instant::now();
    let mut warnings = Vec::new();
    // Check if `setUp` function with valid signature declared.
    let setup_fns: Vec<_> =
        self.contract.abi.functions().filter(|func| func.name.is_setup()).collect();
    // Only call setup when there is exactly one correctly-cased `setUp`.
    let call_setup = setup_fns.len() == 1 && setup_fns[0].name == "setUp";
    // There is a single miss-cased `setUp` function, so we add a warning
    for &setup_fn in &setup_fns {
        if setup_fn.name != "setUp" {
            warnings.push(format!(
                "Found invalid setup function \"{}\" did you mean \"setUp()\"?",
                setup_fn.signature()
            ));
        }
    }
    // There are multiple setUp function, so we return a single test result for `setUp`
    if setup_fns.len() > 1 {
        return SuiteResult::new(
            start.elapsed(),
            [("setUp()".to_string(), TestResult::fail("multiple setUp functions".to_string()))]
                .into(),
            warnings,
        );
    }
    // Check if `afterInvariant` function with valid signature declared.
    let after_invariant_fns: Vec<_> =
        self.contract.abi.functions().filter(|func| func.name.is_after_invariant()).collect();
    if after_invariant_fns.len() > 1 {
        // Return a single test result failure if multiple functions declared.
        return SuiteResult::new(
            start.elapsed(),
            [(
                "afterInvariant()".to_string(),
                TestResult::fail("multiple afterInvariant functions".to_string()),
            )]
            .into(),
            warnings,
        );
    }
    // Only call `afterInvariant` when the single declaration is correctly cased;
    // a miscased one produces a warning instead.
    let call_after_invariant = after_invariant_fns.first().is_some_and(|after_invariant_fn| {
        let match_sig = after_invariant_fn.name == "afterInvariant";
        if !match_sig {
            warnings.push(format!(
                "Found invalid afterInvariant function \"{}\" did you mean \"afterInvariant()\"?",
                after_invariant_fn.signature()
            ));
        }
        match_sig
    });
    // Invariant testing requires tracing to figure out what contracts were created.
    // We also want to disable `debug` for setup since we won't be using those traces.
    let has_invariants = self.contract.abi.functions().any(|func| func.is_invariant_test());
    let prev_tracer = self.executor.inspector_mut().tracer.take();
    if prev_tracer.is_some() || has_invariants {
        self.executor.set_tracing(TraceMode::Call);
    }
    let setup_time = Instant::now();
    let setup = self.setup(call_setup);
    debug!("finished setting up in {:?}", setup_time.elapsed());
    // Restore the tracer that was active before setup.
    self.executor.inspector_mut().tracer = prev_tracer;
    if setup.reason.is_some() {
        // The setup failed, so we return a single test result for `setUp`
        let fail_msg = if !setup.deployment_failure {
            "setUp()".to_string()
        } else {
            "constructor()".to_string()
        };
        return SuiteResult::new(
            start.elapsed(),
            [(fail_msg, TestResult::setup_result(setup))].into(),
            warnings,
        );
    }
    // Filter out functions sequentially since it's very fast and there is no need to do it
    // in parallel.
    let find_timer = Instant::now();
    let functions = self
        .contract
        .abi
        .functions()
        .filter(|func| filter.matches_test_function(func))
        .collect::<Vec<_>>();
    debug!(
        "Found {} test functions out of {} in {:?}",
        functions.len(),
        self.contract.abi.functions().count(),
        find_timer.elapsed(),
    );
    // Contracts created during setup are only needed for invariant tests.
    let identified_contracts = has_invariants.then(|| {
        load_contracts(setup.traces.iter().map(|(_, t)| &t.arena), &self.mcr.known_contracts)
    });
    let test_fail_functions =
        functions.iter().filter(|func| func.test_function_kind().is_any_test_fail());
    if test_fail_functions.clone().next().is_some() {
        // `testFail*` support has been removed; report every such function as failed.
        let fail = || {
            TestResult::fail("`testFail*` has been removed. Consider changing to test_Revert[If|When]_Condition and expecting a revert".to_string())
        };
        let test_results = test_fail_functions.map(|func| (func.signature(), fail())).collect();
        return SuiteResult::new(start.elapsed(), test_results, warnings);
    }
    let early_exit = &self.tcfg.early_exit;
    // With a progress bar active, listen for Ctrl+C so a user interrupt stops the run.
    if self.progress.is_some() {
        let interrupt = early_exit.clone();
        self.tokio_handle.spawn(async move {
            signal::ctrl_c().await.expect("Failed to listen for Ctrl+C");
            interrupt.record_ctrl_c();
        });
    }
    let test_results = functions
        .par_iter()
        .filter_map(|&func| {
            // Early exit if we're running with fail-fast and a test already failed.
            if early_exit.should_stop() {
                return None;
            }
            let start = Instant::now();
            let _guard = self.tokio_handle.enter();
            // Deferred guard: only enter the runner's span when this rayon worker
            // is not already inside it; the guard keeps it entered for the closure.
            let _guard;
            let current_span = tracing::Span::current();
            if current_span.is_none() || current_span.id() != self.span.id() {
                _guard = self.span.enter();
            }
            let sig = func.signature();
            let kind = func.test_function_kind();
            let _guard = debug_span!(
                "test",
                %kind,
                name = %if enabled!(tracing::Level::TRACE) { &sig } else { &func.name },
            )
            .entered();
            let mut res = FunctionRunner::new(&self, &setup).run(
                func,
                kind,
                call_after_invariant,
                identified_contracts.as_ref(),
            );
            res.duration = start.elapsed();
            // Record test failure for early exit (only triggers if fail-fast is enabled).
            if res.status.is_failure() {
                early_exit.record_failure();
            }
            Some((sig, res))
        })
        .collect::<BTreeMap<_, _>>();
    let duration = start.elapsed();
    SuiteResult::new(duration, test_results, warnings)
}
}
/// Executes a single test function, returning a [`TestResult`].
///
/// Holds `Cow` wrappers around the parent runner's config and executor so that
/// per-function inline configuration can clone-on-write without affecting siblings.
struct FunctionRunner<'a> {
    /// The function-level configuration.
    tcfg: Cow<'a, TestRunnerConfig>,
    /// The EVM executor.
    executor: Cow<'a, Executor>,
    /// The parent runner.
    cr: &'a ContractRunner<'a>,
    /// The address of the test contract.
    address: Address,
    /// The test setup result.
    setup: &'a TestSetup,
    /// The test result. Returned after running the test.
    result: TestResult,
}
impl<'a> std::ops::Deref for FunctionRunner<'a> {
    type Target = Cow<'a, TestRunnerConfig>;

    /// Forwards to the function-level test configuration, so config fields can be
    /// read directly off the runner (e.g. `self.sender`, `self.inline_config`).
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        let Self { tcfg, .. } = self;
        tcfg
    }
}
impl<'a> FunctionRunner<'a> {
/// Builds a runner for a single test function from the parent contract runner
/// and its setup result.
fn new(cr: &'a ContractRunner<'a>, setup: &'a TestSetup) -> Self {
    // Re-borrow the parent's config when it is itself borrowed; clone only
    // when the parent owns one, preserving the parent's Cow state.
    let tcfg = match &cr.tcfg {
        Cow::Borrowed(tcfg) => Cow::Borrowed(*tcfg),
        Cow::Owned(tcfg) => Cow::Owned(tcfg.clone()),
    };
    Self {
        tcfg,
        executor: Cow::Borrowed(&cr.executor),
        cr,
        address: setup.address,
        setup,
        result: TestResult::new(setup),
    }
}
/// Returns the revert decoder shared by the multi-contract runner.
fn revert_decoder(&self) -> &'a RevertDecoder {
    let mcr = &self.cr.mcr;
    &mcr.revert_decoder
}
/// Configures this runner with the inline configuration for the contract.
///
/// No-op when the function has no inline config; otherwise reconfigures the
/// (clone-on-write) function-level config and executor.
///
/// # Errors
/// Propagates any error from resolving the function's inline configuration.
fn apply_function_inline_config(&mut self, func: &Function) -> Result<()> {
    if !self.inline_config.contains_function(self.cr.name, &func.name) {
        return Ok(());
    }
    let new_config = Arc::new(self.cr.inline_config(Some(func))?);
    self.tcfg.to_mut().reconfigure_with(new_config);
    self.tcfg.configure_executor(self.executor.to_mut());
    Ok(())
}
/// Entry point: applies inline config, then dispatches to the runner matching
/// the test function's kind. Consumes `self` and returns the finished result.
fn run(
    mut self,
    func: &Function,
    kind: TestFunctionKind,
    call_after_invariant: bool,
    identified_contracts: Option<&ContractsByAddress>,
) -> TestResult {
    // Inline config errors fail the test immediately.
    if let Err(e) = self.apply_function_inline_config(func) {
        self.result.single_fail(Some(e.to_string()));
        return self.result;
    }
    match kind {
        TestFunctionKind::UnitTest { .. } => self.run_unit_test(func),
        TestFunctionKind::FuzzTest { .. } => self.run_fuzz_test(func),
        TestFunctionKind::TableTest => self.run_table_test(func),
        TestFunctionKind::InvariantTest => {
            let test_bytecode = &self.cr.contract.bytecode;
            // `identified_contracts` is always `Some` for invariant tests
            // (populated by `run_tests` when the suite has invariants).
            self.run_invariant_test(
                func,
                call_after_invariant,
                identified_contracts.unwrap(),
                test_bytecode,
            )
        }
        _ => unreachable!(),
    }
}
/// Runs a single unit test.
///
/// Applies before test txes (if any), runs current test and returns the `TestResult`.
///
/// Before test txes are applied in order and state modifications committed to the EVM database
/// (therefore the unit test call will be made on modified state).
/// State modifications of before test txes and unit test function call are discarded after
/// test ends, similar to `eth_call`.
fn run_unit_test(mut self, func: &Function) -> TestResult {
    // Prepare unit test execution.
    if self.prepare_test(func).is_err() {
        return self.result;
    }
    // Run current unit test. An execution revert yields a raw result plus a
    // decoded reason; skip/other errors short-circuit with the matching status.
    let (mut raw_call_result, reason) = match self.executor.call(
        self.sender,
        self.address,
        func,
        &[],
        U256::ZERO,
        Some(self.revert_decoder()),
    ) {
        Ok(res) => (res.raw, None),
        Err(EvmError::Execution(err)) => (err.raw, Some(err.reason)),
        Err(EvmError::Skip(reason)) => {
            self.result.single_skip(reason);
            return self.result;
        }
        Err(err) => {
            self.result.single_fail(Some(err.to_string()));
            return self.result;
        }
    };
    // Success additionally requires post-call assertions (e.g. `failed()` state) to hold.
    let success =
        self.executor.is_raw_call_mut_success(self.address, &mut raw_call_result, false);
    self.result.single_result(success, reason, raw_call_result);
    self.result
}
/// Runs a table test.
/// The parameters dataset (table) is created from defined parameter fixtures, therefore each
/// test table parameter should have the same number of fixtures defined.
/// E.g. for table test
/// - `table_test(uint256 amount, bool swap)` fixtures are defined as
/// - `uint256[] public fixtureAmount = [2, 5]`
/// - `bool[] public fixtureSwap = [true, false]` The `table_test` is then called with the pair
///   of args `(2, true)` and `(5, false)`.
fn run_table_test(mut self, func: &Function) -> TestResult {
    // Prepare unit test execution.
    if self.prepare_test(func).is_err() {
        return self.result;
    }
    // Extract and validate fixtures for the first table test parameter.
    // Its fixture count defines the number of rows every parameter must match.
    let Some(first_param) = func.inputs.first() else {
        self.result.single_fail(Some("Table test should have at least one parameter".into()));
        return self.result;
    };
    let Some(first_param_fixtures) =
        &self.setup.fuzz_fixtures.param_fixtures(first_param.name())
    else {
        self.result.single_fail(Some("Table test should have fixtures defined".into()));
        return self.result;
    };
    if first_param_fixtures.is_empty() {
        self.result.single_fail(Some("Table test should have at least one fixture".into()));
        return self.result;
    }
    let fixtures_len = first_param_fixtures.len();
    // One column of fixture values per parameter; rows are formed index-wise.
    let mut table_fixtures = vec![&first_param_fixtures[..]];
    // Collect fixtures for remaining parameters.
    for param in &func.inputs[1..] {
        let param_name = param.name();
        let Some(fixtures) = &self.setup.fuzz_fixtures.param_fixtures(param.name()) else {
            self.result.single_fail(Some(format!("No fixture defined for param {param_name}")));
            return self.result;
        };
        // All columns must have the same number of rows.
        if fixtures.len() != fixtures_len {
            self.result.single_fail(Some(format!(
                "{} fixtures defined for {param_name} (expected {})",
                fixtures.len(),
                fixtures_len
            )));
            return self.result;
        }
        table_fixtures.push(&fixtures[..]);
    }
    let progress = start_fuzz_progress(
        self.cr.progress,
        self.cr.name,
        &func.name,
        None,
        fixtures_len as u32,
    );
    let mut result = FuzzTestResult::default();
    for i in 0..fixtures_len {
        // Honor fail-fast / Ctrl+C between rows.
        if self.tcfg.early_exit.should_stop() {
            return self.result;
        }
        // Increment progress bar.
        if let Some(progress) = progress.as_ref() {
            progress.inc(1);
        }
        // Row `i`: the i-th fixture of every column.
        let args = table_fixtures.iter().map(|row| row[i].clone()).collect_vec();
        let (mut raw_call_result, reason) = match self.executor.call(
            self.sender,
            self.address,
            func,
            &args,
            U256::ZERO,
            Some(self.revert_decoder()),
        ) {
            Ok(res) => (res.raw, None),
            Err(EvmError::Execution(err)) => (err.raw, Some(err.reason)),
            Err(EvmError::Skip(reason)) => {
                self.result.single_skip(reason);
                return self.result;
            }
            Err(err) => {
                self.result.single_fail(Some(err.to_string()));
                return self.result;
            }
        };
        // Accumulate per-row gas, logs, labels and coverage into the fuzz-style result.
        result.gas_by_case.push((raw_call_result.gas_used, raw_call_result.stipend));
        result.logs.extend(raw_call_result.logs.clone());
        result.labels.extend(raw_call_result.labels.clone());
        HitMaps::merge_opt(&mut result.line_coverage, raw_call_result.line_coverage.clone());
        let is_success =
            self.executor.is_raw_call_mut_success(self.address, &mut raw_call_result, false);
        // Record counterexample if test fails.
        if !is_success {
            result.counterexample =
                Some(CounterExample::Single(BaseCounterExample::from_fuzz_call(
                    Bytes::from(func.abi_encode_input(&args).unwrap()),
                    args,
                    raw_call_result.traces.clone(),
                )));
            result.reason = reason;
            result.traces = raw_call_result.traces;
            self.result.table_result(result);
            return self.result;
        }
        // If it's the last iteration and all other runs succeeded, then use last call result
        // for logs and traces.
        if i == fixtures_len - 1 {
            result.success = true;
            result.traces = raw_call_result.traces;
            self.result.table_result(result);
            return self.result;
        }
    }
    // Only reachable via early exit paths above; returns the result as-is.
    self.result
}
fn run_invariant_test(
mut self,
func: &Function,
call_after_invariant: bool,
identified_contracts: &ContractsByAddress,
test_bytecode: &Bytes,
) -> TestResult {
// First, run the test normally to see if it needs to be skipped.
if let Err(EvmError::Skip(reason)) = self.executor.call(
self.sender,
self.address,
func,
&[],
U256::ZERO,
Some(self.revert_decoder()),
) {
self.result.invariant_skip(reason);
return self.result;
};
let runner = self.invariant_runner();
let invariant_config = &self.config.invariant;
let mut executor = self.clone_executor();
// Enable edge coverage if running with coverage guided fuzzing or with edge coverage
// metrics (useful for benchmarking the fuzzer).
executor
.inspector_mut()
.collect_edge_coverage(invariant_config.corpus.collect_edge_coverage());
let mut config = invariant_config.clone();
let (failure_dir, failure_file) = test_paths(
&mut config.corpus,
invariant_config.failure_persist_dir.clone().unwrap(),
self.cr.name,
&func.name,
);
let mut evm = InvariantExecutor::new(
executor,
runner,
config,
identified_contracts,
&self.cr.mcr.known_contracts,
);
let invariant_contract = InvariantContract {
address: self.address,
invariant_function: func,
call_after_invariant,
abi: &self.cr.contract.abi,
};
let show_solidity = invariant_config.show_solidity;
let progress = start_fuzz_progress(
self.cr.progress,
self.cr.name,
&func.name,
invariant_config.timeout,
invariant_config.runs,
);
// Try to replay recorded failure if any.
if let Some(mut call_sequence) =
persisted_call_sequence(failure_file.as_path(), test_bytecode)
{
// Create calls from failed sequence and check if invariant still broken.
let txes = call_sequence
.iter_mut()
.map(|seq| {
seq.show_solidity = show_solidity;
BasicTxDetails {
warp: seq.warp,
roll: seq.roll,
sender: seq.sender.unwrap_or_default(),
call_details: CallDetails {
target: seq.addr.unwrap_or_default(),
calldata: seq.calldata.clone(),
},
}
})
.collect::<Vec<BasicTxDetails>>();
if let Ok((success, replayed_entirely)) = check_sequence(
self.clone_executor(),
&txes,
(0..min(txes.len(), invariant_config.depth as usize)).collect(),
invariant_contract.address,
invariant_contract.invariant_function.selector().to_vec().into(),
invariant_config.fail_on_revert,
invariant_contract.call_after_invariant,
) && !success
{
let warn = format!(
"Replayed invariant failure from {:?} file. \nRun `forge clean` or remove file to ignore failure and to continue invariant test campaign.",
failure_file.as_path()
);
if let Some(ref progress) = progress {
progress.set_prefix(format!("{}\n{warn}\n", &func.name));
} else {
let _ = sh_warn!("{warn}");
}
// If sequence still fails then replay error to collect traces and exit without
// executing new runs.
match replay_error(
evm.config(),
self.clone_executor(),
&txes,
None,
&invariant_contract,
&self.cr.mcr.known_contracts,
identified_contracts.clone(),
&mut self.result.logs,
&mut self.result.traces,
&mut self.result.line_coverage,
&mut self.result.deprecated_cheatcodes,
progress.as_ref(),
&self.tcfg.early_exit,
) {
Ok(replayed_call_sequence) => {
if !replayed_call_sequence.is_empty() {
call_sequence = replayed_call_sequence;
// Persist error in invariant failure dir.
record_invariant_failure(
failure_dir.as_path(),
failure_file.as_path(),
&call_sequence,
test_bytecode,
);
}
}
Err(err) => {
error!(%err, "Failed to replay invariant error");
}
}
self.result.invariant_replay_fail(
replayed_entirely,
&invariant_contract.invariant_function.name,
call_sequence,
);
return self.result;
}
}
let invariant_result = match evm.invariant_fuzz(
invariant_contract.clone(),
&self.setup.fuzz_fixtures,
self.build_fuzz_state(true),
progress.as_ref(),
&self.tcfg.early_exit,
) {
Ok(x) => x,
Err(e) => {
self.result.invariant_setup_fail(e);
return self.result;
}
};
// Merge coverage collected during invariant run with test setup coverage.
self.result.merge_coverages(invariant_result.line_coverage);
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | true |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/args.rs | crates/forge/src/args.rs | use crate::{
cmd::{cache::CacheSubcommands, generate::GenerateSubcommands, watch},
opts::{Forge, ForgeSubcommand},
};
use clap::{CommandFactory, Parser};
use clap_complete::generate;
use eyre::Result;
use foundry_cli::utils;
use foundry_common::shell;
use foundry_evm::inspectors::cheatcodes::{ForgeContext, set_execution_context};
/// Run the `forge` command line interface.
///
/// Initializes logging/utilities, parses CLI arguments, applies global
/// options, and dispatches to the selected subcommand.
pub fn run() -> Result<()> {
    setup()?;
    let cli = Forge::parse();
    cli.global.init()?;
    run_command(cli)
}
/// Setup the global logger and other utilities.
///
/// Must run before argument parsing so early output is captured; installs the
/// common process setup first, then the tracing subscriber.
pub fn setup() -> Result<()> {
    utils::common_setup();
    utils::subscriber();
    Ok(())
}
/// Run the subcommand.
///
/// First records the execution context (used by cheatcodes to know which forge
/// command is running), then dispatches to the subcommand's own `run`. Async
/// subcommands are driven via `global.block_on`; watch-capable subcommands
/// branch into their watcher when `--watch` was requested.
pub fn run_command(args: Forge) -> Result<()> {
    // Set the execution context based on the subcommand.
    let context = match &args.cmd {
        ForgeSubcommand::Test(_) => ForgeContext::Test,
        ForgeSubcommand::Coverage(_) => ForgeContext::Coverage,
        ForgeSubcommand::Snapshot(_) => ForgeContext::Snapshot,
        ForgeSubcommand::Script(cmd) => {
            // Scripts distinguish broadcast, resume and dry-run modes.
            if cmd.broadcast {
                ForgeContext::ScriptBroadcast
            } else if cmd.resume {
                ForgeContext::ScriptResume
            } else {
                ForgeContext::ScriptDryRun
            }
        }
        _ => ForgeContext::Unknown,
    };
    set_execution_context(context);
    let global = &args.global;
    // Run the subcommand.
    match args.cmd {
        ForgeSubcommand::Test(cmd) => {
            if cmd.is_watch() {
                global.block_on(watch::watch_test(cmd))
            } else {
                // JUnit / JSON output suppresses the human-readable failure listing.
                let silent = cmd.junit || shell::is_json();
                let outcome = global.block_on(cmd.run())?;
                outcome.ensure_ok(silent)
            }
        }
        ForgeSubcommand::Script(cmd) => global.block_on(cmd.run_script()),
        ForgeSubcommand::Coverage(cmd) => {
            if cmd.is_watch() {
                global.block_on(watch::watch_coverage(cmd))
            } else {
                global.block_on(cmd.run())
            }
        }
        ForgeSubcommand::Bind(cmd) => cmd.run(),
        ForgeSubcommand::Build(cmd) => {
            if cmd.is_watch() {
                global.block_on(watch::watch_build(cmd))
            } else {
                // Build returns its output; the CLI only cares about success.
                global.block_on(cmd.run()).map(drop)
            }
        }
        ForgeSubcommand::VerifyContract(args) => global.block_on(args.run()),
        ForgeSubcommand::VerifyCheck(args) => global.block_on(args.run()),
        ForgeSubcommand::VerifyBytecode(cmd) => global.block_on(cmd.run()),
        ForgeSubcommand::Clone(cmd) => global.block_on(cmd.run()),
        ForgeSubcommand::Cache(cmd) => match cmd.sub {
            CacheSubcommands::Clean(cmd) => cmd.run(),
            CacheSubcommands::Ls(cmd) => cmd.run(),
        },
        ForgeSubcommand::Create(cmd) => global.block_on(cmd.run()),
        ForgeSubcommand::Update(cmd) => cmd.run(),
        ForgeSubcommand::Install(cmd) => global.block_on(cmd.run()),
        ForgeSubcommand::Remove(cmd) => cmd.run(),
        ForgeSubcommand::Remappings(cmd) => cmd.run(),
        ForgeSubcommand::Init(cmd) => global.block_on(cmd.run()),
        ForgeSubcommand::Completions { shell } => {
            // Emit shell completion script for the requested shell to stdout.
            generate(shell, &mut Forge::command(), "forge", &mut std::io::stdout());
            Ok(())
        }
        ForgeSubcommand::Clean { root } => {
            let config = utils::load_config_with_root(root.as_deref())?;
            let project = config.project()?;
            config.cleanup(&project)?;
            Ok(())
        }
        ForgeSubcommand::Snapshot(cmd) => {
            if cmd.is_watch() {
                global.block_on(watch::watch_gas_snapshot(cmd))
            } else {
                global.block_on(cmd.run())
            }
        }
        ForgeSubcommand::Fmt(cmd) => {
            if cmd.is_watch() {
                global.block_on(watch::watch_fmt(cmd))
            } else {
                cmd.run()
            }
        }
        ForgeSubcommand::Config(cmd) => cmd.run(),
        ForgeSubcommand::Flatten(cmd) => cmd.run(),
        ForgeSubcommand::Inspect(cmd) => cmd.run(),
        ForgeSubcommand::Tree(cmd) => cmd.run(),
        ForgeSubcommand::Geiger(cmd) => cmd.run(),
        ForgeSubcommand::Doc(cmd) => {
            if cmd.is_watch() {
                global.block_on(watch::watch_doc(cmd))
            } else {
                // Doc returns a value; discard it and surface only errors.
                global.block_on(cmd.run())?;
                Ok(())
            }
        }
        ForgeSubcommand::Selectors { command } => global.block_on(command.run()),
        ForgeSubcommand::Generate(cmd) => match cmd.sub {
            GenerateSubcommands::Test(cmd) => cmd.run(),
        },
        ForgeSubcommand::Compiler(cmd) => cmd.run(),
        ForgeSubcommand::Soldeer(cmd) => global.block_on(cmd.run()),
        ForgeSubcommand::Eip712(cmd) => cmd.run(),
        ForgeSubcommand::BindJson(cmd) => cmd.run(),
        ForgeSubcommand::Lint(cmd) => cmd.run(),
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/result.rs | crates/forge/src/result.rs | //! Test outcomes.
use crate::{
MultiContractRunner,
fuzz::{BaseCounterExample, FuzzedCases},
gas_report::GasReport,
};
use alloy_primitives::{
Address, Log,
map::{AddressHashMap, HashMap},
};
use eyre::Report;
use foundry_common::{get_contract_name, get_file_name, shell};
use foundry_evm::{
core::Breakpoints,
coverage::HitMaps,
decode::SkipReason,
executors::{RawCallResult, invariant::InvariantMetrics},
fuzz::{CounterExample, FuzzCase, FuzzFixtures, FuzzTestResult},
traces::{CallTraceArena, CallTraceDecoder, TraceKind, Traces},
};
use serde::{Deserialize, Serialize};
use std::{
collections::{BTreeMap, HashMap as Map},
fmt::{self, Write},
time::Duration,
};
use yansi::Paint;
/// The aggregated result of a test run.
#[derive(Clone, Debug)]
pub struct TestOutcome {
    /// The results of all test suites by their identifier (`path:contract_name`).
    ///
    /// Essentially `identifier => signature => result`.
    pub results: BTreeMap<String, SuiteResult>,
    /// Whether to allow test failures without failing the entire test run.
    pub allow_failure: bool,
    /// The decoder used to decode traces and logs.
    ///
    /// This is `None` if traces and logs were not decoded.
    ///
    /// Note that `Address` fields only contain the last executed test case's data.
    pub last_run_decoder: Option<CallTraceDecoder>,
    /// The gas report, if requested.
    pub gas_report: Option<GasReport>,
    /// The runner used to execute the tests.
    pub runner: Option<MultiContractRunner>,
}
impl TestOutcome {
    /// Creates a new test outcome with the given results.
    pub fn new(
        runner: Option<MultiContractRunner>,
        results: BTreeMap<String, SuiteResult>,
        allow_failure: bool,
    ) -> Self {
        Self { results, allow_failure, last_run_decoder: None, gas_report: None, runner }
    }

    /// Creates a new empty test outcome.
    pub fn empty(runner: Option<MultiContractRunner>, allow_failure: bool) -> Self {
        Self::new(runner, BTreeMap::new(), allow_failure)
    }

    /// Returns an iterator over all individual succeeding tests and their names.
    pub fn successes(&self) -> impl Iterator<Item = (&String, &TestResult)> {
        self.tests().filter(|(_, t)| t.status.is_success())
    }

    /// Returns an iterator over all individual skipped tests and their names.
    pub fn skips(&self) -> impl Iterator<Item = (&String, &TestResult)> {
        self.tests().filter(|(_, t)| t.status.is_skipped())
    }

    /// Returns an iterator over all individual failing tests and their names.
    pub fn failures(&self) -> impl Iterator<Item = (&String, &TestResult)> {
        self.tests().filter(|(_, t)| t.status.is_failure())
    }

    /// Returns an iterator over all individual tests and their names.
    pub fn tests(&self) -> impl Iterator<Item = (&String, &TestResult)> {
        self.results.values().flat_map(|suite| suite.tests())
    }

    /// Flattens the test outcome into a list of individual tests.
    // TODO: Replace this with `tests` and make it return `TestRef<'_>`
    pub fn into_tests_cloned(&self) -> impl Iterator<Item = SuiteTestResult> + '_ {
        self.results
            .iter()
            .flat_map(|(file, suite)| {
                suite
                    .test_results
                    .iter()
                    .map(move |(sig, result)| (file.clone(), sig.clone(), result.clone()))
            })
            .map(|(artifact_id, signature, result)| SuiteTestResult {
                artifact_id,
                signature,
                result,
            })
    }

    /// Flattens the test outcome into a list of individual tests.
    pub fn into_tests(self) -> impl Iterator<Item = SuiteTestResult> {
        self.results
            .into_iter()
            .flat_map(|(file, suite)| {
                suite.test_results.into_iter().map(move |t| (file.clone(), t))
            })
            .map(|(artifact_id, (signature, result))| SuiteTestResult {
                artifact_id,
                signature,
                result,
            })
    }

    /// Returns the number of tests that passed.
    pub fn passed(&self) -> usize {
        self.successes().count()
    }

    /// Returns the number of tests that were skipped.
    pub fn skipped(&self) -> usize {
        self.skips().count()
    }

    /// Returns the number of tests that failed.
    pub fn failed(&self) -> usize {
        self.failures().count()
    }

    /// Sums up all the durations of all individual test suites.
    ///
    /// Note that this is not necessarily the wall clock time of the entire test run.
    pub fn total_time(&self) -> Duration {
        self.results.values().map(|suite| suite.duration).sum()
    }

    /// Formats the aggregated summary of all test suites into a string (for printing).
    pub fn summary(&self, wall_clock_time: Duration) -> String {
        let num_test_suites = self.results.len();
        let suites = if num_test_suites == 1 { "suite" } else { "suites" };
        let total_passed = self.passed();
        let total_failed = self.failed();
        let total_skipped = self.skipped();
        let total_tests = total_passed + total_failed + total_skipped;
        format!(
            "\nRan {} test {} in {:.2?} ({:.2?} CPU time): {} tests passed, {} failed, {} skipped ({} total tests)",
            num_test_suites,
            suites,
            wall_clock_time,
            self.total_time(),
            total_passed.green(),
            total_failed.red(),
            total_skipped.yellow(),
            total_tests
        )
    }

    /// Checks if there are any failures and failures are disallowed.
    ///
    /// On disallowed failures this prints a per-suite failure report (unless
    /// quiet/silent) and terminates the process with exit code 1.
    pub fn ensure_ok(&self, silent: bool) -> eyre::Result<()> {
        let outcome = self;
        let failures = outcome.failures().count();
        if outcome.allow_failure || failures == 0 {
            return Ok(());
        }
        if shell::is_quiet() || silent {
            std::process::exit(1);
        }
        sh_println!("\nFailing tests:")?;
        for (suite_name, suite) in &outcome.results {
            let failed = suite.failed();
            if failed == 0 {
                continue;
            }
            let term = if failed > 1 { "tests" } else { "test" };
            sh_println!("Encountered {failed} failing {term} in {suite_name}")?;
            for (name, result) in suite.failures() {
                sh_println!("{}", result.short_result(name))?;
            }
            sh_println!()?;
        }
        let successes = outcome.passed();
        sh_println!(
            "Encountered a total of {} failing tests, {} tests succeeded",
            failures.to_string().red(),
            successes.to_string().green()
        )?;
        // Show helpful hint for rerunning failed tests
        let test_word = if failures == 1 { "test" } else { "tests" };
        sh_println!(
            "\nTip: Run {} to retry only the {} failed {}",
            "`forge test --rerun`".cyan(),
            failures,
            test_word
        )?;
        std::process::exit(1);
    }

    /// Removes first test result, if any.
    pub fn remove_first(&mut self) -> Option<(String, String, TestResult)> {
        self.results.iter_mut().find_map(|(suite_name, suite)| {
            // `pop_first` removes and returns the first entry in a single lookup,
            // replacing the previous key-clone + `remove` + `unwrap` double lookup
            // and eliminating the unwrap panic path.
            suite
                .test_results
                .pop_first()
                .map(|(test_name, result)| (suite_name.clone(), test_name, result))
        })
    }
}
/// A set of test results for a single test suite, which is all the tests in a single contract.
#[derive(Clone, Debug, Serialize)]
pub struct SuiteResult {
    /// Wall clock time it took to execute all tests in this suite.
    #[serde(with = "foundry_common::serde_helpers::duration")]
    pub duration: Duration,
    /// Individual test results: `test fn signature -> TestResult`.
    pub test_results: BTreeMap<String, TestResult>,
    /// Generated warnings.
    pub warnings: Vec<String>,
}
impl SuiteResult {
    /// Creates a suite result from individual test results, appending a
    /// deprecated-cheatcode warning if any test in the suite used one.
    pub fn new(
        duration: Duration,
        test_results: BTreeMap<String, TestResult>,
        mut warnings: Vec<String>,
    ) -> Self {
        // Add deprecated cheatcodes warning, if any of them used in current test suite.
        // The HashMap deduplicates cheatcodes used by multiple tests.
        let mut deprecated_cheatcodes = HashMap::new();
        for test_result in test_results.values() {
            deprecated_cheatcodes.extend(test_result.deprecated_cheatcodes.clone());
        }
        if !deprecated_cheatcodes.is_empty() {
            let mut warning =
                "the following cheatcode(s) are deprecated and will be removed in future versions:"
                    .to_string();
            for (cheatcode, reason) in deprecated_cheatcodes {
                write!(warning, "\n  {cheatcode}").unwrap();
                if let Some(reason) = reason {
                    write!(warning, ": {reason}").unwrap();
                }
            }
            warnings.push(warning);
        }
        Self { duration, test_results, warnings }
    }

    /// Returns an iterator over all individual succeeding tests and their names.
    pub fn successes(&self) -> impl Iterator<Item = (&String, &TestResult)> {
        self.tests().filter(|(_, t)| t.status.is_success())
    }

    /// Returns an iterator over all individual skipped tests and their names.
    pub fn skips(&self) -> impl Iterator<Item = (&String, &TestResult)> {
        self.tests().filter(|(_, t)| t.status.is_skipped())
    }

    /// Returns an iterator over all individual failing tests and their names.
    pub fn failures(&self) -> impl Iterator<Item = (&String, &TestResult)> {
        self.tests().filter(|(_, t)| t.status.is_failure())
    }

    /// Returns the number of tests that passed.
    pub fn passed(&self) -> usize {
        self.successes().count()
    }

    /// Returns the number of tests that were skipped.
    pub fn skipped(&self) -> usize {
        self.skips().count()
    }

    /// Returns the number of tests that failed.
    pub fn failed(&self) -> usize {
        self.failures().count()
    }

    /// Iterator over all tests and their names
    pub fn tests(&self) -> impl Iterator<Item = (&String, &TestResult)> {
        self.test_results.iter()
    }

    /// Whether this test suite is empty.
    pub fn is_empty(&self) -> bool {
        self.test_results.is_empty()
    }

    /// The number of tests in this test suite.
    pub fn len(&self) -> usize {
        self.test_results.len()
    }

    /// Sums up all the durations of all individual tests in this suite.
    ///
    /// Note that this is not necessarily the wall clock time of the entire test suite.
    pub fn total_time(&self) -> Duration {
        self.test_results.values().map(|result| result.duration).sum()
    }

    /// Returns the summary of a single test suite.
    pub fn summary(&self) -> String {
        let failed = self.failed();
        let result = if failed == 0 { "ok".green() } else { "FAILED".red() };
        format!(
            "Suite result: {}. {} passed; {} failed; {} skipped; finished in {:.2?} ({:.2?} CPU time)",
            result,
            self.passed().green(),
            failed.red(),
            self.skipped().yellow(),
            self.duration,
            self.total_time(),
        )
    }
}
/// The result of a single test in a test suite.
///
/// This is flattened from a [`TestOutcome`].
#[derive(Clone, Debug)]
pub struct SuiteTestResult {
    /// The identifier of the artifact/contract in the form:
    /// `<artifact file name>:<contract name>`.
    pub artifact_id: String,
    /// The function signature of the Solidity test.
    pub signature: String,
    /// The result of the executed test.
    pub result: TestResult,
}
impl SuiteTestResult {
    /// Returns the gas used by the test, as reported by its test kind.
    pub fn gas_used(&self) -> u64 {
        self.result.kind.report().gas()
    }

    /// Returns the contract name of the artifact ID (the part after `:`).
    pub fn contract_name(&self) -> &str {
        get_contract_name(&self.artifact_id)
    }

    /// Returns the file name of the artifact ID (the part before `:`).
    pub fn file_name(&self) -> &str {
        get_file_name(&self.artifact_id)
    }
}
/// The status of a test.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
pub enum TestStatus {
    /// The test executed and passed.
    Success,
    /// The test failed. This is the default status so that results start
    /// pessimistic until explicitly marked otherwise.
    #[default]
    Failure,
    /// The test was skipped (e.g. marked as skipped during execution).
    Skipped,
}
impl TestStatus {
    /// Returns `true` if the test was successful.
    #[inline]
    pub fn is_success(self) -> bool {
        self == Self::Success
    }

    /// Returns `true` if the test failed.
    #[inline]
    pub fn is_failure(self) -> bool {
        self == Self::Failure
    }

    /// Returns `true` if the test was skipped.
    #[inline]
    pub fn is_skipped(self) -> bool {
        self == Self::Skipped
    }
}
/// The result of an executed test.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct TestResult {
    /// The test status, indicating whether the test case succeeded, failed, or was marked as
    /// skipped. This means that the transaction executed properly, the test was marked as
    /// skipped with vm.skip(), or that there was a revert and that the test was expected to
    /// fail (prefixed with `testFail`)
    pub status: TestStatus,
    /// If there was a revert, this field will be populated. Note that the test can
    /// still be successful (i.e self.success == true) when it's expected to fail.
    pub reason: Option<String>,
    /// Minimal reproduction test case for failing test
    pub counterexample: Option<CounterExample>,
    /// Any captured & parsed as strings logs along the test's execution which should
    /// be printed to the user.
    pub logs: Vec<Log>,
    /// The decoded DSTest logging events and Hardhat's `console.log` from [logs](Self::logs).
    /// Used for json output.
    pub decoded_logs: Vec<String>,
    /// What kind of test this was
    pub kind: TestKind,
    /// Traces
    pub traces: Traces,
    /// Additional traces to use for gas report.
    ///
    /// These are cleared after the gas report is analyzed.
    #[serde(skip)]
    pub gas_report_traces: Vec<Vec<CallTraceArena>>,
    /// Raw line coverage info
    #[serde(skip)]
    pub line_coverage: Option<HitMaps>,
    /// Labeled addresses
    #[serde(rename = "labeled_addresses")] // Backwards compatibility.
    pub labels: AddressHashMap<String>,
    /// Wall clock time it took to execute the test.
    #[serde(with = "foundry_common::serde_helpers::duration")]
    pub duration: Duration,
    /// pc breakpoint char map
    pub breakpoints: Breakpoints,
    /// Any captured gas snapshots along the test's execution which should be accumulated.
    pub gas_snapshots: BTreeMap<String, BTreeMap<String, String>>,
    /// Deprecated cheatcodes (mapped to their replacements, if any) used in current test.
    #[serde(skip)]
    pub deprecated_cheatcodes: HashMap<&'static str, Option<&'static str>>,
}
impl fmt::Display for TestResult {
    /// Renders the result as a colored status tag: `[PASS]`, `[SKIP…]` or `[FAIL…]`,
    /// including the failure reason and counterexample when present.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.status {
            TestStatus::Success => "[PASS]".green().fmt(f),
            TestStatus::Skipped => {
                let mut out = String::from("[SKIP");
                if let Some(reason) = &self.reason {
                    write!(out, ": {reason}").unwrap();
                }
                out.push(']');
                out.yellow().fmt(f)
            }
            TestStatus::Failure => {
                let mut out = String::from("[FAIL");
                if let Some(reason) = &self.reason {
                    write!(out, ": {reason}").unwrap();
                }
                // The closing bracket placement depends on the counterexample shape.
                match &self.counterexample {
                    Some(CounterExample::Single(ex)) => {
                        write!(out, "; counterexample: {ex}]").unwrap();
                    }
                    Some(CounterExample::Sequence(original, sequence)) => {
                        write!(
                            out,
                            "]\n\t[Sequence] (original: {original}, shrunk: {})\n",
                            sequence.len()
                        )
                        .unwrap();
                        for ex in sequence {
                            writeln!(out, "{ex}").unwrap();
                        }
                    }
                    None => out.push(']'),
                }
                out.red().wrap().fmt(f)
            }
        }
    }
}
/// Merges logs, labels, traces (tagged with `$trace_kind`) and line coverage from the
/// execution result `$b` into the test result `$a`.
macro_rules! extend {
    ($a:expr, $b:expr, $trace_kind:expr) => {
        $a.logs.extend($b.logs);
        $a.labels.extend($b.labels);
        $a.traces.extend($b.traces.map(|traces| ($trace_kind, traces)));
        $a.merge_coverages($b.line_coverage);
    };
}
impl TestResult {
/// Creates a new test result starting from test setup results.
pub fn new(setup: &TestSetup) -> Self {
Self {
labels: setup.labels.clone(),
logs: setup.logs.clone(),
traces: setup.traces.clone(),
line_coverage: setup.coverage.clone(),
..Default::default()
}
}
/// Creates a failed test result with given reason.
pub fn fail(reason: String) -> Self {
Self { status: TestStatus::Failure, reason: Some(reason), ..Default::default() }
}
/// Creates a test setup result.
pub fn setup_result(setup: TestSetup) -> Self {
let TestSetup {
address: _,
fuzz_fixtures: _,
logs,
labels,
traces,
coverage,
deployed_libs: _,
reason,
skipped,
deployment_failure: _,
} = setup;
Self {
status: if skipped { TestStatus::Skipped } else { TestStatus::Failure },
reason,
logs,
traces,
line_coverage: coverage,
labels,
..Default::default()
}
}
/// Returns the skipped result for single test (used in skipped fuzz test too).
pub fn single_skip(&mut self, reason: SkipReason) {
self.status = TestStatus::Skipped;
self.reason = reason.0;
}
/// Returns the failed result with reason for single test.
pub fn single_fail(&mut self, reason: Option<String>) {
self.status = TestStatus::Failure;
self.reason = reason;
}
/// Returns the result for single test. Merges execution results (logs, labeled addresses,
/// traces and coverages) in initial setup results.
pub fn single_result(
&mut self,
success: bool,
reason: Option<String>,
raw_call_result: RawCallResult,
) {
self.kind =
TestKind::Unit { gas: raw_call_result.gas_used.wrapping_sub(raw_call_result.stipend) };
extend!(self, raw_call_result, TraceKind::Execution);
self.status = match success {
true => TestStatus::Success,
false => TestStatus::Failure,
};
self.reason = reason;
self.duration = Duration::default();
self.gas_report_traces = Vec::new();
if let Some(cheatcodes) = raw_call_result.cheatcodes {
self.breakpoints = cheatcodes.breakpoints;
self.gas_snapshots = cheatcodes.gas_snapshots;
self.deprecated_cheatcodes = cheatcodes.deprecated;
}
}
/// Returns the result for a fuzzed test. Merges fuzz execution results (logs, labeled
/// addresses, traces and coverages) in initial setup results.
pub fn fuzz_result(&mut self, result: FuzzTestResult) {
self.kind = TestKind::Fuzz {
median_gas: result.median_gas(false),
mean_gas: result.mean_gas(false),
first_case: result.first_case,
runs: result.gas_by_case.len(),
failed_corpus_replays: result.failed_corpus_replays,
};
// Record logs, labels, traces and merge coverages.
extend!(self, result, TraceKind::Execution);
self.status = if result.skipped {
TestStatus::Skipped
} else if result.success {
TestStatus::Success
} else {
TestStatus::Failure
};
self.reason = result.reason;
self.counterexample = result.counterexample;
self.duration = Duration::default();
self.gas_report_traces = result.gas_report_traces.into_iter().map(|t| vec![t]).collect();
self.breakpoints = result.breakpoints.unwrap_or_default();
self.deprecated_cheatcodes = result.deprecated_cheatcodes;
}
/// Returns the fail result for fuzz test setup.
pub fn fuzz_setup_fail(&mut self, e: Report) {
self.kind = TestKind::Fuzz {
first_case: Default::default(),
runs: 0,
mean_gas: 0,
median_gas: 0,
failed_corpus_replays: 0,
};
self.status = TestStatus::Failure;
self.reason = Some(format!("failed to set up fuzz testing environment: {e}"));
}
/// Returns the skipped result for invariant test.
pub fn invariant_skip(&mut self, reason: SkipReason) {
self.kind = TestKind::Invariant {
runs: 1,
calls: 1,
reverts: 1,
metrics: HashMap::default(),
failed_corpus_replays: 0,
};
self.status = TestStatus::Skipped;
self.reason = reason.0;
}
/// Returns the fail result for replayed invariant test.
pub fn invariant_replay_fail(
&mut self,
replayed_entirely: bool,
invariant_name: &String,
call_sequence: Vec<BaseCounterExample>,
) {
self.kind = TestKind::Invariant {
runs: 1,
calls: 1,
reverts: 1,
metrics: HashMap::default(),
failed_corpus_replays: 0,
};
self.status = TestStatus::Failure;
self.reason = if replayed_entirely {
Some(format!("{invariant_name} replay failure"))
} else {
Some(format!("{invariant_name} persisted failure revert"))
};
self.counterexample = Some(CounterExample::Sequence(call_sequence.len(), call_sequence));
}
/// Returns the fail result for invariant test setup.
pub fn invariant_setup_fail(&mut self, e: Report) {
self.kind = TestKind::Invariant {
runs: 0,
calls: 0,
reverts: 0,
metrics: HashMap::default(),
failed_corpus_replays: 0,
};
self.status = TestStatus::Failure;
self.reason = Some(format!("failed to set up invariant testing environment: {e}"));
}
/// Returns the invariant test result.
#[expect(clippy::too_many_arguments)]
pub fn invariant_result(
&mut self,
gas_report_traces: Vec<Vec<CallTraceArena>>,
success: bool,
reason: Option<String>,
counterexample: Option<CounterExample>,
cases: Vec<FuzzedCases>,
reverts: usize,
metrics: Map<String, InvariantMetrics>,
failed_corpus_replays: usize,
) {
self.kind = TestKind::Invariant {
runs: cases.len(),
calls: cases.iter().map(|sequence| sequence.cases().len()).sum(),
reverts,
metrics,
failed_corpus_replays,
};
self.status = match success {
true => TestStatus::Success,
false => TestStatus::Failure,
};
self.reason = reason;
self.counterexample = counterexample;
self.gas_report_traces = gas_report_traces;
}
/// Returns the result for a table test. Merges table test execution results (logs, labeled
/// addresses, traces and coverages) in initial setup results.
pub fn table_result(&mut self, result: FuzzTestResult) {
self.kind = TestKind::Table {
median_gas: result.median_gas(false),
mean_gas: result.mean_gas(false),
runs: result.gas_by_case.len(),
};
// Record logs, labels, traces and merge coverages.
extend!(self, result, TraceKind::Execution);
self.status = if result.skipped {
TestStatus::Skipped
} else if result.success {
TestStatus::Success
} else {
TestStatus::Failure
};
self.reason = result.reason;
self.counterexample = result.counterexample;
self.duration = Duration::default();
self.gas_report_traces = result.gas_report_traces.into_iter().map(|t| vec![t]).collect();
self.breakpoints = result.breakpoints.unwrap_or_default();
self.deprecated_cheatcodes = result.deprecated_cheatcodes;
}
/// Returns `true` if this is the result of a fuzz test
pub fn is_fuzz(&self) -> bool {
matches!(self.kind, TestKind::Fuzz { .. })
}
/// Formats the test result into a string (for printing).
pub fn short_result(&self, name: &str) -> String {
format!("{self} {name} {}", self.kind.report())
}
/// Merges the given raw call result into `self`.
pub fn extend(&mut self, call_result: RawCallResult) {
extend!(self, call_result, TraceKind::Execution);
}
/// Merges the given coverage result into `self`.
pub fn merge_coverages(&mut self, other_coverage: Option<HitMaps>) {
HitMaps::merge_opt(&mut self.line_coverage, other_coverage);
}
}
/// Data report by a test.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum TestKindReport {
    /// Report for a unit test.
    Unit {
        /// Gas consumed by the test.
        gas: u64,
    },
    /// Report for a fuzz test.
    Fuzz {
        /// Number of fuzz runs executed.
        runs: usize,
        /// Mean gas used across runs.
        mean_gas: u64,
        /// Median gas used across runs.
        median_gas: u64,
        /// Number of corpus replays that failed.
        failed_corpus_replays: usize,
    },
    /// Report for an invariant test.
    Invariant {
        /// Number of invariant runs.
        runs: usize,
        /// Total number of calls made across all runs.
        calls: usize,
        /// Number of reverts encountered.
        reverts: usize,
        /// Collected invariant metrics.
        metrics: Map<String, InvariantMetrics>,
        /// Number of corpus replays that failed.
        failed_corpus_replays: usize,
    },
    /// Report for a table test.
    Table {
        /// Number of table runs executed.
        runs: usize,
        /// Mean gas used across runs.
        mean_gas: u64,
        /// Median gas used across runs.
        median_gas: u64,
    },
}
impl fmt::Display for TestKindReport {
    /// Formats the report as a parenthesized summary, appending the failed corpus replay
    /// count only when it is non-zero.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Unit { gas } => write!(f, "(gas: {gas})"),
            Self::Fuzz { runs, mean_gas, median_gas, failed_corpus_replays } => {
                write!(f, "(runs: {runs}, μ: {mean_gas}, ~: {median_gas}")?;
                if *failed_corpus_replays != 0 {
                    write!(f, ", failed corpus replays: {failed_corpus_replays}")?;
                }
                f.write_str(")")
            }
            Self::Invariant { runs, calls, reverts, metrics: _, failed_corpus_replays } => {
                write!(f, "(runs: {runs}, calls: {calls}, reverts: {reverts}")?;
                if *failed_corpus_replays != 0 {
                    write!(f, ", failed corpus replays: {failed_corpus_replays}")?;
                }
                f.write_str(")")
            }
            Self::Table { runs, mean_gas, median_gas } => {
                write!(f, "(runs: {runs}, μ: {mean_gas}, ~: {median_gas})")
            }
        }
    }
}
impl TestKindReport {
    /// Returns the main gas value to compare against.
    pub fn gas(&self) -> u64 {
        match self {
            Self::Unit { gas } => *gas,
            // Fuzz-style reports are compared by their median gas usage.
            Self::Fuzz { median_gas, .. } | Self::Table { median_gas, .. } => *median_gas,
            // Not applicable to invariant tests, so report zero.
            Self::Invariant { .. } => 0,
        }
    }
}
/// Various types of tests
#[derive(Clone, Debug, Serialize, Deserialize)]
pub enum TestKind {
    /// A unit test.
    Unit { gas: u64 },
    /// A fuzz test.
    Fuzz {
        /// We keep this for the debugger.
        first_case: FuzzCase,
        /// Number of fuzz runs executed.
        runs: usize,
        /// Mean gas used across runs.
        mean_gas: u64,
        /// Median gas used across runs.
        median_gas: u64,
        /// Number of corpus replays that failed.
        failed_corpus_replays: usize,
    },
    /// An invariant test.
    Invariant {
        /// Number of invariant runs.
        runs: usize,
        /// Total number of calls made across all runs.
        calls: usize,
        /// Number of reverts encountered.
        reverts: usize,
        /// Collected invariant metrics.
        metrics: Map<String, InvariantMetrics>,
        /// Number of corpus replays that failed.
        failed_corpus_replays: usize,
    },
    /// A table test.
    Table { runs: usize, mean_gas: u64, median_gas: u64 },
}
impl Default for TestKind {
    /// Defaults to a unit test with zero gas usage.
    fn default() -> Self {
        Self::Unit { gas: 0 }
    }
}
impl TestKind {
    /// The gas consumed by this test
    pub fn report(&self) -> TestKindReport {
        match self {
            Self::Unit { gas } => TestKindReport::Unit { gas: *gas },
            Self::Fuzz { first_case: _, runs, mean_gas, median_gas, failed_corpus_replays } => {
                TestKindReport::Fuzz {
                    runs: *runs,
                    mean_gas: *mean_gas,
                    median_gas: *median_gas,
                    failed_corpus_replays: *failed_corpus_replays,
                }
            }
            Self::Invariant { runs, calls, reverts, metrics: _, failed_corpus_replays } => {
                TestKindReport::Invariant {
                    runs: *runs,
                    calls: *calls,
                    reverts: *reverts,
                    // NOTE(review): the collected metrics are discarded here and the report
                    // carries an empty map instead — confirm this is intentional.
                    metrics: HashMap::default(),
                    failed_corpus_replays: *failed_corpus_replays,
                }
            }
            Self::Table { runs, mean_gas, median_gas } => {
                TestKindReport::Table { runs: *runs, mean_gas: *mean_gas, median_gas: *median_gas }
            }
        }
    }
}
/// The result of a test setup.
///
/// Includes the deployment of the required libraries and the test contract itself, and the call to
/// the `setUp()` function.
#[derive(Clone, Debug, Default)]
pub struct TestSetup {
    /// The address at which the test contract was deployed.
    pub address: Address,
    /// Defined fuzz test fixtures.
    pub fuzz_fixtures: FuzzFixtures,
    /// The logs emitted during setup.
    pub logs: Vec<Log>,
    /// Addresses labeled during setup.
    pub labels: AddressHashMap<String>,
    /// Call traces of the setup.
    pub traces: Traces,
    /// Coverage info during setup.
    pub coverage: Option<HitMaps>,
    /// Addresses of external libraries deployed during setup.
    pub deployed_libs: Vec<Address>,
    /// The reason the setup failed, if it did.
    pub reason: Option<String>,
    /// Whether setup and entire test suite is skipped.
    pub skipped: bool,
    /// Whether the test failed to deploy.
    pub deployment_failure: bool,
}
impl TestSetup {
    /// Creates a setup result that failed with the given reason.
    pub fn failed(reason: String) -> Self {
        Self { reason: Some(reason), ..Default::default() }
    }
    /// Creates a setup result that was skipped with the given reason.
    pub fn skipped(reason: String) -> Self {
        Self { reason: Some(reason), skipped: true, ..Default::default() }
    }
    /// Merges logs, labels, traces (tagged with `trace_kind`) and coverage from the raw call
    /// result into this setup.
    pub fn extend(&mut self, raw: RawCallResult, trace_kind: TraceKind) {
        extend!(self, raw, trace_kind);
    }
    /// Merges the given coverage into this setup's coverage.
    pub fn merge_coverages(&mut self, other_coverage: Option<HitMaps>) {
        HitMaps::merge_opt(&mut self.coverage, other_coverage);
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/progress.rs | crates/forge/src/progress.rs | use alloy_primitives::map::HashMap;
use chrono::Utc;
use indicatif::{MultiProgress, ProgressBar};
use parking_lot::Mutex;
use std::{sync::Arc, time::Duration};
/// State of [ProgressBar]s displayed for the given test run.
/// Shows progress of all test suites matching filter.
/// For each test within the test suite an individual progress bar is displayed.
/// When a test suite completes, their progress is removed from overall progress and result summary
/// is displayed.
#[derive(Debug)]
pub struct TestsProgressState {
    /// Main [MultiProgress] instance showing progress for all test suites.
    multi: MultiProgress,
    /// Progress bar counting completed / remaining test suites.
    overall_progress: ProgressBar,
    /// Individual test suites progress, keyed by suite identifier.
    suites_progress: HashMap<String, ProgressBar>,
}
impl TestsProgressState {
    /// Creates overall tests progress state for `suites_len` suites run on `threads_no` threads.
    pub fn new(suites_len: usize, threads_no: usize) -> Self {
        let multi = MultiProgress::new();
        let overall_progress = multi.add(ProgressBar::new(suites_len as u64));
        overall_progress.set_style(
            indicatif::ProgressStyle::with_template("{bar:40.cyan/blue} {pos:>7}/{len:7} {msg}")
                .unwrap()
                .progress_chars("##-"),
        );
        overall_progress.set_message(format!("completed (with {threads_no} threads)"));
        Self { multi, overall_progress, suites_progress: HashMap::default() }
    }

    /// Creates a new test suite progress spinner and adds it to the overall progress.
    pub fn start_suite_progress(&mut self, suite_name: &str) {
        let suite_progress = self.multi.add(ProgressBar::new_spinner());
        suite_progress.set_style(
            indicatif::ProgressStyle::with_template("{spinner} {wide_msg:.bold.dim}")
                .unwrap()
                .tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈ "),
        );
        suite_progress.set_message(format!("{suite_name} "));
        suite_progress.enable_steady_tick(Duration::from_millis(100));
        self.suites_progress.insert(suite_name.to_owned(), suite_progress);
    }

    /// Prints suite result summary and removes it from overall progress.
    pub fn end_suite_progress(&mut self, suite_name: &str, result_summary: String) {
        if let Some(suite_progress) = self.suites_progress.remove(suite_name) {
            // Suspend the progress display so the summary line is not clobbered by redraws.
            self.multi.suspend(|| {
                let _ = sh_println!("{suite_name}\n ↪ {result_summary}");
            });
            suite_progress.finish_and_clear();
            // Increment test progress bar to reflect completed test suite.
            self.overall_progress.inc(1);
        }
    }

    /// Creates progress entry for fuzz tests.
    /// Set the prefix and total number of runs. Message is updated during execution with current
    /// phase. Test progress is placed under test suite progress entry so all tests within suite
    /// are grouped.
    pub fn start_fuzz_progress(
        &mut self,
        suite_name: &str,
        test_name: &str,
        timeout: Option<u32>,
        runs: u32,
    ) -> Option<ProgressBar> {
        if let Some(suite_progress) = self.suites_progress.get(suite_name) {
            let fuzz_progress =
                self.multi.insert_after(suite_progress, ProgressBar::new(runs as u64));
            // With a timeout the run count is open-ended, so show the wall-clock deadline
            // instead of a `pos/len` bar.
            let template = if let Some(timeout) = timeout {
                let ends_at = (Utc::now() + chrono::Duration::seconds(timeout.into()))
                    .format("%H:%M:%S %Y-%m-%d")
                    .to_string();
                format!(" ↪ {{prefix:.bold.dim}}: [{{pos}}] Runs, ends at {ends_at} UTC {{msg}}")
            } else {
                " ↪ {prefix:.bold.dim}: [{pos}/{len}] Runs {msg}".to_string()
            };
            fuzz_progress.set_style(
                indicatif::ProgressStyle::with_template(&template).unwrap().tick_chars("⠁⠂⠄⡀⢀⠠⠐⠈ "),
            );
            fuzz_progress.set_prefix(test_name.to_string());
            Some(fuzz_progress)
        } else {
            None
        }
    }

    /// Removes overall test progress.
    pub fn clear(&mut self) {
        self.multi.clear().unwrap();
    }
}
/// Cloneable wrapper around [TestsProgressState].
#[derive(Debug, Clone)]
pub struct TestsProgress {
    /// Shared, mutex-guarded progress state; cloned handles point to the same state.
    pub inner: Arc<Mutex<TestsProgressState>>,
}
impl TestsProgress {
    /// Creates a new shared progress state for `suites_len` suites run on `threads_no` threads.
    pub fn new(suites_len: usize, threads_no: usize) -> Self {
        Self { inner: Arc::new(Mutex::new(TestsProgressState::new(suites_len, threads_no))) }
    }
}
/// Helper function for creating fuzz test progress bar.
///
/// Returns `None` when progress reporting is disabled (`tests_progress` is `None`) or when the
/// suite has no registered progress entry.
pub fn start_fuzz_progress(
    tests_progress: Option<&TestsProgress>,
    suite_name: &str,
    test_name: &String,
    timeout: Option<u32>,
    runs: u32,
) -> Option<ProgressBar> {
    tests_progress.and_then(|progress| {
        progress.inner.lock().start_fuzz_progress(suite_name, test_name, timeout, runs)
    })
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/multi_runner.rs | crates/forge/src/multi_runner.rs | //! Forge test runner for multiple contracts.
use crate::{
ContractRunner, TestFilter, progress::TestsProgress, result::SuiteResult,
runner::LIBRARY_DEPLOYER,
};
use alloy_json_abi::{Function, JsonAbi};
use alloy_primitives::{Address, Bytes, U256};
use eyre::Result;
use foundry_cli::opts::configure_pcx_from_compile_output;
use foundry_common::{
ContractsByArtifact, ContractsByArtifactBuilder, TestFunctionExt, get_contract_name,
shell::verbosity,
};
use foundry_compilers::{
Artifact, ArtifactId, Compiler, ProjectCompileOutput,
artifacts::{Contract, Libraries},
};
use foundry_config::{Config, InlineConfig};
use foundry_evm::{
Env,
backend::Backend,
decode::RevertDecoder,
executors::{EarlyExit, Executor, ExecutorBuilder},
fork::CreateFork,
fuzz::strategies::LiteralsDictionary,
inspectors::CheatsConfig,
opts::EvmOpts,
traces::{InternalTraceMode, TraceMode},
};
use foundry_evm_networks::NetworkConfigs;
use foundry_linking::{LinkOutput, Linker};
use rayon::prelude::*;
use revm::primitives::hardfork::SpecId;
use std::{
borrow::Borrow,
collections::BTreeMap,
path::Path,
sync::{Arc, mpsc},
time::Instant,
};
/// A deployable contract that contains test functions.
#[derive(Debug, Clone)]
pub struct TestContract {
    /// The contract's ABI.
    pub abi: JsonAbi,
    /// The contract's creation bytecode.
    pub bytecode: Bytes,
}
/// Mapping of artifact IDs to their deployable [`TestContract`] data.
pub type DeployableContracts = BTreeMap<ArtifactId, TestContract>;
/// A multi contract runner receives a set of contracts deployed in an EVM instance and proceeds
/// to run all test functions in these contracts.
#[derive(Clone, Debug)]
pub struct MultiContractRunner {
    /// Mapping of contract name to JsonAbi, creation bytecode and library bytecode which
    /// needs to be deployed & linked against.
    pub contracts: DeployableContracts,
    /// Known contracts linked with computed library addresses.
    pub known_contracts: ContractsByArtifact,
    /// Revert decoder. Contains all known errors and their selectors.
    pub revert_decoder: RevertDecoder,
    /// Libraries to deploy.
    pub libs_to_deploy: Vec<Bytes>,
    /// Library addresses used to link contracts.
    pub libraries: Libraries,
    /// Solar compiler instance, to grant syntactic and semantic analysis capabilities.
    pub analysis: Arc<solar::sema::Compiler>,
    /// Literals dictionary for fuzzing.
    pub fuzz_literals: LiteralsDictionary,
    /// The fork to use at launch.
    pub fork: Option<CreateFork>,
    /// The base configuration for the test runner; also reachable via `Deref`.
    pub tcfg: TestRunnerConfig,
}
// Allow transparent (mutable) access to the `TestRunnerConfig` fields through the runner.
impl std::ops::Deref for MultiContractRunner {
    type Target = TestRunnerConfig;
    fn deref(&self) -> &Self::Target {
        &self.tcfg
    }
}
impl std::ops::DerefMut for MultiContractRunner {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.tcfg
    }
}
impl MultiContractRunner {
    /// Returns an iterator over all contracts that match the filter.
    pub fn matching_contracts<'a: 'b, 'b>(
        &'a self,
        filter: &'b dyn TestFilter,
    ) -> impl Iterator<Item = (&'a ArtifactId, &'a TestContract)> + 'b {
        self.contracts.iter().filter(|&(id, c)| matches_artifact(filter, id, &c.abi))
    }
    /// Returns an iterator over all test functions that match the filter.
    pub fn matching_test_functions<'a: 'b, 'b>(
        &'a self,
        filter: &'b dyn TestFilter,
    ) -> impl Iterator<Item = &'a Function> + 'b {
        self.matching_contracts(filter)
            .flat_map(|(_, c)| c.abi.functions())
            .filter(|func| filter.matches_test_function(func))
    }
    /// Returns an iterator over all test functions in contracts that match the filter.
    ///
    /// Unlike [`Self::matching_test_functions`], this filters only by path and contract name,
    /// not by test function name.
    pub fn all_test_functions<'a: 'b, 'b>(
        &'a self,
        filter: &'b dyn TestFilter,
    ) -> impl Iterator<Item = &'a Function> + 'b {
        self.contracts
            .iter()
            .filter(|(id, _)| filter.matches_path(&id.source) && filter.matches_contract(&id.name))
            .flat_map(|(_, c)| c.abi.functions())
            .filter(|func| func.is_any_test())
    }
    /// Returns all matching tests grouped by contract grouped by file (file -> (contract -> tests))
    pub fn list(&self, filter: &dyn TestFilter) -> BTreeMap<String, BTreeMap<String, Vec<String>>> {
        self.matching_contracts(filter)
            .map(|(id, c)| {
                let source = id.source.as_path().display().to_string();
                let name = id.name.clone();
                let tests = c
                    .abi
                    .functions()
                    .filter(|func| filter.matches_test_function(func))
                    .map(|func| func.name.clone())
                    .collect::<Vec<_>>();
                (source, name, tests)
            })
            .fold(BTreeMap::new(), |mut acc, (source, name, tests)| {
                acc.entry(source).or_default().insert(name, tests);
                acc
            })
    }
    /// Executes _all_ tests that match the given `filter`.
    ///
    /// The same as [`test`](Self::test), but returns the results instead of streaming them.
    ///
    /// Note that this method returns only when all tests have been executed.
    pub fn test_collect(
        &mut self,
        filter: &dyn TestFilter,
    ) -> Result<BTreeMap<String, SuiteResult>> {
        Ok(self.test_iter(filter)?.collect())
    }
    /// Executes _all_ tests that match the given `filter`.
    ///
    /// The same as [`test`](Self::test), but returns the results instead of streaming them.
    ///
    /// Note that this method returns only when all tests have been executed.
    pub fn test_iter(
        &mut self,
        filter: &dyn TestFilter,
    ) -> Result<impl Iterator<Item = (String, SuiteResult)>> {
        let (tx, rx) = mpsc::channel();
        self.test(filter, tx, false)?;
        Ok(rx.into_iter())
    }
    /// Executes _all_ tests that match the given `filter`.
    ///
    /// This will create the runtime based on the configured `evm` ops and create the `Backend`
    /// before executing all contracts and their tests in _parallel_.
    ///
    /// Each Executor gets its own instance of the `Backend`.
    ///
    /// NOTE(review): uses `tokio::runtime::Handle::current()`, so this must be called from
    /// within a Tokio runtime context.
    pub fn test(
        &mut self,
        filter: &dyn TestFilter,
        tx: mpsc::Sender<(String, SuiteResult)>,
        show_progress: bool,
    ) -> Result<()> {
        let tokio_handle = tokio::runtime::Handle::current();
        trace!("running all tests");
        // The DB backend that serves all the data.
        let db = Backend::spawn(self.fork.take())?;
        let find_timer = Instant::now();
        let contracts = self.matching_contracts(filter).collect::<Vec<_>>();
        let find_time = find_timer.elapsed();
        debug!(
            "Found {} test contracts out of {} in {:?}",
            contracts.len(),
            self.contracts.len(),
            find_time,
        );
        if show_progress {
            let tests_progress = TestsProgress::new(contracts.len(), rayon::current_num_threads());
            // Collect test suite results to stream at the end of test run.
            // This avoids interleaving the progress bars with suite output.
            let results: Vec<(String, SuiteResult)> = contracts
                .par_iter()
                .map(|&(id, contract)| {
                    let _guard = tokio_handle.enter();
                    tests_progress.inner.lock().start_suite_progress(&id.identifier());
                    let result = self.run_test_suite(
                        id,
                        contract,
                        &db,
                        filter,
                        &tokio_handle,
                        Some(&tests_progress),
                    );
                    tests_progress
                        .inner
                        .lock()
                        .end_suite_progress(&id.identifier(), result.summary());
                    (id.identifier(), result)
                })
                .collect();
            tests_progress.inner.lock().clear();
            results.iter().for_each(|result| {
                let _ = tx.send(result.to_owned());
            });
        } else {
            // Without progress bars, stream each suite result as soon as it completes.
            contracts.par_iter().for_each(|&(id, contract)| {
                let _guard = tokio_handle.enter();
                let result = self.run_test_suite(id, contract, &db, filter, &tokio_handle, None);
                let _ = tx.send((id.identifier(), result));
            })
        }
        Ok(())
    }
    /// Runs all tests of a single contract and returns the suite result.
    fn run_test_suite(
        &self,
        artifact_id: &ArtifactId,
        contract: &TestContract,
        db: &Backend,
        filter: &dyn TestFilter,
        tokio_handle: &tokio::runtime::Handle,
        progress: Option<&TestsProgress>,
    ) -> SuiteResult {
        let identifier = artifact_id.identifier();
        // Use the shorter contract name for the span unless TRACE logging is enabled.
        let mut span_name = identifier.as_str();
        if !enabled!(tracing::Level::TRACE) {
            span_name = get_contract_name(&identifier);
        }
        let span = debug_span!("suite", name = %span_name);
        let span_local = span.clone();
        let _guard = span_local.enter();
        debug!("start executing all tests in contract");
        let executor = self.tcfg.executor(
            self.known_contracts.clone(),
            self.analysis.clone(),
            artifact_id,
            db.clone(),
        );
        let runner = ContractRunner::new(
            &identifier,
            contract,
            executor,
            progress,
            tokio_handle,
            span,
            self,
        );
        let r = runner.run_tests(filter);
        debug!(duration=?r.duration, "executed all tests in contract");
        r
    }
}
/// Configuration for the test runner.
///
/// This is modified after instantiation through inline config.
#[derive(Clone, Debug)]
pub struct TestRunnerConfig {
    /// Project config.
    pub config: Arc<Config>,
    /// Inline configuration.
    pub inline_config: Arc<InlineConfig>,
    /// EVM configuration.
    pub evm_opts: EvmOpts,
    /// EVM environment.
    pub env: Env,
    /// EVM version.
    pub spec_id: SpecId,
    /// The address which will be used to deploy the initial contracts and send all transactions.
    pub sender: Address,
    /// Whether to collect line coverage info.
    pub line_coverage: bool,
    /// Whether to collect debug info.
    pub debug: bool,
    /// Whether to enable steps tracking in the tracer.
    pub decode_internal: InternalTraceMode,
    /// Whether to enable call isolation.
    pub isolation: bool,
    /// Networks with enabled features.
    pub networks: NetworkConfigs,
    /// Whether to exit early on test failure or if test run interrupted.
    pub early_exit: EarlyExit,
}
impl TestRunnerConfig {
    /// Reconfigures all fields using the given `config`.
    /// This is for example used to override the configuration with inline config.
    pub fn reconfigure_with(&mut self, config: Arc<Config>) {
        // The new config is expected to be a distinct (modified) instance.
        debug_assert!(!Arc::ptr_eq(&self.config, &config));
        self.spec_id = config.evm_spec_id();
        self.sender = config.sender;
        self.networks = config.networks;
        self.isolation = config.isolate;
        // Specific to Forge, not present in config.
        // self.line_coverage = N/A;
        // self.debug = N/A;
        // self.decode_internal = N/A;
        // TODO: self.evm_opts
        self.evm_opts.always_use_create_2_factory = config.always_use_create_2_factory;
        // TODO: self.env
        self.config = config;
    }
    /// Configures the given executor with this configuration.
    ///
    /// Applies the subset of settings that can currently be re-applied to an existing executor;
    /// commented-out lines mark settings that are not (yet) reconfigured here (see TODO above).
    pub fn configure_executor(&self, executor: &mut Executor) {
        // TODO: See above
        let inspector = executor.inspector_mut();
        // inspector.set_env(&self.env);
        if let Some(cheatcodes) = inspector.cheatcodes.as_mut() {
            cheatcodes.config =
                Arc::new(cheatcodes.config.clone_with(&self.config, self.evm_opts.clone()));
        }
        inspector.tracing(self.trace_mode());
        inspector.collect_line_coverage(self.line_coverage);
        inspector.enable_isolation(self.isolation);
        inspector.networks(self.networks);
        // inspector.set_create2_deployer(self.evm_opts.create2_deployer);
        // executor.env_mut().clone_from(&self.env);
        executor.set_spec_id(self.spec_id);
        // executor.set_gas_limit(self.evm_opts.gas_limit());
        executor.set_legacy_assertions(self.config.legacy_assertions);
    }
    /// Creates a new executor with this configuration.
    pub fn executor(
        &self,
        known_contracts: ContractsByArtifact,
        analysis: Arc<solar::sema::Compiler>,
        artifact_id: &ArtifactId,
        db: Backend,
    ) -> Executor {
        let cheats_config = Arc::new(CheatsConfig::new(
            &self.config,
            self.evm_opts.clone(),
            Some(known_contracts),
            Some(artifact_id.clone()),
        ));
        ExecutorBuilder::new()
            .inspectors(|stack| {
                stack
                    .cheatcodes(cheats_config)
                    .trace_mode(self.trace_mode())
                    .line_coverage(self.line_coverage)
                    .enable_isolation(self.isolation)
                    .networks(self.networks)
                    .create2_deployer(self.evm_opts.create2_deployer)
                    .set_analysis(analysis)
            })
            .spec_id(self.spec_id)
            .gas_limit(self.evm_opts.gas_limit())
            .legacy_assertions(self.config.legacy_assertions)
            .build(self.env.clone(), db)
    }
    /// Builds the [`TraceMode`] implied by this configuration.
    fn trace_mode(&self) -> TraceMode {
        TraceMode::default()
            .with_debug(self.debug)
            .with_decode_internal(self.decode_internal)
            .with_verbosity(self.evm_opts.verbosity)
            .with_state_changes(verbosity() > 4)
    }
}
/// Builder used for instantiating the multi-contract runner
#[derive(Clone)]
#[must_use = "builders do nothing unless you call `build` on them"]
pub struct MultiContractRunnerBuilder {
    /// The address which will be used to deploy the initial contracts and send all
    /// transactions
    pub sender: Option<Address>,
    /// The initial balance for each one of the deployed smart contracts
    pub initial_balance: U256,
    /// The EVM spec to use
    pub evm_spec: Option<SpecId>,
    /// The fork to use at launch
    pub fork: Option<CreateFork>,
    /// Project config.
    pub config: Arc<Config>,
    /// Whether or not to collect line coverage info
    pub line_coverage: bool,
    /// Whether or not to collect debug info
    pub debug: bool,
    /// Whether to enable steps tracking in the tracer.
    pub decode_internal: InternalTraceMode,
    /// Whether to enable call isolation
    pub isolation: bool,
    /// Networks with enabled features.
    pub networks: NetworkConfigs,
    /// Whether to exit early on test failure.
    pub fail_fast: bool,
}
impl MultiContractRunnerBuilder {
    /// Creates a new builder from the given project config, with all other settings at their
    /// defaults.
    pub fn new(config: Arc<Config>) -> Self {
        Self {
            config,
            sender: Default::default(),
            initial_balance: Default::default(),
            evm_spec: Default::default(),
            fork: Default::default(),
            line_coverage: Default::default(),
            debug: Default::default(),
            isolation: Default::default(),
            decode_internal: Default::default(),
            networks: Default::default(),
            fail_fast: false,
        }
    }
    /// Sets the address used to deploy the initial contracts and send all transactions.
    pub fn sender(mut self, sender: Address) -> Self {
        self.sender = Some(sender);
        self
    }
    /// Sets the initial balance for each one of the deployed smart contracts.
    pub fn initial_balance(mut self, initial_balance: U256) -> Self {
        self.initial_balance = initial_balance;
        self
    }
    /// Sets the EVM spec to use.
    pub fn evm_spec(mut self, spec: SpecId) -> Self {
        self.evm_spec = Some(spec);
        self
    }
    /// Sets the fork to use at launch, if any.
    pub fn with_fork(mut self, fork: Option<CreateFork>) -> Self {
        self.fork = fork;
        self
    }
    /// Enables or disables line coverage collection.
    pub fn set_coverage(mut self, enable: bool) -> Self {
        self.line_coverage = enable;
        self
    }
    /// Enables or disables debug info collection.
    pub fn set_debug(mut self, enable: bool) -> Self {
        self.debug = enable;
        self
    }
    /// Sets the internal trace decoding mode.
    pub fn set_decode_internal(mut self, mode: InternalTraceMode) -> Self {
        self.decode_internal = mode;
        self
    }
    /// Sets whether to exit early on test failure.
    pub fn fail_fast(mut self, fail_fast: bool) -> Self {
        self.fail_fast = fail_fast;
        self
    }
    /// Enables or disables call isolation.
    pub fn enable_isolation(mut self, enable: bool) -> Self {
        self.isolation = enable;
        self
    }
    /// Sets the networks with enabled features.
    pub fn networks(mut self, networks: NetworkConfigs) -> Self {
        self.networks = networks;
        self
    }
/// Given an EVM, proceeds to return a runner which is able to execute all tests
/// against that evm.
///
/// Consumes the builder: links the compiled `output` against libraries,
/// collects deployable test contracts, sets up a solar compilation context for
/// source analysis, and assembles the final `MultiContractRunner`.
pub fn build<C: Compiler<CompilerContract = Contract>>(
    self,
    output: &ProjectCompileOutput,
    env: Env,
    evm_opts: EvmOpts,
) -> Result<MultiContractRunner> {
    let root = &self.config.root;
    // Strip the project root prefix so artifact ids are relative to the root.
    let contracts = output
        .artifact_ids()
        .map(|(id, v)| (id.with_stripped_file_prefixes(root), v))
        .collect();
    let linker = Linker::new(root, contracts);
    // Build revert decoder from ABIs of all artifacts.
    let abis = linker
        .contracts
        .iter()
        .filter_map(|(_, contract)| contract.abi.as_ref().map(|abi| abi.borrow()));
    let revert_decoder = RevertDecoder::new().with_abis(abis);
    // Resolve libraries for all contracts, with libraries deployed by
    // `LIBRARY_DEPLOYER` starting at nonce 0.
    let LinkOutput { libraries, libs_to_deploy } = linker.link_with_nonce_or_address(
        Default::default(),
        LIBRARY_DEPLOYER,
        0,
        linker.contracts.keys(),
    )?;
    let linked_contracts = linker.get_linked_artifacts_cow(&libraries)?;
    // Create a mapping of name => (abi, deployment code, Vec<library deployment code>)
    let mut deployable_contracts = DeployableContracts::default();
    for (id, contract) in linked_contracts.iter() {
        let Some(abi) = contract.abi.as_ref() else { continue };
        // if it's a test, link it and add to deployable contracts:
        // a test contract has an argument-less (or absent) constructor and at
        // least one test function.
        if abi.constructor.as_ref().map(|c| c.inputs.is_empty()).unwrap_or(true)
            && abi.functions().any(|func| func.name.is_any_test())
        {
            linker.ensure_linked(contract, id)?;
            // Skip artifacts without deployable bytecode (e.g. abstract contracts).
            let Some(bytecode) =
                contract.get_bytecode_bytes().map(|b| b.into_owned()).filter(|b| !b.is_empty())
            else {
                continue;
            };
            deployable_contracts
                .insert(id.clone(), TestContract { abi: abi.clone().into_owned(), bytecode });
        }
    }
    // Create known contracts from linked contracts and storage layout information (if any).
    let known_contracts =
        ContractsByArtifactBuilder::new(linked_contracts).with_output(output, root).build();
    // Initialize and configure the solar compiler.
    let mut analysis = solar::sema::Compiler::new(
        solar::interface::Session::builder().with_stderr_emitter().build(),
    );
    let dcx = analysis.dcx_mut();
    dcx.set_emitter(Box::new(
        solar::interface::diagnostics::HumanEmitter::stderr(Default::default())
            .source_map(Some(dcx.source_map().unwrap())),
    ));
    dcx.set_flags_mut(|f| f.track_diagnostics = false);
    // Populate solar's global context by parsing and lowering the sources.
    let files: Vec<_> =
        output.output().sources.as_ref().keys().map(|path| path.to_path_buf()).collect();
    analysis.enter_mut(|compiler| -> Result<()> {
        let mut pcx = compiler.parse();
        configure_pcx_from_compile_output(
            &mut pcx,
            &self.config,
            output,
            if files.is_empty() { None } else { Some(&files) },
        )?;
        pcx.parse();
        let _ = compiler.lower_asts();
        Ok(())
    })?;
    let analysis = Arc::new(analysis);
    // Dictionary of literal values harvested from the sources, used to seed fuzzing.
    let fuzz_literals = LiteralsDictionary::new(
        Some(analysis.clone()),
        Some(self.config.project_paths()),
        self.config.fuzz.dictionary.max_fuzz_dictionary_literals,
    );
    Ok(MultiContractRunner {
        contracts: deployable_contracts,
        revert_decoder,
        known_contracts,
        libs_to_deploy,
        libraries,
        analysis,
        fuzz_literals,
        tcfg: TestRunnerConfig {
            evm_opts,
            env,
            // Explicit builder values take precedence over config-derived ones.
            spec_id: self.evm_spec.unwrap_or_else(|| self.config.evm_spec_id()),
            sender: self.sender.unwrap_or(self.config.sender),
            line_coverage: self.line_coverage,
            debug: self.debug,
            decode_internal: self.decode_internal,
            inline_config: Arc::new(InlineConfig::new_parsed(output, &self.config)?),
            isolation: self.isolation,
            networks: self.networks,
            early_exit: EarlyExit::new(self.fail_fast),
            config: self.config,
        },
        fork: self.fork,
    })
}
}
/// Returns `true` if the artifact's source path, contract name, and at least
/// one of its ABI functions match `filter`.
pub fn matches_artifact(filter: &dyn TestFilter, id: &ArtifactId, abi: &JsonAbi) -> bool {
    matches_contract(filter, &id.source, &id.name, abi.functions())
}
/// Returns `true` if both `path` and `contract_name` match `filter` and at
/// least one of `functions` matches as a test function.
pub(crate) fn matches_contract(
    filter: &dyn TestFilter,
    path: &Path,
    contract_name: &str,
    functions: impl IntoIterator<Item = impl std::borrow::Borrow<Function>>,
) -> bool {
    // Guard clauses preserve the original short-circuit order:
    // path first, then contract name, then functions.
    if !filter.matches_path(path) {
        return false;
    }
    if !filter.matches_contract(contract_name) {
        return false;
    }
    functions.into_iter().any(|func| filter.matches_test_function(func.borrow()))
}
//! Gas reports.
use crate::{
constants::{CHEATCODE_ADDRESS, HARDHAT_CONSOLE_ADDRESS},
traces::{CallTraceArena, CallTraceDecoder, CallTraceNode, DecodedCallData},
};
use alloy_primitives::map::HashSet;
use comfy_table::{Cell, Color, Table, modifiers::UTF8_ROUND_CORNERS, presets::ASCII_MARKDOWN};
use foundry_common::{TestFunctionExt, calc, shell};
use foundry_evm::traces::CallKind;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::{collections::BTreeMap, fmt::Display};
/// Represents the gas report for a set of contracts.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct GasReport {
    /// Whether to report any contracts.
    report_any: bool,
    /// Contracts to generate the report for.
    report_for: HashSet<String>,
    /// Contracts to ignore when generating the report.
    ignore: HashSet<String>,
    /// Whether to include gas reports for tests.
    include_tests: bool,
    /// All contracts that were analyzed, grouped by their identifier,
    /// e.g. `test/Counter.t.sol:CounterTest`.
    pub contracts: BTreeMap<String, ContractInfo>,
}
impl GasReport {
    /// Creates a new gas report.
    ///
    /// `report_for` selects the contracts to report on (empty or containing
    /// `"*"` means all); `ignore` excludes contracts from the report.
    pub fn new(
        report_for: impl IntoIterator<Item = String>,
        ignore: impl IntoIterator<Item = String>,
        include_tests: bool,
    ) -> Self {
        let report_for = report_for.into_iter().collect::<HashSet<_>>();
        let ignore = ignore.into_iter().collect::<HashSet<_>>();
        // No explicit selection, or a wildcard, means every contract is reported.
        let report_any = report_for.is_empty() || report_for.contains("*");
        Self { report_any, report_for, ignore, include_tests, ..Default::default() }
    }

    /// Whether the given contract should be reported.
    #[instrument(level = "trace", skip(self), ret)]
    fn should_report(&self, contract_name: &str) -> bool {
        if self.ignore.contains(contract_name) {
            let contains_anyway = self.report_for.contains(contract_name);
            if contains_anyway {
                // If the user listed the contract in 'gas_reports' (the foundry.toml field) a
                // report for the contract is generated even if it's listed in the ignore
                // list. This is addressed this way because getting a report you don't expect is
                // preferable than not getting one you expect. A warning is printed to stderr
                // indicating the "double listing".
                let _ = sh_warn!(
                    "{contract_name} is listed in both 'gas_reports' and 'gas_reports_ignore'."
                );
            }
            return contains_anyway;
        }
        self.report_any || self.report_for.contains(contract_name)
    }

    /// Analyzes the given traces and generates a gas report.
    pub async fn analyze(
        &mut self,
        arenas: impl IntoIterator<Item = &CallTraceArena>,
        decoder: &CallTraceDecoder,
    ) {
        for node in arenas.into_iter().flat_map(|arena| arena.nodes()) {
            self.analyze_node(node, decoder).await;
        }
    }

    /// Records deployment size and per-function gas usage for a single trace node.
    async fn analyze_node(&mut self, node: &CallTraceNode, decoder: &CallTraceDecoder) {
        let trace = &node.trace;
        // Cheatcode and console calls are infrastructure, not user gas usage.
        if trace.address == CHEATCODE_ADDRESS || trace.address == HARDHAT_CONSOLE_ADDRESS {
            return;
        }
        // Skip addresses the decoder cannot attribute to a known contract.
        let Some(name) = decoder.contracts.get(&node.trace.address) else { return };
        // The identifier is `path:Contract`; filter on the contract part only.
        let contract_name = name.rsplit(':').next().unwrap_or(name);
        if !self.should_report(contract_name) {
            return;
        }
        let contract_info = self.contracts.entry(name.to_string()).or_default();
        let is_create_call = trace.kind.is_any_create();
        // Record contract deployment size.
        if is_create_call {
            trace!(contract_name, "adding create size info");
            contract_info.size = trace.data.len();
        }
        // Only include top-level calls which account for calldata and base (21,000) cost.
        // Only include Calls and Creates as only these calls are isolated in inspector.
        if trace.depth > 1 && (trace.kind == CallKind::Call || is_create_call) {
            return;
        }
        let decoded = || decoder.decode_function(&node.trace);
        if is_create_call {
            trace!(contract_name, "adding create gas info");
            contract_info.gas = trace.gas_used;
        } else if let Some(DecodedCallData { signature, .. }) = decoded().await.call_data {
            let name = signature.split('(').next().unwrap();
            // ignore any test/setup functions unless explicitly included
            if self.include_tests || !name.test_function_kind().is_known() {
                trace!(contract_name, signature, "adding gas info");
                let gas_info = contract_info
                    .functions
                    .entry(name.to_string())
                    .or_default()
                    .entry(signature.clone())
                    .or_default();
                gas_info.frames.push(trace.gas_used);
            }
        }
    }

    /// Finalizes the gas report by calculating the min, max, mean, and median for each function.
    #[must_use]
    pub fn finalize(mut self) -> Self {
        trace!("finalizing gas report");
        for contract in self.contracts.values_mut() {
            for sigs in contract.functions.values_mut() {
                for func in sigs.values_mut() {
                    // Sorting makes min/max the first/last elements and is
                    // required by `median_sorted`.
                    func.frames.sort_unstable();
                    func.min = func.frames.first().copied().unwrap_or_default();
                    func.max = func.frames.last().copied().unwrap_or_default();
                    func.mean = calc::mean(&func.frames);
                    func.median = calc::median_sorted(&func.frames);
                    func.calls = func.frames.len() as u64;
                }
            }
        }
        self
    }
}
impl Display for GasReport {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        // JSON mode: emit the whole report in one go.
        if shell::is_json() {
            return writeln!(f, "{}", self.format_json_output());
        }
        // Human-readable mode: one table per contract that has function data.
        for (name, contract) in &self.contracts {
            if contract.functions.is_empty() {
                trace!(name, "gas report contract without functions");
                continue;
            }
            let table = self.format_table_output(contract, name);
            writeln!(f, "\n{table}")?;
        }
        Ok(())
    }
}
impl GasReport {
    /// Serializes the report to a JSON array with one object per contract,
    /// skipping contracts without any recorded function calls.
    fn format_json_output(&self) -> String {
        serde_json::to_string(
            &self
                .contracts
                .iter()
                .filter_map(|(name, contract)| {
                    if contract.functions.is_empty() {
                        trace!(name, "gas report contract without functions");
                        return None;
                    }
                    // Flatten name -> signature -> info into signature -> info.
                    let functions = contract
                        .functions
                        .iter()
                        .flat_map(|(_, sigs)| {
                            sigs.iter().map(|(sig, gas_info)| {
                                let display_name = sig.replace(':', "");
                                (display_name, gas_info)
                            })
                        })
                        .collect::<BTreeMap<_, _>>();
                    Some(json!({
                        "contract": name,
                        "deployment": {
                            "gas": contract.gas,
                            "size": contract.size,
                        },
                        "functions": functions,
                    }))
                })
                .collect::<Vec<_>>(),
        )
        .unwrap()
    }

    /// Renders a single contract's gas data as a table, using the markdown
    /// preset when markdown output is requested.
    fn format_table_output(&self, contract: &ContractInfo, name: &str) -> Table {
        let mut table = Table::new();
        if shell::is_markdown() {
            table.load_preset(ASCII_MARKDOWN);
        } else {
            table.apply_modifier(UTF8_ROUND_CORNERS);
        }
        table.set_header(vec![Cell::new(format!("{name} Contract")).fg(Color::Magenta)]);
        table.add_row(vec![
            Cell::new("Deployment Cost").fg(Color::Cyan),
            Cell::new("Deployment Size").fg(Color::Cyan),
        ]);
        table.add_row(vec![
            Cell::new(contract.gas.to_string()),
            Cell::new(contract.size.to_string()),
        ]);
        // Add a blank row to separate deployment info from function info.
        table.add_row(vec![Cell::new("")]);
        table.add_row(vec![
            Cell::new("Function Name"),
            Cell::new("Min").fg(Color::Green),
            Cell::new("Avg").fg(Color::Yellow),
            Cell::new("Median").fg(Color::Yellow),
            Cell::new("Max").fg(Color::Red),
            Cell::new("# Calls").fg(Color::Cyan),
        ]);
        contract.functions.iter().for_each(|(fname, sigs)| {
            sigs.iter().for_each(|(sig, gas_info)| {
                // Show function signature if overloaded else display function name.
                let display_name =
                    if sigs.len() == 1 { fname.to_string() } else { sig.replace(':', "") };
                table.add_row(vec![
                    Cell::new(display_name),
                    Cell::new(gas_info.min.to_string()).fg(Color::Green),
                    Cell::new(gas_info.mean.to_string()).fg(Color::Yellow),
                    Cell::new(gas_info.median.to_string()).fg(Color::Yellow),
                    Cell::new(gas_info.max.to_string()).fg(Color::Red),
                    Cell::new(gas_info.calls.to_string()),
                ]);
            })
        });
        table
    }
}
/// Per-contract data collected for the gas report.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct ContractInfo {
    /// Gas used by the contract's create call.
    pub gas: u64,
    /// Length in bytes of the create call's input (the creation code).
    pub size: usize,
    /// Function name -> Function signature -> GasInfo
    pub functions: BTreeMap<String, BTreeMap<String, GasInfo>>,
}
/// Aggregated gas statistics for a single function signature.
///
/// The summary fields are computed from `frames` by [`GasReport::finalize`].
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct GasInfo {
    /// Number of recorded calls (`frames.len()` after finalization).
    pub calls: u64,
    /// Minimum gas used across recorded calls.
    pub min: u64,
    /// Mean gas used across recorded calls.
    pub mean: u64,
    /// Median gas used across recorded calls.
    pub median: u64,
    /// Maximum gas used across recorded calls.
    pub max: u64,
    /// Raw per-call gas measurements; not serialized.
    #[serde(skip)]
    pub frames: Vec<u64>,
}
use crate::cmd::{
bind::BindArgs, bind_json, build::BuildArgs, cache::CacheArgs, clone::CloneArgs,
compiler::CompilerArgs, config, coverage, create::CreateArgs, doc::DocArgs, eip712, flatten,
fmt::FmtArgs, geiger, generate, init::InitArgs, inspect, install::InstallArgs, lint::LintArgs,
remappings::RemappingArgs, remove::RemoveArgs, selectors::SelectorsSubcommands, snapshot,
soldeer, test, tree, update,
};
use clap::{Parser, Subcommand, ValueHint};
use forge_script::ScriptArgs;
use forge_verify::{VerifyArgs, VerifyBytecodeArgs, VerifyCheckArgs};
use foundry_cli::opts::GlobalArgs;
use foundry_common::version::{LONG_VERSION, SHORT_VERSION};
use std::path::PathBuf;
// NOTE: `///` doc comments on clap-derive items become user-facing help text,
// so reviewer notes below use plain `//` comments to keep the CLI output
// unchanged.
/// Build, test, fuzz, debug and deploy Solidity contracts.
#[derive(Parser)]
#[command(
    name = "forge",
    version = SHORT_VERSION,
    long_version = LONG_VERSION,
    after_help = "Find more information in the book: https://getfoundry.sh/forge/overview",
    next_display_order = None,
)]
pub struct Forge {
    /// Include the global arguments.
    #[command(flatten)]
    pub global: GlobalArgs,

    // The subcommand selected on the command line.
    #[command(subcommand)]
    pub cmd: ForgeSubcommand,
}
// All `forge` subcommands. The `///` doc comments double as the CLI help
// strings, so they are part of the user-facing behavior and must not be
// reworded casually.
#[derive(Subcommand)]
pub enum ForgeSubcommand {
    /// Run the project's tests.
    #[command(visible_alias = "t")]
    Test(test::TestArgs),

    /// Run a smart contract as a script, building transactions that can be sent onchain.
    Script(ScriptArgs),

    /// Generate coverage reports.
    Coverage(coverage::CoverageArgs),

    /// Generate Rust bindings for smart contracts.
    #[command(alias = "bi")]
    Bind(BindArgs),

    /// Build the project's smart contracts.
    #[command(visible_aliases = ["b", "compile"])]
    Build(BuildArgs),

    /// Clone a contract from Etherscan.
    Clone(CloneArgs),

    /// Update one or multiple dependencies.
    ///
    /// If no arguments are provided, then all dependencies are updated.
    #[command(visible_alias = "u")]
    Update(update::UpdateArgs),

    /// Install one or multiple dependencies.
    ///
    /// If no arguments are provided, then existing dependencies will be installed.
    #[command(visible_aliases = ["i", "add"])]
    Install(InstallArgs),

    /// Remove one or multiple dependencies.
    #[command(visible_alias = "rm")]
    Remove(RemoveArgs),

    /// Get the automatically inferred remappings for the project.
    #[command(visible_alias = "re")]
    Remappings(RemappingArgs),

    /// Verify smart contracts on Etherscan.
    #[command(visible_alias = "v")]
    VerifyContract(VerifyArgs),

    /// Check verification status on Etherscan.
    #[command(visible_alias = "vc")]
    VerifyCheck(VerifyCheckArgs),

    /// Verify the deployed bytecode against its source on Etherscan.
    #[command(visible_alias = "vb")]
    VerifyBytecode(VerifyBytecodeArgs),

    /// Deploy a smart contract.
    #[command(visible_alias = "c")]
    Create(CreateArgs),

    /// Create a new Forge project.
    Init(InitArgs),

    /// Generate shell completions script.
    #[command(visible_alias = "com")]
    Completions {
        // The target shell flavor (bash, zsh, fish, ...).
        #[arg(value_enum)]
        shell: foundry_cli::clap::Shell,
    },

    /// Remove the build artifacts and cache directories.
    #[command(visible_alias = "cl")]
    Clean {
        /// The project's root path.
        ///
        /// By default root of the Git repository, if in one,
        /// or the current working directory.
        #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")]
        root: Option<PathBuf>,
    },

    /// Manage the Foundry cache.
    Cache(CacheArgs),

    /// Create a gas snapshot of each test's gas usage.
    #[command(visible_alias = "s")]
    Snapshot(snapshot::GasSnapshotArgs),

    /// Display the current config.
    #[command(visible_alias = "co")]
    Config(config::ConfigArgs),

    /// Flatten a source file and all of its imports into one file.
    #[command(visible_alias = "f")]
    Flatten(flatten::FlattenArgs),

    /// Format Solidity source files.
    Fmt(FmtArgs),

    /// Lint Solidity source files
    #[command(visible_alias = "l")]
    Lint(LintArgs),

    /// Get specialized information about a smart contract.
    #[command(visible_alias = "in")]
    Inspect(inspect::InspectArgs),

    /// Display a tree visualization of the project's dependency graph.
    #[command(visible_alias = "tr")]
    Tree(tree::TreeArgs),

    /// DEPRECATED: Detects usage of unsafe cheat codes in a project and its dependencies.
    ///
    /// This is an alias for `forge lint --only-lint unsafe-cheatcode`.
    Geiger(geiger::GeigerArgs),

    /// Generate documentation for the project.
    Doc(DocArgs),

    /// Function selector utilities.
    #[command(visible_alias = "se")]
    Selectors {
        #[command(subcommand)]
        command: SelectorsSubcommands,
    },

    /// Generate scaffold files.
    #[command(hide = true)]
    Generate(generate::GenerateArgs),

    /// Compiler utilities.
    Compiler(CompilerArgs),

    /// Soldeer dependency manager.
    Soldeer(soldeer::SoldeerArgs),

    /// Generate EIP-712 struct encodings for structs from a given file.
    Eip712(eip712::Eip712Args),

    /// Generate bindings for serialization/deserialization of project structs via JSON cheatcodes.
    BindJson(bind_json::BindJsonArgs),
}
#[cfg(test)]
mod tests {
    use super::*;
    use clap::CommandFactory;

    /// Runs clap's built-in self-check on the `forge` command definition;
    /// `debug_assert` panics if the argument configuration is inconsistent.
    #[test]
    fn verify_cli() {
        Forge::command().debug_assert();
    }
}
use super::build::BuildArgs;
use clap::Parser;
use eyre::Result;
use foundry_cli::{opts::EvmArgs, utils::LoadConfig};
use foundry_common::shell;
use foundry_config::fix::fix_tomls;
// Merge the nested `build` and `evm` CLI options into the figment used to
// resolve the configuration.
foundry_config::impl_figment_convert!(ConfigArgs, build, evm);

/// CLI arguments for `forge config`.
#[derive(Clone, Debug, Parser)]
pub struct ConfigArgs {
    /// Print only a basic set of the currently set config values.
    #[arg(long)]
    basic: bool,

    /// Attempt to fix any configuration warnings.
    #[arg(long)]
    fix: bool,

    // support nested build arguments
    #[command(flatten)]
    build: BuildArgs,

    #[command(flatten)]
    evm: EvmArgs,
}
impl ConfigArgs {
    /// Prints the resolved configuration, or — with `--fix` — rewrites config
    /// TOML files to resolve warnings and reports what was changed.
    pub fn run(self) -> Result<()> {
        // `--fix` mode: repair the TOML files, report warnings, and stop.
        if self.fix {
            for warning in fix_tomls() {
                sh_warn!("{warning}")?;
            }
            return Ok(());
        }

        let config = self
            .load_config_unsanitized()?
            .normalized_optimizer_settings()
            // we explicitly normalize the version, so mimic the behavior when invoking solc
            .normalized_evm_version();

        let rendered = if self.basic {
            let basic = config.into_basic();
            match shell::is_json() {
                true => serde_json::to_string_pretty(&basic)?,
                false => basic.to_string_pretty()?,
            }
        } else {
            match shell::is_json() {
                true => serde_json::to_string_pretty(&config)?,
                false => config.to_string_pretty()?,
            }
        };
        sh_println!("{rendered}")?;
        Ok(())
    }
}
use alloy_primitives::map::HashSet;
use clap::{Parser, ValueHint};
use eyre::Result;
use forge_sol_macro_gen::{MultiSolMacroGen, SolMacroGen};
use foundry_cli::{opts::BuildOpts, utils::LoadConfig};
use foundry_common::{compile::ProjectCompiler, fs::json_files};
use foundry_config::impl_figment_convert;
use regex::Regex;
use std::{
fs,
path::{Path, PathBuf},
};
// Makes `BindArgs` consumable as a figment provider via its nested `build`
// options.
impl_figment_convert!(BindArgs, build);

// Defaults used for the generated bindings crate's package metadata.
const DEFAULT_CRATE_NAME: &str = "foundry-contracts";
const DEFAULT_CRATE_VERSION: &str = "0.1.0";

// NOTE: `///` doc comments on clap fields become help text; reviewer notes use
// `//` so the CLI output stays unchanged.
/// CLI arguments for `forge bind`.
#[derive(Clone, Debug, Parser)]
pub struct BindArgs {
    /// Path to where the contract artifacts are stored.
    #[arg(
        long = "bindings-path",
        short,
        value_hint = ValueHint::DirPath,
        value_name = "PATH"
    )]
    pub bindings: Option<PathBuf>,

    /// Create bindings only for contracts whose names match the specified filter(s)
    #[arg(long)]
    pub select: Vec<regex::Regex>,

    /// Explicitly generate bindings for all contracts
    ///
    /// By default all contracts ending with `Test` or `Script` are excluded.
    #[arg(long, conflicts_with_all = &["select", "skip"])]
    pub select_all: bool,

    /// The name of the Rust crate to generate.
    ///
    /// This should be a valid crates.io crate name,
    /// however, this is not currently validated by this command.
    #[arg(long, default_value = DEFAULT_CRATE_NAME, value_name = "NAME")]
    crate_name: String,

    /// The version of the Rust crate to generate.
    ///
    /// This should be a standard semver version string,
    /// however, this is not currently validated by this command.
    #[arg(long, default_value = DEFAULT_CRATE_VERSION, value_name = "VERSION")]
    crate_version: String,

    /// The description of the Rust crate to generate.
    ///
    /// This will be added to the package.description field in Cargo.toml.
    #[arg(long, default_value = "", value_name = "DESCRIPTION")]
    crate_description: String,

    /// The license of the Rust crate to generate.
    ///
    /// This will be added to the package.license field in Cargo.toml.
    #[arg(long, value_name = "LICENSE", default_value = "")]
    crate_license: String,

    /// Generate the bindings as a module instead of a crate.
    #[arg(long)]
    module: bool,

    /// Overwrite existing generated bindings.
    ///
    /// By default, the command will check that the bindings are correct, and then exit. If
    /// --overwrite is passed, it will instead delete and overwrite the bindings.
    #[arg(long)]
    overwrite: bool,

    /// Generate bindings as a single file.
    #[arg(long)]
    single_file: bool,

    /// Skip Cargo.toml consistency checks.
    #[arg(long)]
    skip_cargo_toml: bool,

    /// Skips running forge build before generating binding
    #[arg(long)]
    skip_build: bool,

    /// Don't add any additional derives to generated bindings
    #[arg(long)]
    skip_extra_derives: bool,

    // Hidden legacy flag: `alloy` bindings are the default and only backend.
    /// Generate bindings for the `alloy` library, instead of `ethers`.
    #[arg(long, hide = true)]
    alloy: bool,

    /// Specify the `alloy` version on Crates.
    #[arg(long)]
    alloy_version: Option<String>,

    /// Specify the `alloy` revision on GitHub.
    #[arg(long, conflicts_with = "alloy_version")]
    alloy_rev: Option<String>,

    // Hidden legacy flag: setting it makes `run` bail with an error.
    /// Generate bindings for the `ethers` library (removed), instead of `alloy`.
    #[arg(long, hide = true)]
    ethers: bool,

    #[command(flatten)]
    build: BuildOpts,
}
impl BindArgs {
    /// Entry point for `forge bind`: optionally builds the project, then either
    /// verifies existing bindings for consistency or (re)generates them.
    pub fn run(self) -> Result<()> {
        // `--ethers` support was removed entirely.
        if self.ethers {
            eyre::bail!("`--ethers` bindings have been removed. Use `--alloy` (default) instead.");
        }
        if !self.skip_build {
            let project = self.build.project()?;
            let _ = ProjectCompiler::new().compile(&project)?;
        }
        let config = self.load_config()?;
        let artifacts = config.out;
        // Default output location is `<out>/bindings` unless overridden.
        let bindings_root = self.bindings.clone().unwrap_or_else(|| artifacts.join("bindings"));
        if bindings_root.exists() {
            if !self.overwrite {
                // Without `--overwrite`, existing bindings are only checked.
                sh_println!("Bindings found. Checking for consistency.")?;
                return self.check_existing_bindings(&artifacts, &bindings_root);
            }
            trace!(?artifacts, "Removing existing bindings");
            fs::remove_dir_all(&bindings_root)?;
        }
        self.generate_bindings(&artifacts, &bindings_root)?;
        sh_println!("Bindings have been generated to {}", bindings_root.display())?;
        Ok(())
    }

    /// Resolves the contract-name filter from the CLI flags, in precedence
    /// order: `--select-all`, then `--select`, then the build `--skip`
    /// patterns, and finally the default skip list.
    fn get_filter(&self) -> Result<Filter> {
        if self.select_all {
            // Select all json files
            return Ok(Filter::All);
        }
        if !self.select.is_empty() {
            // Return json files that match the select regex
            return Ok(Filter::Select(self.select.clone()));
        }
        if let Some(skip) = self.build.skip.as_ref().filter(|s| !s.is_empty()) {
            return Ok(Filter::Skip(
                skip.clone()
                    .into_iter()
                    .map(|s| Regex::new(s.file_pattern()))
                    .collect::<Result<Vec<_>, _>>()?,
            ));
        }
        // Exclude defaults
        Ok(Filter::skip_default())
    }

    /// Returns an iterator over the JSON files and the contract name in the `artifacts` directory.
    fn get_json_files(&self, artifacts: &Path) -> Result<impl Iterator<Item = (String, PathBuf)>> {
        let filter = self.get_filter()?;
        Ok(json_files(artifacts)
            .filter_map(|path| {
                // Ignore the build info JSON.
                if path.to_str()?.contains("build-info") {
                    return None;
                }
                // Ignore the `target` directory in case the user has built the project.
                if path.iter().any(|comp| comp == "target") {
                    return None;
                }
                // We don't want `.metadata.json` files.
                let stem = path.file_stem()?.to_str()?;
                if stem.ends_with(".metadata") {
                    return None;
                }
                let name = stem.split('.').next().unwrap();
                // Best effort identifier cleanup.
                let name = name.replace(char::is_whitespace, "").replace('-', "_");
                Some((name, path))
            })
            .filter(move |(name, _path)| filter.is_match(name)))
    }

    /// Parses the filtered artifacts into `SolMacroGen` instances,
    /// deduplicating by contract name; errors if none were found.
    fn get_solmacrogen(&self, artifacts: &Path) -> Result<MultiSolMacroGen> {
        let mut dup = HashSet::<String>::default();
        let instances = self
            .get_json_files(artifacts)?
            .filter_map(|(name, path)| {
                trace!(?path, "parsing SolMacroGen from file");
                if dup.insert(name.clone()) { Some(SolMacroGen::new(path, name)) } else { None }
            })
            .collect::<Vec<_>>();
        let multi = MultiSolMacroGen::new(instances);
        eyre::ensure!(!multi.instances.is_empty(), "No contract artifacts found");
        Ok(multi)
    }

    /// Check that the existing bindings match the expected abigen output
    fn check_existing_bindings(&self, artifacts: &Path, bindings_root: &Path) -> Result<()> {
        let mut bindings = self.get_solmacrogen(artifacts)?;
        bindings.generate_bindings(!self.skip_extra_derives)?;
        sh_println!("Checking bindings for {} contracts", bindings.instances.len())?;
        bindings.check_consistency(
            &self.crate_name,
            &self.crate_version,
            bindings_root,
            self.single_file,
            !self.skip_cargo_toml,
            self.module,
            self.alloy_version.clone(),
            self.alloy_rev.clone(),
        )?;
        sh_println!("OK.")?;
        Ok(())
    }

    /// Generate the bindings
    fn generate_bindings(&self, artifacts: &Path, bindings_root: &Path) -> Result<()> {
        let mut solmacrogen = self.get_solmacrogen(artifacts)?;
        sh_println!("Generating bindings for {} contracts", solmacrogen.instances.len())?;
        if !self.module {
            // Default mode: emit a standalone crate.
            trace!(single_file = self.single_file, "generating crate");
            solmacrogen.write_to_crate(
                &self.crate_name,
                &self.crate_version,
                &self.crate_description,
                &self.crate_license,
                bindings_root,
                self.single_file,
                self.alloy_version.clone(),
                self.alloy_rev.clone(),
                !self.skip_extra_derives,
            )?;
        } else {
            // `--module`: emit a module instead of a crate.
            trace!(single_file = self.single_file, "generating module");
            solmacrogen.write_to_module(
                bindings_root,
                self.single_file,
                !self.skip_extra_derives,
            )?;
        }
        Ok(())
    }
}
/// Contract-name filter deciding which artifacts get bindings generated.
pub enum Filter {
    /// Match every contract name.
    All,
    /// Match only names matching at least one of the regexes.
    Select(Vec<regex::Regex>),
    /// Match every name except those matching one of the regexes.
    Skip(Vec<regex::Regex>),
}
impl Filter {
    /// Returns whether `name` passes this filter.
    pub fn is_match(&self, name: &str) -> bool {
        match self {
            Self::All => true,
            Self::Select(set) => set.iter().any(|re| re.is_match(name)),
            // De Morgan of "not any match": every regex must fail to match.
            Self::Skip(set) => set.iter().all(|re| !re.is_match(name)),
        }
    }

    /// The default skip list: test/script contracts and well-known
    /// forge-std/console helper contracts.
    pub fn skip_default() -> Self {
        let patterns = [
            ".*Test.*",
            ".*Script",
            "console[2]?",
            "CommonBase",
            "Components",
            "[Ss]td(Chains|Math|Error|Json|Utils|Cheats|Style|Invariant|Assertions|Toml|Storage(Safe)?)",
            "[Vv]m.*",
            "IMulticall3",
        ];
        Self::Skip(patterns.iter().map(|p| regex::Regex::new(p).unwrap()).collect())
    }
}
use super::{install, test::TestArgs, watch::WatchArgs};
use crate::coverage::{
BytecodeReporter, ContractId, CoverageReport, CoverageReporter, CoverageSummaryReporter,
DebugReporter, ItemAnchor, LcovReporter,
analysis::{SourceAnalysis, SourceFiles},
anchors::find_anchors,
};
use alloy_primitives::{Address, Bytes, U256, map::HashMap};
use clap::{Parser, ValueEnum, ValueHint};
use eyre::Result;
use foundry_cli::utils::{LoadConfig, STATIC_FUZZ_SEED};
use foundry_common::{compile::ProjectCompiler, errors::convert_solar_errors};
use foundry_compilers::{
Artifact, ArtifactId, Project, ProjectCompileOutput, ProjectPathsConfig, VYPER_EXTENSIONS,
artifacts::{CompactBytecode, CompactDeployedBytecode, sourcemap::SourceMap},
};
use foundry_config::Config;
use foundry_evm::{core::ic::IcPcMap, opts::EvmOpts};
use rayon::prelude::*;
use semver::{Version, VersionReq};
use std::path::{Path, PathBuf};
// Loads project's figment and merges the build cli arguments into it
foundry_config::impl_figment_convert!(CoverageArgs, test);

// NOTE: `///` doc comments on clap fields become help text; reviewer notes use
// `//` so the CLI output stays unchanged.
/// CLI arguments for `forge coverage`.
#[derive(Parser)]
pub struct CoverageArgs {
    /// The report type to use for coverage.
    ///
    /// This flag can be used multiple times.
    #[arg(long, value_enum, default_value = "summary")]
    report: Vec<CoverageReportKind>,

    /// The version of the LCOV "tracefile" format to use.
    ///
    /// Format: `MAJOR[.MINOR]`.
    ///
    /// Main differences:
    /// - `1.x`: The original v1 format.
    /// - `2.0`: Adds support for "line end" numbers for functions.
    /// - `2.2`: Changes the format of functions.
    #[arg(long, default_value = "1", value_parser = parse_lcov_version)]
    lcov_version: Version,

    /// Enable viaIR with minimum optimization
    ///
    /// This can fix most of the "stack too deep" errors while resulting a
    /// relatively accurate source map.
    #[arg(long)]
    ir_minimum: bool,

    /// The path to output the report.
    ///
    /// If not specified, the report will be stored in the root of the project.
    #[arg(
        long,
        short,
        value_hint = ValueHint::FilePath,
        value_name = "PATH"
    )]
    report_file: Option<PathBuf>,

    /// Whether to include libraries in the coverage report.
    #[arg(long)]
    include_libs: bool,

    /// Whether to exclude tests from the coverage report.
    #[arg(long)]
    exclude_tests: bool,

    // Not a CLI flag: populated from the fields above by `populate_reporters`.
    /// The coverage reporters to use. Constructed from the other fields.
    #[arg(skip)]
    reporters: Vec<Box<dyn CoverageReporter>>,

    #[command(flatten)]
    test: TestArgs,
}
impl CoverageArgs {
/// Entry point for `forge coverage`: installs missing dependencies, builds the
/// project with coverage-friendly compiler settings, analyzes the sources,
/// runs the tests, and emits the configured reports.
pub async fn run(mut self) -> Result<()> {
    let (mut config, evm_opts) = self.load_config_and_evm_opts()?;
    // install missing dependencies
    if install::install_missing_dependencies(&mut config).await && config.auto_detect_remappings
    {
        // need to re-configure here to also catch additional remappings
        config = self.load_config()?;
    }
    // Set fuzz seed so coverage reports are deterministic
    config.fuzz.seed = Some(U256::from_be_bytes(STATIC_FUZZ_SEED));
    // Keep only the paths and the compile output; the project itself is dropped.
    let (paths, mut output) = {
        let (project, output) = self.build(&config)?;
        (project.paths, output)
    };
    self.populate_reporters(&paths.root);
    sh_println!("Analysing contracts...")?;
    let report = self.prepare(&paths, &mut output)?;
    sh_println!("Running tests...")?;
    self.collect(&paths.root, &output, report, config, evm_opts).await
}
/// Builds `self.reporters` from the requested report kinds, resolving output
/// paths relative to `root`.
fn populate_reporters(&mut self, root: &Path) {
    let mut reporters: Vec<Box<dyn CoverageReporter>> = Vec::with_capacity(self.report.len());
    for kind in &self.report {
        let reporter: Box<dyn CoverageReporter> = match kind {
            CoverageReportKind::Summary => Box::<CoverageSummaryReporter>::default(),
            CoverageReportKind::Lcov => {
                // `--report-file` overrides the default `lcov.info` file name.
                let path =
                    root.join(self.report_file.as_deref().unwrap_or("lcov.info".as_ref()));
                Box::new(LcovReporter::new(path, self.lcov_version.clone()))
            }
            CoverageReportKind::Bytecode => Box::new(BytecodeReporter::new(
                root.to_path_buf(),
                root.join("bytecode-coverage"),
            )),
            CoverageReportKind::Debug => Box::new(DebugReporter),
        };
        reporters.push(reporter);
    }
    self.reporters = reporters;
}
/// Builds the project.
///
/// Optimizations are disabled (or, with `--ir-minimum`, reduced to
/// minimum-optimization `viaIR`) so source mappings stay usable for coverage.
fn build(&self, config: &Config) -> Result<(Project, ProjectCompileOutput)> {
    let mut project = config.ephemeral_project()?;
    if self.ir_minimum {
        sh_warn!(
            "`--ir-minimum` enables `viaIR` with minimum optimization, \
            which can result in inaccurate source mappings.\n\
            Only use this flag as a workaround if you are experiencing \"stack too deep\" errors.\n\
            Note that `viaIR` is production ready since Solidity 0.8.13 and above.\n\
            See more: https://github.com/foundry-rs/foundry/issues/3357"
        )?;
    } else {
        sh_warn!(
            "optimizer settings and `viaIR` have been disabled for accurate coverage reports.\n\
            If you encounter \"stack too deep\" errors, consider using `--ir-minimum` which \
            enables `viaIR` with minimum optimization resolving most of the errors"
        )?;
    }
    config.disable_optimizations(&mut project, self.ir_minimum);
    let output = ProjectCompiler::default()
        .compile(&project)?
        .with_stripped_file_prefixes(project.root());
    Ok((project, output))
}
/// Builds the coverage report.
///
/// Lowers the sources with solar, registers every (version, source file) pair
/// with the report, parses artifact source maps in parallel, and computes
/// coverage items plus their bytecode anchors.
#[instrument(name = "Coverage::prepare", skip_all)]
fn prepare(
    &self,
    project_paths: &ProjectPathsConfig,
    output: &mut ProjectCompileOutput,
) -> Result<CoverageReport> {
    let mut report = CoverageReport::default();
    // Ensure the solar compiler has lowered the ASTs before analysis.
    output.parser_mut().solc_mut().compiler_mut().enter_mut(|compiler| {
        if compiler.gcx().stage() < Some(solar::config::CompilerStage::Lowering) {
            let _ = compiler.lower_asts();
        }
        convert_solar_errors(compiler.dcx())
    })?;
    let output = &*output;
    // Collect source files.
    let mut versioned_sources = HashMap::<Version, SourceFiles>::default();
    for (path, source_file, version) in output.output().sources.sources_with_version() {
        // Filter out vyper sources.
        if path
            .extension()
            .and_then(|s| s.to_str())
            .is_some_and(|ext| VYPER_EXTENSIONS.contains(&ext))
        {
            continue;
        }
        report.add_source(version.clone(), source_file.id as usize, path.clone());
        // Filter out libs dependencies and tests.
        if (!self.include_libs && project_paths.has_library_ancestor(path))
            || (self.exclude_tests && project_paths.is_test(path))
        {
            continue;
        }
        let path = project_paths.root.join(path);
        versioned_sources
            .entry(version.clone())
            .or_default()
            .sources
            .insert(source_file.id, path);
    }
    // Get source maps and bytecodes.
    let artifacts: Vec<ArtifactData> = output
        .artifact_ids()
        .par_bridge() // This parses source maps, so we want to run it in parallel.
        .filter_map(|(id, artifact)| {
            let source_id = report.get_source_id(id.version.clone(), id.source.clone())?;
            ArtifactData::new(&id, source_id, artifact)
        })
        .collect();
    // Add coverage items.
    for (version, sources) in &versioned_sources {
        let source_analysis = SourceAnalysis::new(sources, output)?;
        // Anchor creation/deployed bytecode of each matching artifact to the
        // analyzed source items.
        let anchors = artifacts
            .par_iter()
            .filter(|artifact| artifact.contract_id.version == *version)
            .map(|artifact| {
                let creation_code_anchors = artifact.creation.find_anchors(&source_analysis);
                let deployed_code_anchors = artifact.deployed.find_anchors(&source_analysis);
                (artifact.contract_id.clone(), (creation_code_anchors, deployed_code_anchors))
            })
            .collect_vec_list();
        report.add_anchors(anchors.into_iter().flatten());
        report.add_analysis(version.clone(), source_analysis);
    }
    // Source maps are only needed by some reporters; skip the work otherwise.
    if self.reporters.iter().any(|reporter| reporter.needs_source_maps()) {
        report.add_source_maps(artifacts.into_iter().map(|artifact| {
            (artifact.contract_id, (artifact.creation.source_map, artifact.deployed.source_map))
        }));
    }
    Ok(report)
}
/// Runs tests, collects coverage data and generates the final report.
///
/// Maps every recorded hit map back to a known contract by matching its
/// bytecode (deployed code first, then creation code), merges the hits into
/// `report`, applies the inverse coverage filter, emits all configured
/// reports, and only then surfaces test failures so that coverage output is
/// written even when tests fail.
#[instrument(name = "Coverage::collect", skip_all)]
async fn collect(
    mut self,
    project_root: &Path,
    output: &ProjectCompileOutput,
    mut report: CoverageReport,
    config: Config,
    evm_opts: EvmOpts,
) -> Result<()> {
    let filter = self.test.filter(&config)?;
    let outcome =
        self.test.run_tests(project_root, config, evm_opts, output, &filter, true).await?;
    let known_contracts = outcome.runner.as_ref().unwrap().known_contracts.clone();
    // Add hit data to the coverage report
    let data = outcome.results.iter().flat_map(|(_, suite)| {
        let mut hits = Vec::new();
        for result in suite.test_results.values() {
            let Some(hit_maps) = result.line_coverage.as_ref() else { continue };
            for map in hit_maps.0.values() {
                // Prefer a deployed-code match; fall back to creation code.
                // The boolean marks which kind of bytecode the hits belong to.
                if let Some((id, _)) = known_contracts.find_by_deployed_code(map.bytecode()) {
                    hits.push((id, map, true));
                } else if let Some((id, _)) =
                    known_contracts.find_by_creation_code(map.bytecode())
                {
                    hits.push((id, map, false));
                }
            }
        }
        hits
    });
    for (artifact_id, map, is_deployed_code) in data {
        if let Some(source_id) =
            report.get_source_id(artifact_id.version.clone(), artifact_id.source.clone())
        {
            report.add_hit_map(
                &ContractId {
                    version: artifact_id.version.clone(),
                    source_id,
                    contract_name: artifact_id.name.as_str().into(),
                },
                map,
                is_deployed_code,
            )?;
        }
    }
    // Filter out ignored sources from the report.
    if let Some(not_re) = &filter.args().coverage_pattern_inverse {
        let file_root = filter.paths().root.as_path();
        report.retain_sources(|path: &Path| {
            let path = path.strip_prefix(file_root).unwrap_or(path);
            !not_re.is_match(&path.to_string_lossy())
        });
    }
    // Output final reports.
    self.report(&report)?;
    // Check for test failures after generating coverage report.
    // This ensures coverage data is written even when tests fail.
    outcome.ensure_ok(false)?;
    Ok(())
}
/// Emits the coverage report through every configured reporter, stopping at
/// the first reporter error.
#[instrument(name = "Coverage::report", skip_all)]
fn report(&mut self, report: &CoverageReport) -> Result<()> {
    self.reporters.iter_mut().try_for_each(|reporter| {
        let _guard = debug_span!("reporter.report", kind=%reporter.name()).entered();
        reporter.report(report)
    })
}
/// Returns `true` if watch mode was requested; delegates to the underlying
/// `forge test` arguments.
pub fn is_watch(&self) -> bool {
    self.test.is_watch()
}
/// Returns the watch configuration of the underlying `forge test` arguments.
pub fn watch(&self) -> &WatchArgs {
    &self.test.watch
}
}
/// Coverage reports to generate.
// NOTE: `ValueEnum` doc comments would become CLI help text, so variant notes
// are kept as regular comments to avoid changing `--help` output.
#[derive(Clone, Debug, Default, ValueEnum)]
pub enum CoverageReportKind {
    // Summary report (the default).
    #[default]
    Summary,
    // LCOV report.
    Lcov,
    // Debug report.
    Debug,
    // Bytecode-level report.
    Bytecode,
}
/// Helper function that will link references in unlinked bytecode to the 0 address.
///
/// This is needed in order to analyze the bytecode for contracts that use libraries.
fn dummy_link_bytecode(mut obj: CompactBytecode) -> Option<Bytes> {
    // A snapshot of the references is iterated because linking mutates `obj`.
    for (file, libraries) in obj.link_references.clone() {
        for library in libraries.keys() {
            obj.link(&file, library, Address::ZERO);
        }
    }
    obj.object.resolve();
    obj.object.into_bytes()
}
/// Helper function that will link references in unlinked bytecode to the 0 address.
///
/// This is needed in order to analyze the bytecode for contracts that use libraries.
fn dummy_link_deployed_bytecode(obj: CompactDeployedBytecode) -> Option<Bytes> {
obj.bytecode.and_then(dummy_link_bytecode)
}
/// Coverage-relevant data extracted from a single compiled artifact.
pub struct ArtifactData {
    /// Identifier of the contract this data belongs to.
    pub contract_id: ContractId,
    /// Creation (constructor) bytecode data.
    pub creation: BytecodeData,
    /// Deployed (runtime) bytecode data.
    pub deployed: BytecodeData,
}
impl ArtifactData {
    /// Extracts the creation and deployed bytecode data from `artifact`.
    ///
    /// Returns `None` when the artifact is missing a source map or bytecode,
    /// or when dummy-linking its bytecode fails.
    pub fn new(id: &ArtifactId, source_id: usize, artifact: &impl Artifact) -> Option<Self> {
        let contract_id = ContractId {
            version: id.version.clone(),
            source_id,
            contract_name: id.name.as_str().into(),
        };
        let creation_map = artifact.get_source_map()?.ok()?;
        let creation_bytes = artifact
            .get_bytecode()
            .and_then(|bytecode| dummy_link_bytecode(bytecode.into_owned()))?;
        let deployed_map = artifact.get_source_map_deployed()?.ok()?;
        let deployed_bytes = artifact
            .get_deployed_bytecode()
            .and_then(|bytecode| dummy_link_deployed_bytecode(bytecode.into_owned()))?;
        Some(Self {
            contract_id,
            creation: BytecodeData::new(creation_map, creation_bytes),
            deployed: BytecodeData::new(deployed_map, deployed_bytes),
        })
    }
}
/// A bytecode blob paired with the metadata needed to anchor coverage items.
pub struct BytecodeData {
    /// The source map of `bytecode`.
    source_map: SourceMap,
    /// The (dummy-linked) bytecode.
    bytecode: Bytes,
    /// The instruction counter to program counter mapping.
    ///
    /// The source maps are indexed by *instruction counters*, which are the indexes of
    /// instructions in the bytecode *minus any push bytes*.
    ///
    /// Since our line coverage inspector collects hit data using program counters, the anchors
    /// also need to be based on program counters.
    ic_pc_map: IcPcMap,
}
impl BytecodeData {
    /// Creates a new instance, precomputing the IC -> PC mapping for `bytecode`.
    fn new(source_map: SourceMap, bytecode: Bytes) -> Self {
        // `ic_pc_map` is built first so `bytecode` can still be borrowed
        // before being moved into the struct.
        Self { source_map, ic_pc_map: IcPcMap::new(&bytecode), bytecode }
    }

    /// Finds the coverage item anchors for this bytecode.
    pub fn find_anchors(&self, source_analysis: &SourceAnalysis) -> Vec<ItemAnchor> {
        find_anchors(&self.bytecode, &self.source_map, &self.ic_pc_map, source_analysis)
    }
}
/// Parses an LCOV version specifier (e.g. `1.11`) into an exact [`Version`].
///
/// Missing minor/patch components default to `0`; pre-releases are rejected.
fn parse_lcov_version(s: &str) -> Result<Version, String> {
    let req = VersionReq::parse(&format!("={s}")).map_err(|e| e.to_string())?;
    // Exactly one exact comparator is required.
    let c = match &req.comparators[..] {
        [c] if c.op == semver::Op::Exact => c,
        _ => return Err("invalid version".to_string()),
    };
    if !c.pre.is_empty() {
        return Err("pre-releases are not supported".to_string());
    }
    Ok(Version::new(c.major, c.minor.unwrap_or(0), c.patch.unwrap_or(0)))
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn lcov_version() {
        // (input, expected (major, minor, patch))
        let cases = [
            ("0", (0, 0, 0)),
            ("1", (1, 0, 0)),
            ("1.0", (1, 0, 0)),
            ("1.1", (1, 1, 0)),
            ("1.11", (1, 11, 0)),
        ];
        for (input, (major, minor, patch)) in cases {
            assert_eq!(parse_lcov_version(input).unwrap(), Version::new(major, minor, patch));
        }
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/soldeer.rs | crates/forge/src/cmd/soldeer.rs | use clap::Parser;
use eyre::Result;
use foundry_common::shell;
use soldeer_commands::{Command, Verbosity};
/// Available subcommands for Soldeer, see <https://github.com/mario-eth/soldeer/blob/main/crates/commands/src/lib.rs>
/// for more information
// NOTE: `clap` renders `///` doc comments as CLI help text, so the docs on
// this struct and its fields are user-visible.
#[derive(Clone, Debug, Parser)]
#[command(
    override_usage = "Native Solidity Package Manager, run `forge soldeer [COMMAND] --help` for more details"
)]
pub struct SoldeerArgs {
    /// Command must be one of the following init/install/login/push/uninstall/update/version.
    #[command(subcommand)]
    command: Command,
}
impl SoldeerArgs {
    /// Runs the given soldeer subcommand, deriving its verbosity from the
    /// global shell settings.
    pub async fn run(self) -> Result<()> {
        let verbosity = Verbosity::new(shell::verbosity(), if shell::is_quiet() { 1 } else { 0 });
        if let Err(err) = soldeer_commands::run(self.command, verbosity).await {
            return Err(eyre::eyre!("Failed to run soldeer: {err}"));
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use soldeer_commands::{Command, Verbosity, commands::Version};

    // Smoke test: running the `version` subcommand should always succeed.
    #[tokio::test]
    async fn test_soldeer_version() {
        let command = Command::Version(Version::default());
        assert!(soldeer_commands::run(command, Verbosity::new(0, 1)).await.is_ok());
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/update.rs | crates/forge/src/cmd/update.rs | use crate::{DepIdentifier, DepMap, Lockfile};
use alloy_primitives::map::HashMap;
use clap::{Parser, ValueHint};
use eyre::{Context, Result};
use foundry_cli::{
opts::Dependency,
utils::{CommandUtils, Git, LoadConfig},
};
use foundry_config::{Config, impl_figment_convert_basic};
use std::path::{Path, PathBuf};
use yansi::Paint;
/// CLI arguments for `forge update`.
// NOTE: `clap` renders the field `///` doc comments as CLI help text, so they
// are user-visible and should not be reworded casually.
#[derive(Clone, Debug, Parser)]
pub struct UpdateArgs {
    /// The dependencies you want to update.
    dependencies: Vec<Dependency>,
    /// The project's root path.
    ///
    /// By default root of the Git repository, if in one,
    /// or the current working directory.
    #[arg(long, value_hint = ValueHint::DirPath, value_name = "PATH")]
    root: Option<PathBuf>,
    /// Override the up-to-date check.
    #[arg(short, long)]
    force: bool,
    /// Recursively update submodules.
    #[arg(short, long)]
    recursive: bool,
}
impl_figment_convert_basic!(UpdateArgs);
impl UpdateArgs {
/// Updates the project's git submodule dependencies.
///
/// Without arguments every dependency is updated; with arguments only the
/// given dependencies (optionally pinned via `dep@tag` / `dep@branch`) are.
/// Branch-tracked dependencies are fast-forwarded to the latest remote
/// commit, and the lockfile is rewritten when anything changed.
pub fn run(self) -> Result<()> {
    let config = self.load_config()?;
    // dep_overrides consists of absolute paths of dependencies and their tags
    let (root, _paths, dep_overrides) = dependencies_paths(&self.dependencies, &config)?;
    // Mapping of relative path of lib to its tag type
    // e.g "lib/forge-std" -> DepIdentifier::Tag { name: "v0.1.0", rev: "1234567" }
    let git = Git::new(&root);
    let mut foundry_lock = Lockfile::new(&config.root).with_git(&git);
    let out_of_sync_deps = foundry_lock.sync(config.install_lib_dir())?;
    // update the submodules' tags if any overrides are present
    let mut prev_dep_ids: DepMap = HashMap::default();
    if dep_overrides.is_empty() {
        // running `forge update`, update all deps
        foundry_lock.iter_mut().for_each(|(_path, dep_id)| {
            // Set the `override` flag to true if the dep is a branch
            if let DepIdentifier::Branch { .. } = dep_id {
                dep_id.mark_override();
            }
        });
    } else {
        for (dep_path, override_tag) in &dep_overrides {
            let rel_path = dep_path
                .strip_prefix(&root)
                .wrap_err("Dependency path is not relative to the repository root")?;
            if let Ok(mut dep_id) = DepIdentifier::resolve_type(&git, dep_path, override_tag) {
                // Store the previous state before overriding
                let prev = foundry_lock.get(rel_path).cloned();
                // If it's a branch, mark it as overridden so it gets updated below
                if let DepIdentifier::Branch { .. } = dep_id {
                    dep_id.mark_override();
                }
                // Update the lockfile
                foundry_lock.override_dep(rel_path, dep_id)?;
                // Only track as updated if there was a previous dependency
                if let Some(prev) = prev {
                    prev_dep_ids.insert(rel_path.to_owned(), prev);
                }
            } else {
                // Fix: the raw-identifier `r#override` previously leaked into this
                // user-facing message; it should read "override".
                sh_warn!(
                    "Could not override submodule at {} with tag {}, try using forge install",
                    rel_path.display(),
                    override_tag
                )?;
            }
        }
    }
    // fetch the latest changes for each submodule (recursively if flag is set)
    let git = Git::new(&root);
    let update_paths = self.update_dep_paths(&foundry_lock);
    trace!(?update_paths, "updating deps at");
    if self.recursive {
        // update submodules recursively
        git.submodule_update(self.force, true, false, true, update_paths)?;
    } else {
        let is_empty = update_paths.is_empty();
        // update submodules
        git.submodule_update(self.force, true, false, false, update_paths)?;
        if !is_empty {
            // initialize submodules of each submodule recursively (otherwise direct submodule
            // dependencies will revert to last commit)
            git.submodule_foreach(false, "git submodule update --init --progress --recursive")?;
        }
    }
    // Update branches to their latest commit from origin
    // This handles both explicit updates (forge update dep@branch) and
    // general updates (forge update) for branch-tracked dependencies
    let branch_overrides = foundry_lock
        .iter_mut()
        .filter_map(|(path, dep_id)| {
            if dep_id.is_branch() && dep_id.overridden() {
                return Some((path, dep_id));
            }
            None
        })
        .collect::<Vec<_>>();
    for (path, dep_id) in branch_overrides {
        let submodule_path = root.join(path);
        let name = dep_id.name();
        // Fetch and checkout the latest commit from the remote branch
        Self::fetch_and_checkout_branch(&git, &submodule_path, name)?;
        // Now get the updated revision after syncing with origin
        let (updated_rev, _) = git.current_rev_branch(&submodule_path)?;
        // Update the lockfile entry to reflect the latest commit
        let prev = std::mem::replace(
            dep_id,
            DepIdentifier::Branch {
                name: name.to_string(),
                rev: updated_rev,
                r#override: true,
            },
        );
        // Only insert if we don't already have a previous state for this path
        // (e.g., from explicit overrides where we converted tag to branch)
        if !prev_dep_ids.contains_key(path) {
            prev_dep_ids.insert(path.to_owned(), prev);
        }
    }
    // checkout the submodules at the correct tags
    // Skip branches that were already updated above to avoid reverting to local branch
    for (path, dep_id) in foundry_lock.iter() {
        // Ignore other dependencies if single update.
        if !dep_overrides.is_empty() && !dep_overrides.contains_key(path) {
            continue;
        }
        // Skip branches that were already updated
        if dep_id.is_branch() && dep_id.overridden() {
            continue;
        }
        git.checkout_at(dep_id.checkout_id(), &root.join(path))?;
    }
    // Persist the lockfile only when something actually changed.
    if out_of_sync_deps.is_some_and(|o| !o.is_empty())
        || foundry_lock.iter().any(|(_, dep_id)| dep_id.overridden())
    {
        foundry_lock.write()?;
    }
    // Print updates from => to
    for (path, prev) in prev_dep_ids {
        let curr = foundry_lock.get(&path).unwrap();
        sh_println!(
            "Updated dep at '{}', (from: {prev}, to: {curr})",
            path.display().green(),
            prev = prev,
            curr = curr.yellow()
        )?;
    }
    Ok(())
}
/// Returns the `lib/paths` of the dependencies that have been updated/overridden.
fn update_dep_paths(&self, foundry_lock: &Lockfile<'_>) -> Vec<PathBuf> {
    foundry_lock
        .iter()
        .filter(|(_, dep_id)| dep_id.overridden())
        .map(|(path, _)| path.to_path_buf())
        .collect()
}
/// Fetches and checks out the latest version of a branch from origin.
///
/// Uses `wrap_err_with` so the context strings are only formatted on the
/// error path (the previous `wrap_err(format!(..))` allocated them eagerly
/// on every call).
fn fetch_and_checkout_branch(git: &Git<'_>, path: &Path, branch: &str) -> Result<()> {
    // Fetch the latest changes from origin for the branch.
    git.cmd_at(path).args(["fetch", "origin", branch]).exec().wrap_err_with(|| {
        format!(
            "Could not fetch latest changes for branch {} in submodule at {}",
            branch,
            path.display()
        )
    })?;
    // Checkout and track the remote branch to ensure we have the latest commit.
    // Using checkout -B ensures the local branch tracks origin/branch.
    git.cmd_at(path)
        .args(["checkout", "-B", branch, &format!("origin/{branch}")])
        .exec()
        .wrap_err_with(|| {
            format!(
                "Could not checkout and track origin/{} for submodule at {}",
                branch,
                path.display()
            )
        })?;
    Ok(())
}
}
/// Returns `(root, paths, overridden_deps_with_absolute_paths)` where `root` is the root of the
/// Git repository and `paths` are the relative paths of the dependencies.
#[allow(clippy::type_complexity)]
pub fn dependencies_paths(
    deps: &[Dependency],
    config: &Config,
) -> Result<(PathBuf, Vec<PathBuf>, HashMap<PathBuf, String>)> {
    let git_root = Git::root_of(&config.root)?;
    let libs = config.install_lib_dir();
    // No explicit dependencies requested: no paths, no overrides.
    if deps.is_empty() {
        return Ok((git_root, Vec::new(), HashMap::default()));
    }
    let mut paths = Vec::with_capacity(deps.len());
    let mut overrides = HashMap::with_capacity_and_hasher(deps.len(), Default::default());
    for dep in deps {
        let name = dep.name();
        let dep_path = libs.join(name);
        // The dependency must already be installed under the lib dir.
        if !dep_path.exists() {
            eyre::bail!("Could not find dependency {name:?} in {}", dep_path.display());
        }
        let rel_path = dep_path
            .strip_prefix(&git_root)
            .wrap_err("Library directory is not relative to the repository root")?;
        // A `dep@tag` argument becomes an override entry keyed by absolute path.
        if let Some(tag) = &dep.tag {
            overrides.insert(dep_path.to_owned(), tag.to_owned());
        }
        paths.push(rel_path.to_owned());
    }
    Ok((git_root, paths, overrides))
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/flatten.rs | crates/forge/src/cmd/flatten.rs | use clap::{Parser, ValueHint};
use eyre::Result;
use foundry_cli::{
opts::{BuildOpts, ProjectPathOpts},
utils::LoadConfig,
};
use foundry_common::{flatten, fs};
use std::path::PathBuf;
/// CLI arguments for `forge flatten`.
// NOTE: `clap` renders the field `///` doc comments as CLI help text, so they
// are user-visible and should not be reworded casually.
#[derive(Clone, Debug, Parser)]
pub struct FlattenArgs {
    /// The path to the contract to flatten.
    #[arg(value_hint = ValueHint::FilePath, value_name = "PATH")]
    pub target_path: PathBuf,
    /// The path to output the flattened contract.
    ///
    /// If not specified, the flattened contract will be output to stdout.
    #[arg(
        long,
        short,
        value_hint = ValueHint::FilePath,
        value_name = "PATH",
    )]
    pub output: Option<PathBuf>,
    #[command(flatten)]
    project_paths: ProjectPathOpts,
}
impl FlattenArgs {
    /// Flattens the target contract and writes the result to `--output`, or to
    /// stdout when no output path is given.
    pub fn run(self) -> Result<()> {
        let Self { target_path, output, project_paths } = self;
        // flatten is a subset of `BuildArgs` so we can reuse that to get the config
        let build = BuildOpts { project_paths, ..Default::default() };
        let config = build.load_config()?;
        let project = config.ephemeral_project()?;
        let target_path = dunce::canonicalize(target_path)?;
        let flattened = flatten(project, &target_path)?;
        match output {
            Some(output) => {
                // `parent()` is `None` for root/prefix-only paths; previously this
                // was `unwrap()`ed, panicking on such inputs. There is no directory
                // to create in that case, so skip the creation instead.
                if let Some(parent) = output.parent() {
                    fs::create_dir_all(parent)?;
                }
                fs::write(&output, flattened)?;
                sh_println!("Flattened file written at {}", output.display())?;
            }
            None => sh_println!("{flattened}")?,
        };
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
foundry-rs/foundry | https://github.com/foundry-rs/foundry/blob/271c34d2550474a8ded7fbeedff398b920a96689/crates/forge/src/cmd/lint.rs | crates/forge/src/cmd/lint.rs | use clap::{Parser, ValueHint};
use eyre::{Result, eyre};
use forge_lint::{
linter::Linter,
sol::{SolLint, SolLintError, SolidityLinter},
};
use foundry_cli::{
opts::{BuildOpts, configure_pcx_from_solc, get_solar_sources_from_compile_output},
utils::{FoundryPathExt, LoadConfig},
};
use foundry_common::{compile::ProjectCompiler, shell};
use foundry_compilers::{solc::SolcLanguage, utils::SOLC_EXTENSIONS};
use foundry_config::{filter::expand_globs, lint::Severity};
use std::path::PathBuf;
/// CLI arguments for `forge lint`.
// NOTE: `clap` renders the field `///` doc comments as CLI help text, so they
// are user-visible and should not be reworded casually.
#[derive(Clone, Debug, Parser)]
pub struct LintArgs {
    /// Path to the file to be checked. Overrides the `ignore` project config.
    #[arg(value_hint = ValueHint::FilePath, value_name = "PATH", num_args(1..))]
    pub(crate) paths: Vec<PathBuf>,
    /// Specifies which lints to run based on severity. Overrides the `severity` project config.
    ///
    /// Supported values: `high`, `med`, `low`, `info`, `gas`.
    #[arg(long, value_name = "SEVERITY", num_args(1..))]
    pub(crate) severity: Option<Vec<Severity>>,
    /// Specifies which lints to run based on their ID (e.g., "incorrect-shift"). Overrides the
    /// `exclude_lints` project config.
    #[arg(long = "only-lint", value_name = "LINT_ID", num_args(1..))]
    pub(crate) lint: Option<Vec<String>>,
    #[command(flatten)]
    pub(crate) build: BuildOpts,
}
foundry_config::impl_figment_convert!(LintArgs, build);
impl LintArgs {
    /// Runs `forge lint`.
    ///
    /// Resolves the set of files to lint (explicit CLI paths override the
    /// project `ignore` config), merges CLI lint filters with the project
    /// lint config, compiles the project, and runs the Solidity linter over
    /// the solar-compatible sources.
    pub fn run(self) -> Result<()> {
        let config = self.load_config()?;
        let project = config.solar_project()?;
        let path_config = config.project_paths();
        // Expand ignore globs and canonicalize from the get go
        let ignored = expand_globs(&config.root, config.lint.ignore.iter())?
            .iter()
            .flat_map(foundry_common::fs::canonicalize_path)
            .collect::<Vec<_>>();
        let cwd = std::env::current_dir()?;
        let input = match &self.paths[..] {
            [] => {
                // Retrieve the project paths, and filter out the ignored ones.
                config
                    .project_paths::<SolcLanguage>()
                    .input_files_iter()
                    .filter(|p| !(ignored.contains(p) || ignored.contains(&cwd.join(p))))
                    .collect()
            }
            paths => {
                // Override default excluded paths and only lint the input files.
                let mut inputs = Vec::with_capacity(paths.len());
                for path in paths {
                    if path.is_dir() {
                        // Directories are expanded to their Solidity source files.
                        inputs
                            .extend(foundry_compilers::utils::source_files(path, SOLC_EXTENSIONS));
                    } else if path.is_sol() {
                        inputs.push(path.to_path_buf());
                    } else {
                        // Non-Solidity file paths are skipped with a warning.
                        warn!("cannot process path {}", path.display());
                    }
                }
                inputs
            }
        };
        if input.is_empty() {
            sh_println!("nothing to lint")?;
            return Ok(());
        }
        let parse_lints = |lints: &[String]| -> Result<Vec<SolLint>, SolLintError> {
            lints.iter().map(|s| SolLint::try_from(s.as_str())).collect()
        };
        // Override default lint config with user-defined lints
        // When --only-lint is used, bypass the severity filter by setting it to None
        let (include, exclude, severity) = match &self.lint {
            Some(cli_lints) => (Some(parse_lints(cli_lints)?), None, vec![]),
            None => {
                let severity = self.severity.clone().unwrap_or(config.lint.severity.clone());
                (None, Some(parse_lints(&config.lint.exclude_lints)?), severity)
            }
        };
        if project.compiler.solc.is_none() {
            return Err(eyre!("linting not supported for this language"));
        }
        let linter = SolidityLinter::new(path_config)
            .with_json_emitter(shell::is_json())
            .with_description(true)
            .with_lints(include)
            .without_lints(exclude)
            .with_severity(if severity.is_empty() { None } else { Some(severity) })
            .with_mixed_case_exceptions(&config.lint.mixed_case_exceptions);
        let output = ProjectCompiler::new().files(input.iter().cloned()).compile(&project)?;
        let solar_sources = get_solar_sources_from_compile_output(&config, &output, Some(&input))?;
        if solar_sources.input.sources.is_empty() {
            return Err(eyre!(
                "unable to lint. Solar only supports Solidity versions prior to 0.8.0"
            ));
        }
        // NOTE(rusowsky): Once solar can drop unsupported versions, rather than creating a new
        // compiler, we should reuse the parser from the project output.
        let mut compiler = solar::sema::Compiler::new(
            solar::interface::Session::builder().with_stderr_emitter().build(),
        );
        // Load the solar-compatible sources to the pcx before linting
        compiler.enter_mut(|compiler| {
            let mut pcx = compiler.parse();
            pcx.set_resolve_imports(true);
            configure_pcx_from_solc(&mut pcx, &config.project_paths(), &solar_sources, true);
            pcx.parse();
        });
        linter.lint(&input, config.deny, &mut compiler)?;
        Ok(())
    }
}
| rust | Apache-2.0 | 271c34d2550474a8ded7fbeedff398b920a96689 | 2026-01-04T15:43:23.630446Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.